diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy index 73d7a3c7cd9..74cae08298b 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy @@ -286,6 +286,9 @@ class ClusterFormationTasks { esConfig['node.max_local_storage_nodes'] = node.config.numNodes esConfig['http.port'] = node.config.httpPort esConfig['transport.tcp.port'] = node.config.transportPort + // Default the watermarks to absurdly low to prevent the tests from failing on nodes without enough disk space + esConfig['cluster.routing.allocation.disk.watermark.low'] = '1b' + esConfig['cluster.routing.allocation.disk.watermark.high'] = '1b' esConfig.putAll(node.config.settings) Task writeConfig = project.tasks.create(name: name, type: DefaultTask, dependsOn: setup) diff --git a/buildSrc/version.properties b/buildSrc/version.properties index 7bede9f390e..146aafceb7f 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -1,5 +1,5 @@ elasticsearch = 6.0.0-alpha1 -lucene = 6.3.0 +lucene = 6.4.0-snapshot-ec38570 # optional dependencies spatial4j = 0.6 diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/TransportNoopBulkAction.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/TransportNoopBulkAction.java index 2a5efee1881..0ce008908cf 100644 --- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/TransportNoopBulkAction.java +++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/TransportNoopBulkAction.java @@ -46,7 +46,7 @@ public class TransportNoopBulkAction extends HandledTransportAction listener) { - final int itemCount = request.subRequests().size(); + final int itemCount = request.requests().size(); // simulate at least a realistic amount of data that gets serialized BulkItemResponse[] bulkItemResponses = new BulkItemResponse[itemCount]; for (int idx = 0; idx < itemCount; idx++) { diff --git a/core/licenses/hppc-0.7.1.jar.sha1 b/core/licenses/hppc-0.7.1.jar.sha1 index d3e00e1c476..aa191a6c93b 100644 --- a/core/licenses/hppc-0.7.1.jar.sha1 +++ b/core/licenses/hppc-0.7.1.jar.sha1 @@ -1 +1 @@ -8b5057f74ea378c0150a1860874a3ebdcb713767 +8b5057f74ea378c0150a1860874a3ebdcb713767 \ No newline at end of file diff --git a/core/licenses/jts-1.13.jar.sha1 b/core/licenses/jts-1.13.jar.sha1 index 2899139aab8..5b9e3902cf4 100644 --- a/core/licenses/jts-1.13.jar.sha1 +++ b/core/licenses/jts-1.13.jar.sha1 @@ -1 +1 @@ -3ccfb9b60f04d71add996a666ceb8902904fd805 +3ccfb9b60f04d71add996a666ceb8902904fd805 \ No newline at end of file diff --git a/core/licenses/lucene-analyzers-common-6.3.0.jar.sha1 b/core/licenses/lucene-analyzers-common-6.3.0.jar.sha1 deleted file mode 100644 index 77d6e83314f..00000000000 --- a/core/licenses/lucene-analyzers-common-6.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -494aed699af238c3872a6b65e17939e9cb7ddbe0 \ No newline at end of file diff --git a/core/licenses/lucene-analyzers-common-6.4.0-snapshot-ec38570.jar.sha1 b/core/licenses/lucene-analyzers-common-6.4.0-snapshot-ec38570.jar.sha1 new file mode 100644 index 00000000000..5cab7b2fef1 --- /dev/null +++ b/core/licenses/lucene-analyzers-common-6.4.0-snapshot-ec38570.jar.sha1 @@ -0,0 +1 @@ 
+770114e0188dd8b4f30e5878b4f6c8677cecf1be \ No newline at end of file diff --git a/core/licenses/lucene-backward-codecs-6.3.0.jar.sha1 b/core/licenses/lucene-backward-codecs-6.3.0.jar.sha1 deleted file mode 100644 index 8d1640eecf8..00000000000 --- a/core/licenses/lucene-backward-codecs-6.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -77dede7dff1b833ca2e92d8ab137edb209354d9b \ No newline at end of file diff --git a/core/licenses/lucene-backward-codecs-6.4.0-snapshot-ec38570.jar.sha1 b/core/licenses/lucene-backward-codecs-6.4.0-snapshot-ec38570.jar.sha1 new file mode 100644 index 00000000000..02677cb1ff8 --- /dev/null +++ b/core/licenses/lucene-backward-codecs-6.4.0-snapshot-ec38570.jar.sha1 @@ -0,0 +1 @@ +f4eb0257e8419beaa9f84da6a51375fda4e491f2 \ No newline at end of file diff --git a/core/licenses/lucene-core-6.3.0.jar.sha1 b/core/licenses/lucene-core-6.3.0.jar.sha1 deleted file mode 100644 index b9f5ccfb8d8..00000000000 --- a/core/licenses/lucene-core-6.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d3c87ea89e2f83e401f9cc7f14e4c43945f7f1e1 \ No newline at end of file diff --git a/core/licenses/lucene-core-6.4.0-snapshot-ec38570.jar.sha1 b/core/licenses/lucene-core-6.4.0-snapshot-ec38570.jar.sha1 new file mode 100644 index 00000000000..ea81fbaeb56 --- /dev/null +++ b/core/licenses/lucene-core-6.4.0-snapshot-ec38570.jar.sha1 @@ -0,0 +1 @@ +c80ad16cd36c41012abb8a8bb1c7328c6d680b4a \ No newline at end of file diff --git a/core/licenses/lucene-grouping-6.3.0.jar.sha1 b/core/licenses/lucene-grouping-6.3.0.jar.sha1 deleted file mode 100644 index 003c3801acd..00000000000 --- a/core/licenses/lucene-grouping-6.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2c96d59e318ea66838aeb9c5cfb8b4d27b40953c \ No newline at end of file diff --git a/core/licenses/lucene-grouping-6.4.0-snapshot-ec38570.jar.sha1 b/core/licenses/lucene-grouping-6.4.0-snapshot-ec38570.jar.sha1 new file mode 100644 index 00000000000..d4442ded938 --- /dev/null +++ b/core/licenses/lucene-grouping-6.4.0-snapshot-ec38570.jar.sha1 @@ -0,0 +1 @@ +070d4e370f4fe0b8a04b2bce5b4381201b0c783f \ No newline at end of file diff --git a/core/licenses/lucene-highlighter-6.3.0.jar.sha1 b/core/licenses/lucene-highlighter-6.3.0.jar.sha1 deleted file mode 100644 index 0a7d5deac0c..00000000000 --- a/core/licenses/lucene-highlighter-6.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4f154d8badfe47fe45503c18fb30f2177f758794 \ No newline at end of file diff --git a/core/licenses/lucene-highlighter-6.4.0-snapshot-ec38570.jar.sha1 b/core/licenses/lucene-highlighter-6.4.0-snapshot-ec38570.jar.sha1 new file mode 100644 index 00000000000..e6fc043a287 --- /dev/null +++ b/core/licenses/lucene-highlighter-6.4.0-snapshot-ec38570.jar.sha1 @@ -0,0 +1 @@ +131d9a86f5943675493a85def0e692842f396458 \ No newline at end of file diff --git a/core/licenses/lucene-join-6.3.0.jar.sha1 b/core/licenses/lucene-join-6.3.0.jar.sha1 deleted file mode 100644 index df43f249d16..00000000000 --- a/core/licenses/lucene-join-6.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -79b898117dcfde2981ec6806e420ff218842eca8 \ No newline at end of file diff --git a/core/licenses/lucene-join-6.4.0-snapshot-ec38570.jar.sha1 b/core/licenses/lucene-join-6.4.0-snapshot-ec38570.jar.sha1 new file mode 100644 index 00000000000..6c90673f498 --- /dev/null +++ b/core/licenses/lucene-join-6.4.0-snapshot-ec38570.jar.sha1 @@ -0,0 +1 @@ +385b2202036b50a764e4d2b032e21496b74a1c8e \ No newline at end of file diff --git a/core/licenses/lucene-memory-6.3.0.jar.sha1 b/core/licenses/lucene-memory-6.3.0.jar.sha1 deleted file mode 100644 index 
a8a4e5f1dd9..00000000000 --- a/core/licenses/lucene-memory-6.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -89edeb404e507d640cb13903acff6953199704a2 \ No newline at end of file diff --git a/core/licenses/lucene-memory-6.4.0-snapshot-ec38570.jar.sha1 b/core/licenses/lucene-memory-6.4.0-snapshot-ec38570.jar.sha1 new file mode 100644 index 00000000000..bdb3a168612 --- /dev/null +++ b/core/licenses/lucene-memory-6.4.0-snapshot-ec38570.jar.sha1 @@ -0,0 +1 @@ +e8742a44ef4849a17d5e59ef36e9a52a8f2370c2 \ No newline at end of file diff --git a/core/licenses/lucene-misc-6.3.0.jar.sha1 b/core/licenses/lucene-misc-6.3.0.jar.sha1 deleted file mode 100644 index de4685d9564..00000000000 --- a/core/licenses/lucene-misc-6.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -02d0e1f5a9df15ac911ad495bad5ea253ab50a9f \ No newline at end of file diff --git a/core/licenses/lucene-misc-6.4.0-snapshot-ec38570.jar.sha1 b/core/licenses/lucene-misc-6.4.0-snapshot-ec38570.jar.sha1 new file mode 100644 index 00000000000..e29fc5f139c --- /dev/null +++ b/core/licenses/lucene-misc-6.4.0-snapshot-ec38570.jar.sha1 @@ -0,0 +1 @@ +7ce2e4948fb66393a34f4200a6131cfde43e47bd \ No newline at end of file diff --git a/core/licenses/lucene-queries-6.3.0.jar.sha1 b/core/licenses/lucene-queries-6.3.0.jar.sha1 deleted file mode 100644 index 8bf5b45a4ea..00000000000 --- a/core/licenses/lucene-queries-6.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -eb7938233c8103223069c7b5b5f785b4d20ddafa \ No newline at end of file diff --git a/core/licenses/lucene-queries-6.4.0-snapshot-ec38570.jar.sha1 b/core/licenses/lucene-queries-6.4.0-snapshot-ec38570.jar.sha1 new file mode 100644 index 00000000000..4998ff5b2e4 --- /dev/null +++ b/core/licenses/lucene-queries-6.4.0-snapshot-ec38570.jar.sha1 @@ -0,0 +1 @@ +6c1c385a597ce797b0049d9b2281b09593e1488a \ No newline at end of file diff --git a/core/licenses/lucene-queryparser-6.3.0.jar.sha1 b/core/licenses/lucene-queryparser-6.3.0.jar.sha1 deleted file mode 100644 index e2dae1cc8b0..00000000000 --- a/core/licenses/lucene-queryparser-6.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e979fb02155cbe81a8d335d6dc41d2ef06be68b6 \ No newline at end of file diff --git a/core/licenses/lucene-queryparser-6.4.0-snapshot-ec38570.jar.sha1 b/core/licenses/lucene-queryparser-6.4.0-snapshot-ec38570.jar.sha1 new file mode 100644 index 00000000000..9ba51f22f25 --- /dev/null +++ b/core/licenses/lucene-queryparser-6.4.0-snapshot-ec38570.jar.sha1 @@ -0,0 +1 @@ +fafaa22906c067e6894f9f2b18ad03ded98e2f38 \ No newline at end of file diff --git a/core/licenses/lucene-sandbox-6.3.0.jar.sha1 b/core/licenses/lucene-sandbox-6.3.0.jar.sha1 deleted file mode 100644 index 6baf6baabfe..00000000000 --- a/core/licenses/lucene-sandbox-6.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -257387c45c6fa2b77fd6931751f93fdcd798ced4 \ No newline at end of file diff --git a/core/licenses/lucene-sandbox-6.4.0-snapshot-ec38570.jar.sha1 b/core/licenses/lucene-sandbox-6.4.0-snapshot-ec38570.jar.sha1 new file mode 100644 index 00000000000..cce2045942b --- /dev/null +++ b/core/licenses/lucene-sandbox-6.4.0-snapshot-ec38570.jar.sha1 @@ -0,0 +1 @@ +19c64a84617f42bb4c11b1e266df4009cd37fdd0 \ No newline at end of file diff --git a/core/licenses/lucene-spatial-6.3.0.jar.sha1 b/core/licenses/lucene-spatial-6.3.0.jar.sha1 deleted file mode 100644 index ff35a066ffd..00000000000 --- a/core/licenses/lucene-spatial-6.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3cf5fe5402b5e34b240b73501c9e97a82428259e \ No newline at end of file diff --git a/core/licenses/lucene-spatial-6.4.0-snapshot-ec38570.jar.sha1 
b/core/licenses/lucene-spatial-6.4.0-snapshot-ec38570.jar.sha1 new file mode 100644 index 00000000000..8169bea2fae --- /dev/null +++ b/core/licenses/lucene-spatial-6.4.0-snapshot-ec38570.jar.sha1 @@ -0,0 +1 @@ +bc8613fb61c0ae95dd3680b0f65e3380c3fd0d6c \ No newline at end of file diff --git a/core/licenses/lucene-spatial-extras-6.3.0.jar.sha1 b/core/licenses/lucene-spatial-extras-6.3.0.jar.sha1 deleted file mode 100644 index 0c52cf09377..00000000000 --- a/core/licenses/lucene-spatial-extras-6.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1b77ef3740dc885c62d5966fbe9aea1199d344fb \ No newline at end of file diff --git a/core/licenses/lucene-spatial-extras-6.4.0-snapshot-ec38570.jar.sha1 b/core/licenses/lucene-spatial-extras-6.4.0-snapshot-ec38570.jar.sha1 new file mode 100644 index 00000000000..2614704c057 --- /dev/null +++ b/core/licenses/lucene-spatial-extras-6.4.0-snapshot-ec38570.jar.sha1 @@ -0,0 +1 @@ +0fa2c3e722294e863f3c70a15e97a18397391fb4 \ No newline at end of file diff --git a/core/licenses/lucene-spatial3d-6.3.0.jar.sha1 b/core/licenses/lucene-spatial3d-6.3.0.jar.sha1 deleted file mode 100644 index c23003146af..00000000000 --- a/core/licenses/lucene-spatial3d-6.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -aa94b4a8636b3633008640cc5155ad354aebcea5 \ No newline at end of file diff --git a/core/licenses/lucene-spatial3d-6.4.0-snapshot-ec38570.jar.sha1 b/core/licenses/lucene-spatial3d-6.4.0-snapshot-ec38570.jar.sha1 new file mode 100644 index 00000000000..9b1c45581a1 --- /dev/null +++ b/core/licenses/lucene-spatial3d-6.4.0-snapshot-ec38570.jar.sha1 @@ -0,0 +1 @@ +db74c6313965ffdd10d9b19be2eed4ae2c76d2e3 \ No newline at end of file diff --git a/core/licenses/lucene-suggest-6.3.0.jar.sha1 b/core/licenses/lucene-suggest-6.3.0.jar.sha1 deleted file mode 100644 index 137b8976536..00000000000 --- a/core/licenses/lucene-suggest-6.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ed5d8ee5cd7edcad5d4ffca2b4540ccc844e9bb0 \ No newline at end of file diff --git a/core/licenses/lucene-suggest-6.4.0-snapshot-ec38570.jar.sha1 b/core/licenses/lucene-suggest-6.4.0-snapshot-ec38570.jar.sha1 new file mode 100644 index 00000000000..91841f474ef --- /dev/null +++ b/core/licenses/lucene-suggest-6.4.0-snapshot-ec38570.jar.sha1 @@ -0,0 +1 @@ +b85ae1121b5fd56df985615a3cdd7b3879e9b92d \ No newline at end of file diff --git a/core/licenses/t-digest-3.0.jar.sha1 b/core/licenses/t-digest-3.0.jar.sha1 index 6deec27800f..ce2f2e2f040 100644 --- a/core/licenses/t-digest-3.0.jar.sha1 +++ b/core/licenses/t-digest-3.0.jar.sha1 @@ -1 +1 @@ -84ccf145ac2215e6bfa63baa3101c0af41017cfc +84ccf145ac2215e6bfa63baa3101c0af41017cfc \ No newline at end of file diff --git a/core/src/main/java/org/apache/lucene/analysis/synonym/GraphTokenStreamFiniteStrings.java b/core/src/main/java/org/apache/lucene/analysis/synonym/GraphTokenStreamFiniteStrings.java new file mode 100644 index 00000000000..3d806588eca --- /dev/null +++ b/core/src/main/java/org/apache/lucene/analysis/synonym/GraphTokenStreamFiniteStrings.java @@ -0,0 +1,291 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.lucene.analysis.synonym; + +import static org.apache.lucene.util.automaton.Operations.DEFAULT_MAX_DETERMINIZED_STATES; + +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.tokenattributes.BytesTermAttribute; +import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; +import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; +import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute; +import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.IntsRef; +import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.FiniteStringsIterator; +import org.apache.lucene.util.automaton.Operations; +import org.apache.lucene.util.automaton.Transition; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +/** + * Creates a list of {@link TokenStream} where each stream is the tokens that make up a finite string in the graph token stream. To do this, + * the graph token stream is converted to an {@link Automaton} and from there we use a {@link FiniteStringsIterator} to collect the various + * token streams for each finite string. + */ +public class GraphTokenStreamFiniteStrings { + private final Automaton.Builder builder; + Automaton det; + private final Map<BytesRef, Integer> termToID = new HashMap<>(); + private final Map<Integer, BytesRef> idToTerm = new HashMap<>(); + private int anyTermID = -1; + + public GraphTokenStreamFiniteStrings() { + this.builder = new Automaton.Builder(); + } + + private static class BytesRefArrayTokenStream extends TokenStream { + private final BytesTermAttribute termAtt = addAttribute(BytesTermAttribute.class); + private final BytesRef[] terms; + private int offset; + + BytesRefArrayTokenStream(BytesRef[] terms) { + this.terms = terms; + offset = 0; + } + + @Override + public boolean incrementToken() throws IOException { + if (offset < terms.length) { + clearAttributes(); + termAtt.setBytesRef(terms[offset]); + offset = offset + 1; + return true; + } + + return false; + } + } + + /** + * Gets the token streams, one for each finite string of the graph token stream. + */ + public List<TokenStream> getTokenStreams(final TokenStream in) throws IOException { + // build the automaton + build(in); + + List<TokenStream> tokenStreams = new ArrayList<>(); + final FiniteStringsIterator finiteStrings = new FiniteStringsIterator(det); + for (IntsRef string; (string = finiteStrings.next()) != null; ) { + final BytesRef[] tokens = new BytesRef[string.length]; + for (int idx = string.offset, len = string.offset + string.length; idx < len; idx++) { + tokens[idx - string.offset] = idToTerm.get(string.ints[idx]); + } + + tokenStreams.add(new BytesRefArrayTokenStream(tokens)); + } + + return tokenStreams; + } + + private void build(final TokenStream in) throws IOException { + if (det != null) { + throw new IllegalStateException("Automaton already built"); + } + + final TermToBytesRefAttribute termBytesAtt = in.addAttribute(TermToBytesRefAttribute.class); + final PositionIncrementAttribute posIncAtt =
in.addAttribute(PositionIncrementAttribute.class); + final PositionLengthAttribute posLengthAtt = in.addAttribute(PositionLengthAttribute.class); + final OffsetAttribute offsetAtt = in.addAttribute(OffsetAttribute.class); + + in.reset(); + + int pos = -1; + int lastPos = 0; + int maxOffset = 0; + int maxPos = -1; + int state = -1; + while (in.incrementToken()) { + int posInc = posIncAtt.getPositionIncrement(); + assert pos > -1 || posInc > 0; + + if (posInc > 1) { + throw new IllegalArgumentException("cannot handle holes; to accept any term, use '*' term"); + } + + if (posInc > 0) { + // New node: + pos += posInc; + } + + int endPos = pos + posLengthAtt.getPositionLength(); + while (state < endPos) { + state = createState(); + } + + BytesRef term = termBytesAtt.getBytesRef(); + //System.out.println(pos + "-" + endPos + ": " + term.utf8ToString() + ": posInc=" + posInc); + if (term.length == 1 && term.bytes[term.offset] == (byte) '*') { + addAnyTransition(pos, endPos); + } else { + addTransition(pos, endPos, term); + } + + maxOffset = Math.max(maxOffset, offsetAtt.endOffset()); + maxPos = Math.max(maxPos, endPos); + } + + in.end(); + + // TODO: look at endOffset? ts2a did... + + // TODO: this (setting "last" state as the only accept state) may be too simplistic? + setAccept(state, true); + finish(); + } + + /** + * Returns a new state; state 0 is always the initial state. + */ + private int createState() { + return builder.createState(); + } + + /** + * Marks the specified state as accept or not. + */ + private void setAccept(int state, boolean accept) { + builder.setAccept(state, accept); + } + + /** + * Adds a transition to the automaton. + */ + private void addTransition(int source, int dest, String term) { + addTransition(source, dest, new BytesRef(term)); + } + + /** + * Adds a transition to the automaton. + */ + private void addTransition(int source, int dest, BytesRef term) { + if (term == null) { + throw new NullPointerException("term should not be null"); + } + builder.addTransition(source, dest, getTermID(term)); + } + + /** + * Adds a transition matching any term. + */ + private void addAnyTransition(int source, int dest) { + builder.addTransition(source, dest, getTermID(null)); + } + + /** + * Call this once you are done adding states/transitions. + */ + private void finish() { + finish(DEFAULT_MAX_DETERMINIZED_STATES); + } + + /** + * Call this once you are done adding states/transitions. + * + * @param maxDeterminizedStates Maximum number of states created when determinizing the automaton. Higher numbers allow this operation + * to consume more memory but allow more complex automatons. + */ + private void finish(int maxDeterminizedStates) { + Automaton automaton = builder.finish(); + + // System.out.println("before det:\n" + automaton.toDot()); + + Transition t = new Transition(); + + // TODO: should we add "eps back to initial node" for all states, + // and det that? then we don't need to revisit initial node at + // every position? but automaton could blow up? And, this makes it + // harder to skip useless positions at search time? 
+ + if (anyTermID != -1) { + + // Make sure there are no leading or trailing ANY: + int count = automaton.initTransition(0, t); + for (int i = 0; i < count; i++) { + automaton.getNextTransition(t); + if (anyTermID >= t.min && anyTermID <= t.max) { + throw new IllegalStateException("automaton cannot lead with an ANY transition"); + } + } + + int numStates = automaton.getNumStates(); + for (int i = 0; i < numStates; i++) { + count = automaton.initTransition(i, t); + for (int j = 0; j < count; j++) { + automaton.getNextTransition(t); + if (automaton.isAccept(t.dest) && anyTermID >= t.min && anyTermID <= t.max) { + throw new IllegalStateException("automaton cannot end with an ANY transition"); + } + } + } + + int termCount = termToID.size(); + + // We have to carefully translate these transitions so automaton + // realizes they also match all other terms: + Automaton newAutomaton = new Automaton(); + for (int i = 0; i < numStates; i++) { + newAutomaton.createState(); + newAutomaton.setAccept(i, automaton.isAccept(i)); + } + + for (int i = 0; i < numStates; i++) { + count = automaton.initTransition(i, t); + for (int j = 0; j < count; j++) { + automaton.getNextTransition(t); + int min, max; + if (t.min <= anyTermID && anyTermID <= t.max) { + // Match any term + min = 0; + max = termCount - 1; + } else { + min = t.min; + max = t.max; + } + newAutomaton.addTransition(t.source, t.dest, min, max); + } + } + newAutomaton.finishState(); + automaton = newAutomaton; + } + + det = Operations.removeDeadStates(Operations.determinize(automaton, maxDeterminizedStates)); + } + + private int getTermID(BytesRef term) { + Integer id = termToID.get(term); + if (id == null) { + id = termToID.size(); + if (term != null) { + term = BytesRef.deepCopyOf(term); + } + termToID.put(term, id); + idToTerm.put(id, term); + if (term == null) { + anyTermID = id; + } + } + + return id; + } +} diff --git a/core/src/main/java/org/apache/lucene/analysis/synonym/SynonymGraphFilter.java b/core/src/main/java/org/apache/lucene/analysis/synonym/SynonymGraphFilter.java new file mode 100644 index 00000000000..f2c27679ab6 --- /dev/null +++ b/core/src/main/java/org/apache/lucene/analysis/synonym/SynonymGraphFilter.java @@ -0,0 +1,588 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
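The GraphTokenStreamFiniteStrings helper added above is consumed by the query-building changes later in this diff. As a rough usage sketch (not part of the patch; the field name and text are hypothetical, and the analyzer is assumed to emit a graph, for example because its chain ends with the SynonymGraphFilter added below):

    // Sketch: expand a graph token stream into one flat TokenStream per path.
    // Assumes imports of org.apache.lucene.analysis.Analyzer/TokenStream, java.util.List, java.io.IOException.
    static List<TokenStream> expandPaths(Analyzer analyzer) throws IOException {
        try (TokenStream source = analyzer.tokenStream("body", "wi fi network")) {
            GraphTokenStreamFiniteStrings graph = new GraphTokenStreamFiniteStrings();
            // One TokenStream per finite string (path) of the graph; the caller can analyze
            // each path into its own query and combine them, e.g. via the GraphQuery added later.
            return graph.getTokenStreams(source);
        }
    }

Each returned stream simply replays the BytesRef terms of a single path, so downstream query builders never have to reason about position lengths themselves.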
+ */ + +package org.apache.lucene.analysis.synonym; + +import org.apache.lucene.analysis.TokenFilter; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; +import org.apache.lucene.analysis.tokenattributes.FlagsAttribute; +import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; +import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; +import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute; +import org.apache.lucene.analysis.tokenattributes.TypeAttribute; +import org.apache.lucene.store.ByteArrayDataInput; +import org.apache.lucene.util.AttributeSource; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.CharsRefBuilder; +import org.apache.lucene.util.RollingBuffer; +import org.apache.lucene.util.fst.FST; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.LinkedList; +import java.util.List; + +// TODO: maybe we should resolve token -> wordID then run +// FST on wordIDs, for better perf? + +// TODO: a more efficient approach would be Aho/Corasick's +// algorithm +// http://en.wikipedia.org/wiki/Aho%E2%80%93Corasick_string_matching_algorithm +// It improves over the current approach here +// because it does not fully re-start matching at every +// token. For example if one pattern is "a b c x" +// and another is "b c d" and the input is "a b c d", on +// trying to parse "a b c x" but failing when you got to x, +// rather than starting over again your really should +// immediately recognize that "b c d" matches at the next +// input. I suspect this won't matter that much in +// practice, but it's possible on some set of synonyms it +// will. We'd have to modify Aho/Corasick to enforce our +// conflict resolving (eg greedy matching) because that algo +// finds all matches. This really amounts to adding a .* +// closure to the FST and then determinizing it. +// +// Another possible solution is described at http://www.cis.uni-muenchen.de/people/Schulz/Pub/dictle5.ps + +/** + * Applies single- or multi-token synonyms from a {@link SynonymMap} + * to an incoming {@link TokenStream}, producing a fully correct graph + * output. This is a replacement for {@link SynonymFilter}, which produces + * incorrect graphs for multi-token synonyms. + * + * NOTE: this cannot consume an incoming graph; results will + * be undefined. 
+ */ +public final class SynonymGraphFilter extends TokenFilter { + + public static final String TYPE_SYNONYM = "SYNONYM"; + public static final int GRAPH_FLAG = 8; + + private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class); + private final PositionIncrementAttribute posIncrAtt = addAttribute(PositionIncrementAttribute.class); + private final PositionLengthAttribute posLenAtt = addAttribute(PositionLengthAttribute.class); + private final FlagsAttribute flagsAtt = addAttribute(FlagsAttribute.class); + + private final TypeAttribute typeAtt = addAttribute(TypeAttribute.class); + private final OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class); + + private final SynonymMap synonyms; + private final boolean ignoreCase; + + private final FST fst; + + private final FST.BytesReader fstReader; + private final FST.Arc scratchArc; + private final ByteArrayDataInput bytesReader = new ByteArrayDataInput(); + private final BytesRef scratchBytes = new BytesRef(); + private final CharsRefBuilder scratchChars = new CharsRefBuilder(); + private final LinkedList outputBuffer = new LinkedList<>(); + + private int nextNodeOut; + private int lastNodeOut; + private int maxLookaheadUsed; + + // For testing: + private int captureCount; + + private boolean liveToken; + + // Start/end offset of the current match: + private int matchStartOffset; + private int matchEndOffset; + + // True once the input TokenStream is exhausted: + private boolean finished; + + private int lookaheadNextRead; + private int lookaheadNextWrite; + + private RollingBuffer lookahead = new RollingBuffer() { + @Override + protected BufferedInputToken newInstance() { + return new BufferedInputToken(); + } + }; + + static class BufferedInputToken implements RollingBuffer.Resettable { + final CharsRefBuilder term = new CharsRefBuilder(); + AttributeSource.State state; + int startOffset = -1; + int endOffset = -1; + + @Override + public void reset() { + state = null; + term.clear(); + + // Intentionally invalid to ferret out bugs: + startOffset = -1; + endOffset = -1; + } + } + + static class BufferedOutputToken { + final String term; + + // Non-null if this was an incoming token: + final State state; + + final int startNode; + final int endNode; + + public BufferedOutputToken(State state, String term, int startNode, int endNode) { + this.state = state; + this.term = term; + this.startNode = startNode; + this.endNode = endNode; + } + } + + public SynonymGraphFilter(TokenStream input, SynonymMap synonyms, boolean ignoreCase) { + super(input); + this.synonyms = synonyms; + this.fst = synonyms.fst; + if (fst == null) { + throw new IllegalArgumentException("fst must be non-null"); + } + this.fstReader = fst.getBytesReader(); + scratchArc = new FST.Arc<>(); + this.ignoreCase = ignoreCase; + } + + @Override + public boolean incrementToken() throws IOException { + //System.out.println("\nS: incrToken lastNodeOut=" + lastNodeOut + " nextNodeOut=" + nextNodeOut); + + assert lastNodeOut <= nextNodeOut; + + if (outputBuffer.isEmpty() == false) { + // We still have pending outputs from a prior synonym match: + releaseBufferedToken(); + //System.out.println(" syn: ret buffered=" + this); + assert liveToken == false; + return true; + } + + // Try to parse a new synonym match at the current token: + + if (parse()) { + // A new match was found: + releaseBufferedToken(); + //System.out.println(" syn: after parse, ret buffered=" + this); + assert liveToken == false; + return true; + } + + if (lookaheadNextRead == 
lookaheadNextWrite) { + + // Fast path: parse pulled one token, but it didn't match + // the start for any synonym, so we now return it "live" w/o having + // cloned all of its atts: + if (finished) { + //System.out.println(" syn: ret END"); + return false; + } + + assert liveToken; + liveToken = false; + + // NOTE: no need to change posInc since it's relative, i.e. whatever + // node our output is upto will just increase by the incoming posInc. + // We also don't need to change posLen, but only because we cannot + // consume a graph, so the incoming token can never span a future + // synonym match. + + } else { + // We still have buffered lookahead tokens from a previous + // parse attempt that required lookahead; just replay them now: + //System.out.println(" restore buffer"); + assert lookaheadNextRead < lookaheadNextWrite : "read=" + lookaheadNextRead + " write=" + lookaheadNextWrite; + BufferedInputToken token = lookahead.get(lookaheadNextRead); + lookaheadNextRead++; + + restoreState(token.state); + + lookahead.freeBefore(lookaheadNextRead); + + //System.out.println(" after restore offset=" + offsetAtt.startOffset() + "-" + offsetAtt.endOffset()); + assert liveToken == false; + } + + lastNodeOut += posIncrAtt.getPositionIncrement(); + nextNodeOut = lastNodeOut + posLenAtt.getPositionLength(); + + //System.out.println(" syn: ret lookahead=" + this); + + return true; + } + + private void releaseBufferedToken() throws IOException { + //System.out.println(" releaseBufferedToken"); + + BufferedOutputToken token = outputBuffer.pollFirst(); + + if (token.state != null) { + // This is an original input token (keepOrig=true case): + //System.out.println(" hasState"); + restoreState(token.state); + //System.out.println(" startOffset=" + offsetAtt.startOffset() + " endOffset=" + offsetAtt.endOffset()); + } else { + clearAttributes(); + //System.out.println(" no state"); + termAtt.append(token.term); + + // We better have a match already: + assert matchStartOffset != -1; + + offsetAtt.setOffset(matchStartOffset, matchEndOffset); + //System.out.println(" startOffset=" + matchStartOffset + " endOffset=" + matchEndOffset); + typeAtt.setType(TYPE_SYNONYM); + } + + //System.out.println(" lastNodeOut=" + lastNodeOut); + //System.out.println(" term=" + termAtt); + + posIncrAtt.setPositionIncrement(token.startNode - lastNodeOut); + lastNodeOut = token.startNode; + posLenAtt.setPositionLength(token.endNode - token.startNode); + flagsAtt.setFlags(flagsAtt.getFlags() | GRAPH_FLAG); // set the graph flag + } + + /** + * Scans the next input token(s) to see if a synonym matches. Returns true + * if a match was found. 
+ */ + private boolean parse() throws IOException { + // System.out.println(Thread.currentThread().getName() + ": S: parse: " + System.identityHashCode(this)); + + // Holds the longest match we've seen so far: + BytesRef matchOutput = null; + int matchInputLength = 0; + + BytesRef pendingOutput = fst.outputs.getNoOutput(); + fst.getFirstArc(scratchArc); + + assert scratchArc.output == fst.outputs.getNoOutput(); + + // How many tokens in the current match + int matchLength = 0; + boolean doFinalCapture = false; + + int lookaheadUpto = lookaheadNextRead; + matchStartOffset = -1; + + byToken: + while (true) { + //System.out.println(" cycle lookaheadUpto=" + lookaheadUpto + " maxPos=" + lookahead.getMaxPos()); + + // Pull next token's chars: + final char[] buffer; + final int bufferLen; + final int inputEndOffset; + + if (lookaheadUpto <= lookahead.getMaxPos()) { + // Still in our lookahead buffer + BufferedInputToken token = lookahead.get(lookaheadUpto); + lookaheadUpto++; + buffer = token.term.chars(); + bufferLen = token.term.length(); + inputEndOffset = token.endOffset; + //System.out.println(" use buffer now max=" + lookahead.getMaxPos()); + if (matchStartOffset == -1) { + matchStartOffset = token.startOffset; + } + } else { + + // We used up our lookahead buffer of input tokens + // -- pull next real input token: + + assert finished || liveToken == false; + + if (finished) { + //System.out.println(" break: finished"); + break; + } else if (input.incrementToken()) { + //System.out.println(" input.incrToken"); + liveToken = true; + buffer = termAtt.buffer(); + bufferLen = termAtt.length(); + if (matchStartOffset == -1) { + matchStartOffset = offsetAtt.startOffset(); + } + inputEndOffset = offsetAtt.endOffset(); + + lookaheadUpto++; + } else { + // No more input tokens + finished = true; + //System.out.println(" break: now set finished"); + break; + } + } + + matchLength++; + //System.out.println(" cycle term=" + new String(buffer, 0, bufferLen)); + + // Run each char in this token through the FST: + int bufUpto = 0; + while (bufUpto < bufferLen) { + final int codePoint = Character.codePointAt(buffer, bufUpto, bufferLen); + if (fst.findTargetArc(ignoreCase ? Character.toLowerCase(codePoint) : codePoint, scratchArc, scratchArc, fstReader) == + null) { + break byToken; + } + + // Accum the output + pendingOutput = fst.outputs.add(pendingOutput, scratchArc.output); + bufUpto += Character.charCount(codePoint); + } + + assert bufUpto == bufferLen; + + // OK, entire token matched; now see if this is a final + // state in the FST (a match): + if (scratchArc.isFinal()) { + matchOutput = fst.outputs.add(pendingOutput, scratchArc.nextFinalOutput); + matchInputLength = matchLength; + matchEndOffset = inputEndOffset; + //System.out.println(" ** match"); + } + + // See if the FST can continue matching (ie, needs to + // see the next input token): + if (fst.findTargetArc(SynonymMap.WORD_SEPARATOR, scratchArc, scratchArc, fstReader) == null) { + // No further rules can match here; we're done + // searching for matching rules starting at the + // current input position. 
+ break; + } else { + // More matching is possible -- accum the output (if + // any) of the WORD_SEP arc: + pendingOutput = fst.outputs.add(pendingOutput, scratchArc.output); + doFinalCapture = true; + if (liveToken) { + capture(); + } + } + } + + if (doFinalCapture && liveToken && finished == false) { + // Must capture the final token if we captured any prior tokens: + capture(); + } + + if (matchOutput != null) { + + if (liveToken) { + // Single input token synonym; we must buffer it now: + capture(); + } + + // There is a match! + bufferOutputTokens(matchOutput, matchInputLength); + lookaheadNextRead += matchInputLength; + //System.out.println(" precmatch; set lookaheadNextRead=" + lookaheadNextRead + " now max=" + lookahead.getMaxPos()); + lookahead.freeBefore(lookaheadNextRead); + //System.out.println(" match; set lookaheadNextRead=" + lookaheadNextRead + " now max=" + lookahead.getMaxPos()); + return true; + } else { + //System.out.println(" no match; lookaheadNextRead=" + lookaheadNextRead); + return false; + } + + //System.out.println(" parse done inputSkipCount=" + inputSkipCount + " nextRead=" + nextRead + " nextWrite=" + nextWrite); + } + + /** + * Expands the output graph into the necessary tokens, adding + * synonyms as side paths parallel to the input tokens, and + * buffers them in the output token buffer. + */ + private void bufferOutputTokens(BytesRef bytes, int matchInputLength) { + bytesReader.reset(bytes.bytes, bytes.offset, bytes.length); + + final int code = bytesReader.readVInt(); + final boolean keepOrig = (code & 0x1) == 0; + //System.out.println(" buffer: keepOrig=" + keepOrig + " matchInputLength=" + matchInputLength); + + // How many nodes along all paths; we need this to assign the + // node ID for the final end node where all paths merge back: + int totalPathNodes; + if (keepOrig) { + assert matchInputLength > 0; + totalPathNodes = matchInputLength - 1; + } else { + totalPathNodes = 0; + } + + // How many synonyms we will insert over this match: + final int count = code >>> 1; + + // TODO: we could encode this instead into the FST: + + // 1st pass: count how many new nodes we need + List> paths = new ArrayList<>(); + for (int outputIDX = 0; outputIDX < count; outputIDX++) { + int wordID = bytesReader.readVInt(); + synonyms.words.get(wordID, scratchBytes); + scratchChars.copyUTF8Bytes(scratchBytes); + int lastStart = 0; + + List path = new ArrayList<>(); + paths.add(path); + int chEnd = scratchChars.length(); + for (int chUpto = 0; chUpto <= chEnd; chUpto++) { + if (chUpto == chEnd || scratchChars.charAt(chUpto) == SynonymMap.WORD_SEPARATOR) { + path.add(new String(scratchChars.chars(), lastStart, chUpto - lastStart)); + lastStart = 1 + chUpto; + } + } + + assert path.size() > 0; + totalPathNodes += path.size() - 1; + } + //System.out.println(" totalPathNodes=" + totalPathNodes); + + // 2nd pass: buffer tokens for the graph fragment + + // NOTE: totalPathNodes will be 0 in the case where the matched + // input is a single token and all outputs are also a single token + + // We "spawn" a side-path for each of the outputs for this matched + // synonym, all ending back at this end node: + + int startNode = nextNodeOut; + + int endNode = startNode + totalPathNodes + 1; + //System.out.println(" " + paths.size() + " new side-paths"); + + // First, fanout all tokens departing start node for these new side paths: + int newNodeCount = 0; + for (List path : paths) { + int pathEndNode; + //System.out.println(" path size=" + path.size()); + if (path.size() == 1) { + // 
Single token output, so there are no intermediate nodes: + pathEndNode = endNode; + } else { + pathEndNode = nextNodeOut + newNodeCount + 1; + newNodeCount += path.size() - 1; + } + outputBuffer.add(new BufferedOutputToken(null, path.get(0), startNode, pathEndNode)); + } + + // We must do the original tokens last, else the offsets "go backwards": + if (keepOrig) { + BufferedInputToken token = lookahead.get(lookaheadNextRead); + int inputEndNode; + if (matchInputLength == 1) { + // Single token matched input, so there are no intermediate nodes: + inputEndNode = endNode; + } else { + inputEndNode = nextNodeOut + newNodeCount + 1; + } + + //System.out.println(" keepOrig first token: " + token.term); + + outputBuffer.add(new BufferedOutputToken(token.state, token.term.toString(), startNode, inputEndNode)); + } + + nextNodeOut = endNode; + + // Do full side-path for each syn output: + for (int pathID = 0; pathID < paths.size(); pathID++) { + List path = paths.get(pathID); + if (path.size() > 1) { + int lastNode = outputBuffer.get(pathID).endNode; + for (int i = 1; i < path.size() - 1; i++) { + outputBuffer.add(new BufferedOutputToken(null, path.get(i), lastNode, lastNode + 1)); + lastNode++; + } + outputBuffer.add(new BufferedOutputToken(null, path.get(path.size() - 1), lastNode, endNode)); + } + } + + if (keepOrig && matchInputLength > 1) { + // Do full "side path" with the original tokens: + int lastNode = outputBuffer.get(paths.size()).endNode; + for (int i = 1; i < matchInputLength - 1; i++) { + BufferedInputToken token = lookahead.get(lookaheadNextRead + i); + outputBuffer.add(new BufferedOutputToken(token.state, token.term.toString(), lastNode, lastNode + 1)); + lastNode++; + } + BufferedInputToken token = lookahead.get(lookaheadNextRead + matchInputLength - 1); + outputBuffer.add(new BufferedOutputToken(token.state, token.term.toString(), lastNode, endNode)); + } + + /* + System.out.println(" after buffer: " + outputBuffer.size() + " tokens:"); + for(BufferedOutputToken token : outputBuffer) { + System.out.println(" tok: " + token.term + " startNode=" + token.startNode + " endNode=" + token.endNode); + } + */ + } + + /** + * Buffers the current input token into lookahead buffer. 
+ */ + private void capture() { + assert liveToken; + liveToken = false; + BufferedInputToken token = lookahead.get(lookaheadNextWrite); + lookaheadNextWrite++; + + token.state = captureState(); + token.startOffset = offsetAtt.startOffset(); + token.endOffset = offsetAtt.endOffset(); + assert token.term.length() == 0; + token.term.append(termAtt); + + captureCount++; + maxLookaheadUsed = Math.max(maxLookaheadUsed, lookahead.getBufferSize()); + //System.out.println(" maxLookaheadUsed=" + maxLookaheadUsed); + } + + @Override + public void reset() throws IOException { + super.reset(); + lookahead.reset(); + lookaheadNextWrite = 0; + lookaheadNextRead = 0; + captureCount = 0; + lastNodeOut = -1; + nextNodeOut = 0; + matchStartOffset = -1; + matchEndOffset = -1; + finished = false; + liveToken = false; + outputBuffer.clear(); + maxLookaheadUsed = 0; + //System.out.println("S: reset"); + } + + // for testing + int getCaptureCount() { + return captureCount; + } + + // for testing + int getMaxLookaheadUsed() { + return maxLookaheadUsed; + } +} diff --git a/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java b/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java index ac9770f2bc8..976c4706725 100644 --- a/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java +++ b/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java @@ -41,7 +41,6 @@ import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.mapper.DateFieldMapper; -import org.elasticsearch.index.mapper.LegacyDateFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.StringFieldType; @@ -336,11 +335,7 @@ public class MapperQueryParser extends AnalyzingQueryParser { BytesRef part1Binary = part1 == null ? null : getAnalyzer().normalize(field, part1); BytesRef part2Binary = part2 == null ? null : getAnalyzer().normalize(field, part2); Query rangeQuery; - if (currentFieldType instanceof LegacyDateFieldMapper.DateFieldType && settings.timeZone() != null) { - LegacyDateFieldMapper.DateFieldType dateFieldType = (LegacyDateFieldMapper.DateFieldType) this.currentFieldType; - rangeQuery = dateFieldType.rangeQuery(part1Binary, part2Binary, - startInclusive, endInclusive, settings.timeZone(), null, context); - } else if (currentFieldType instanceof DateFieldMapper.DateFieldType && settings.timeZone() != null) { + if (currentFieldType instanceof DateFieldMapper.DateFieldType && settings.timeZone() != null) { DateFieldMapper.DateFieldType dateFieldType = (DateFieldMapper.DateFieldType) this.currentFieldType; rangeQuery = dateFieldType.rangeQuery(part1Binary, part2Binary, startInclusive, endInclusive, settings.timeZone(), null, context); diff --git a/core/src/main/java/org/apache/lucene/search/GraphQuery.java b/core/src/main/java/org/apache/lucene/search/GraphQuery.java new file mode 100644 index 00000000000..cad316d701c --- /dev/null +++ b/core/src/main/java/org/apache/lucene/search/GraphQuery.java @@ -0,0 +1,115 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
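To see the new filter end to end, here is a rough sketch (not part of the patch) of building a SynonymMap and installing SynonymGraphFilter in a custom Analyzer; the synonym rule, the whitespace tokenizer, and ignoreCase=true are illustrative assumptions:

    SynonymMap.Builder mapBuilder = new SynonymMap.Builder(true);        // dedup entries
    CharsRefBuilder input = new CharsRefBuilder();
    CharsRefBuilder output = new CharsRefBuilder();
    SynonymMap.Builder.join("wi fi".split(" "), input);                  // multi-token input
    SynonymMap.Builder.join("wifi".split(" "), output);
    mapBuilder.add(input.get(), output.get(), true);                     // keepOrig=true
    SynonymMap synonyms = mapBuilder.build();                            // throws IOException

    Analyzer analyzer = new Analyzer() {
        @Override
        protected TokenStreamComponents createComponents(String fieldName) {
            Tokenizer tokenizer = new WhitespaceTokenizer();
            TokenStream graph = new SynonymGraphFilter(tokenizer, synonyms, true);
            return new TokenStreamComponents(tokenizer, graph);
        }
    };

In this change the multi-position output is intended for query-time analysis; GraphTokenStreamFiniteStrings above expands it path by path when queries are built.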
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.lucene.search; + +import org.apache.lucene.index.IndexReader; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +/** + * A query that wraps multiple sub-queries generated from a graph token stream. + */ +public final class GraphQuery extends Query { + private final Query[] queries; + private final boolean hasBoolean; + + /** + * Constructor sets the queries and checks if any of them are + * a boolean query. + * + * @param queries the non-null array of queries + */ + public GraphQuery(Query... queries) { + this.queries = Objects.requireNonNull(queries).clone(); + for (Query query : queries) { + if (query instanceof BooleanQuery) { + hasBoolean = true; + return; + } + } + hasBoolean = false; + } + + /** + * Gets the queries + * + * @return unmodifiable list of Query + */ + public List getQueries() { + return Collections.unmodifiableList(Arrays.asList(queries)); + } + + /** + * If there is at least one boolean query or not. + * + * @return true if there is a boolean, false if not + */ + public boolean hasBoolean() { + return hasBoolean; + } + + /** + * Rewrites to a single query or a boolean query where each query is a SHOULD clause. 
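As an illustration of that rewrite (not part of the patch; the wrapped queries and the IndexReader are hypothetical), a graph with two paths becomes a coord-disabled disjunction, while a single-query graph rewrites to that query itself and an empty graph to an empty BooleanQuery:

    GraphQuery graph = new GraphQuery(
        new TermQuery(new Term("body", "wifi")),
        new PhraseQuery("body", "wi", "fi"));
    Query rewritten = graph.rewrite(reader);   // BooleanQuery: body:wifi OR body:"wi fi", coord disabled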
+ */ + @Override + public Query rewrite(IndexReader reader) throws IOException { + if (queries.length == 0) { + return new BooleanQuery.Builder().build(); + } + + if (queries.length == 1) { + return queries[0]; + } + + BooleanQuery.Builder q = new BooleanQuery.Builder(); + q.setDisableCoord(true); + for (Query clause : queries) { + q.add(clause, BooleanClause.Occur.SHOULD); + } + + return q.build(); + } + + @Override + public String toString(String field) { + StringBuilder builder = new StringBuilder("Graph("); + for (int i = 0; i < queries.length; i++) { + if (i != 0) { + builder.append(", "); + } + builder.append(Objects.toString(queries[i])); + } + builder.append(")"); + return builder.toString(); + } + + @Override + public boolean equals(Object other) { + return sameClassAs(other) && + Arrays.equals(queries, ((GraphQuery) other).queries); + } + + @Override + public int hashCode() { + return 31 * classHash() + Arrays.hashCode(queries); + } +} diff --git a/core/src/main/java/org/elasticsearch/ElasticsearchException.java b/core/src/main/java/org/elasticsearch/ElasticsearchException.java index 80103e1d0df..443944825fc 100644 --- a/core/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/core/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -719,10 +719,9 @@ public class ElasticsearchException extends RuntimeException implements ToXConte STATUS_EXCEPTION(org.elasticsearch.ElasticsearchStatusException.class, org.elasticsearch.ElasticsearchStatusException::new, 145, UNKNOWN_VERSION_ADDED), TASK_CANCELLED_EXCEPTION(org.elasticsearch.tasks.TaskCancelledException.class, - org.elasticsearch.tasks.TaskCancelledException::new, 146, Version.V_5_1_0_UNRELEASED), + org.elasticsearch.tasks.TaskCancelledException::new, 146, Version.V_5_1_1_UNRELEASED), SHARD_LOCK_OBTAIN_FAILED_EXCEPTION(org.elasticsearch.env.ShardLockObtainFailedException.class, - org.elasticsearch.env.ShardLockObtainFailedException::new, 147, Version.V_5_0_2_UNRELEASED); - + org.elasticsearch.env.ShardLockObtainFailedException::new, 147, Version.V_5_0_2); final Class exceptionClass; final FunctionThatThrowsIOException constructor; diff --git a/core/src/main/java/org/elasticsearch/Version.java b/core/src/main/java/org/elasticsearch/Version.java index 981cca72d44..80d870e2dcc 100644 --- a/core/src/main/java/org/elasticsearch/Version.java +++ b/core/src/main/java/org/elasticsearch/Version.java @@ -95,15 +95,18 @@ public class Version { public static final Version V_5_0_0 = new Version(V_5_0_0_ID, org.apache.lucene.util.Version.LUCENE_6_2_0); public static final int V_5_0_1_ID = 5000199; public static final Version V_5_0_1 = new Version(V_5_0_1_ID, org.apache.lucene.util.Version.LUCENE_6_2_1); - public static final int V_5_0_2_ID_UNRELEASED = 5000299; - public static final Version V_5_0_2_UNRELEASED = new Version(V_5_0_2_ID_UNRELEASED, org.apache.lucene.util.Version.LUCENE_6_2_1); - public static final int V_5_1_0_ID_UNRELEASED = 5010099; - public static final Version V_5_1_0_UNRELEASED = new Version(V_5_1_0_ID_UNRELEASED, org.apache.lucene.util.Version.LUCENE_6_3_0); + public static final int V_5_0_2_ID = 5000299; + public static final Version V_5_0_2 = new Version(V_5_0_2_ID, org.apache.lucene.util.Version.LUCENE_6_2_1); + public static final int V_5_0_3_ID_UNRELEASED = 5000399; + public static final Version V_5_0_3_UNRELEASED = new Version(V_5_0_3_ID_UNRELEASED, org.apache.lucene.util.Version.LUCENE_6_3_0); + // no version constant for 5.1.0 due to inadvertent release + public static final int 
V_5_1_1_ID_UNRELEASED = 5010199; + public static final Version V_5_1_1_UNRELEASED = new Version(V_5_1_1_ID_UNRELEASED, org.apache.lucene.util.Version.LUCENE_6_3_0); public static final int V_5_2_0_ID_UNRELEASED = 5020099; public static final Version V_5_2_0_UNRELEASED = new Version(V_5_2_0_ID_UNRELEASED, org.apache.lucene.util.Version.LUCENE_6_3_0); public static final int V_6_0_0_alpha1_ID_UNRELEASED = 6000001; public static final Version V_6_0_0_alpha1_UNRELEASED = - new Version(V_6_0_0_alpha1_ID_UNRELEASED, org.apache.lucene.util.Version.LUCENE_6_3_0); + new Version(V_6_0_0_alpha1_ID_UNRELEASED, org.apache.lucene.util.Version.LUCENE_6_4_0); public static final Version CURRENT = V_6_0_0_alpha1_UNRELEASED; // unreleased versions must be added to the above list with the suffix _UNRELEASED (with the exception of CURRENT) @@ -123,10 +126,12 @@ public class Version { return V_6_0_0_alpha1_UNRELEASED; case V_5_2_0_ID_UNRELEASED: return V_5_2_0_UNRELEASED; - case V_5_1_0_ID_UNRELEASED: - return V_5_1_0_UNRELEASED; - case V_5_0_2_ID_UNRELEASED: - return V_5_0_2_UNRELEASED; + case V_5_1_1_ID_UNRELEASED: + return V_5_1_1_UNRELEASED; + case V_5_0_3_ID_UNRELEASED: + return V_5_0_3_UNRELEASED; + case V_5_0_2_ID: + return V_5_0_2; case V_5_0_1_ID: return V_5_0_1; case V_5_0_0_ID: @@ -213,12 +218,17 @@ public class Version { } /** - * Returns the smallest version between the 2. + * Returns the minimum version between the 2. */ - public static Version smallest(Version version1, Version version2) { + public static Version min(Version version1, Version version2) { return version1.id < version2.id ? version1 : version2; } + /** + * Returns the maximum version between the 2 + */ + public static Version max(Version version1, Version version2) { return version1.id > version2.id ? version1 : version2; } + /** * Returns the version given its string representation, current version if the argument is null or empty */ @@ -321,7 +331,22 @@ public class Version { bwcMajor = major; bwcMinor = 0; } - return Version.smallest(this, fromId(bwcMajor * 1000000 + bwcMinor * 10000 + 99)); + return Version.min(this, fromId(bwcMajor * 1000000 + bwcMinor * 10000 + 99)); + } + + /** + * Returns the minimum created index version that this version supports. Indices created with lower versions + * can't be used with this version. + */ + public Version minimumIndexCompatibilityVersion() { + final int bwcMajor; + if (major == 5) { + bwcMajor = 2; // we jumped from 2 to 5 + } else { + bwcMajor = major - 1; + } + final int bwcMinor = 0; + return Version.min(this, fromId(bwcMajor * 1000000 + bwcMinor * 10000 + 99)); } /** @@ -409,5 +434,4 @@ public class Version { public boolean isRelease() { return build == 99; } - } diff --git a/core/src/main/java/org/elasticsearch/action/CompositeIndicesRequest.java b/core/src/main/java/org/elasticsearch/action/CompositeIndicesRequest.java index 5c88c57b83e..9c661e93be8 100644 --- a/core/src/main/java/org/elasticsearch/action/CompositeIndicesRequest.java +++ b/core/src/main/java/org/elasticsearch/action/CompositeIndicesRequest.java @@ -19,18 +19,11 @@ package org.elasticsearch.action; -import java.util.List; - /** - * Needs to be implemented by all {@link org.elasticsearch.action.ActionRequest} subclasses that are composed of multiple subrequests - * which relate to one or more indices. Allows to retrieve those subrequests and reason about them separately. A composite request is - * executed by its own transport action class (e.g. 
{@link org.elasticsearch.action.search.TransportMultiSearchAction}), which goes - * through all the subrequests and delegates their exection to the appropriate transport action (e.g. - * {@link org.elasticsearch.action.search.TransportSearchAction}) for each single item. + * Marker interface that needs to be implemented by all {@link org.elasticsearch.action.ActionRequest} subclasses that are composed of + * multiple sub-requests which relate to one or more indices. A composite request is executed by its own transport action class + * (e.g. {@link org.elasticsearch.action.search.TransportMultiSearchAction}), which goes through all sub-requests and delegates their + * execution to the appropriate transport action (e.g. {@link org.elasticsearch.action.search.TransportSearchAction}) for each single item. */ public interface CompositeIndicesRequest { - /** - * Returns the subrequests that a composite request is composed of - */ - List subRequests(); } diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java index 61e5a4ff590..36d63bbcebe 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java @@ -134,7 +134,7 @@ public class ClusterSearchShardsRequest extends MasterNodeReadRequest(); for (int i = 0; i < size; i++) { @@ -93,7 +93,7 @@ public class ClusterSearchShardsResponse extends ActionResponse implements ToXCo for (DiscoveryNode node : nodes) { node.writeTo(out); } - if (out.getVersion().onOrAfter(Version.V_5_1_0_UNRELEASED)) { + if (out.getVersion().onOrAfter(Version.V_5_1_1_UNRELEASED)) { out.writeVInt(indicesAndFilters.size()); for (Map.Entry entry : indicesAndFilters.entrySet()) { out.writeString(entry.getKey()); @@ -115,7 +115,8 @@ public class ClusterSearchShardsResponse extends ActionResponse implements ToXCo String index = entry.getKey(); builder.startObject(index); AliasFilter aliasFilter = entry.getValue(); - if (aliasFilter.getQueryBuilder() != null) { + if (aliasFilter.getAliases().length > 0) { + builder.array("aliases", aliasFilter.getAliases()); builder.field("filter"); aliasFilter.getQueryBuilder().toXContent(builder, params); } diff --git a/core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java b/core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java index 48d163cdb26..5351579278d 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java @@ -211,11 +211,6 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques return this.requests; } - @Override - public List subRequests() { - return requests.stream().collect(Collectors.toList()); - } - /** * The list of optional payloads associated with requests in the same order as the requests. Note, elements within * it might be null if no payload has been provided. 
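With CompositeIndicesRequest reduced to a marker interface, implementations such as BulkRequest and MultiGetRequest no longer expose subRequests(); callers switch to the concrete request's own accessor, as in the TransportNoopBulkAction change earlier in this diff. A minimal sketch (not part of the patch; the variable name is hypothetical):

    // before: int itemCount = bulkRequest.subRequests().size();
    int itemCount = bulkRequest.requests().size();   // the owning transport action iterates the items itself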
@@ -305,8 +300,6 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques String parent = null; FetchSourceContext fetchSourceContext = defaultFetchSourceContext; String[] fields = defaultFields; - String timestamp = null; - TimeValue ttl = null; String opType = null; long version = Versions.MATCH_ANY; VersionType versionType = VersionType.INTERNAL; @@ -336,14 +329,6 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques routing = parser.text(); } else if ("_parent".equals(currentFieldName) || "parent".equals(currentFieldName)) { parent = parser.text(); - } else if ("_timestamp".equals(currentFieldName) || "timestamp".equals(currentFieldName)) { - timestamp = parser.text(); - } else if ("_ttl".equals(currentFieldName) || "ttl".equals(currentFieldName)) { - if (parser.currentToken() == XContentParser.Token.VALUE_STRING) { - ttl = TimeValue.parseTimeValue(parser.text(), null, currentFieldName); - } else { - ttl = new TimeValue(parser.longValue()); - } } else if ("op_type".equals(currentFieldName) || "opType".equals(currentFieldName)) { opType = parser.text(); } else if ("_version".equals(currentFieldName) || "version".equals(currentFieldName)) { @@ -394,15 +379,15 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques // of index request. if ("index".equals(action)) { if (opType == null) { - internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).timestamp(timestamp).ttl(ttl).version(version).versionType(versionType) + internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).version(version).versionType(versionType) .setPipeline(pipeline).source(data.slice(from, nextMarker - from)), payload); } else { - internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).timestamp(timestamp).ttl(ttl).version(version).versionType(versionType) + internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).version(version).versionType(versionType) .create("create".equals(opType)).setPipeline(pipeline) .source(data.slice(from, nextMarker - from)), payload); } } else if ("create".equals(action)) { - internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).timestamp(timestamp).ttl(ttl).version(version).versionType(versionType) + internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).version(version).versionType(versionType) .create(true).setPipeline(pipeline) .source(data.slice(from, nextMarker - from)), payload); } else if ("update".equals(action)) { @@ -420,15 +405,11 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques IndexRequest upsertRequest = updateRequest.upsertRequest(); if (upsertRequest != null) { - upsertRequest.timestamp(timestamp); - upsertRequest.ttl(ttl); upsertRequest.version(version); upsertRequest.versionType(versionType); } IndexRequest doc = updateRequest.doc(); if (doc != null) { - doc.timestamp(timestamp); - doc.ttl(ttl); doc.version(version); doc.versionType(versionType); } diff --git a/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java b/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java index ee4ce570df2..8772752639b 100644 --- a/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java +++ b/core/src/main/java/org/elasticsearch/action/get/MultiGetRequest.java @@ -284,11 +284,6 @@ public class MultiGetRequest extends ActionRequest implements Iterable subRequests() { - return items; - } - /** * Sets 
the preference to execute the search. Defaults to randomize across shards. Can be set to * _local to prefer local shards, _primary to execute only on primary shards, or diff --git a/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java b/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java index fdc248c4b37..b818f0f2548 100644 --- a/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java +++ b/core/src/main/java/org/elasticsearch/action/index/IndexRequest.java @@ -20,10 +20,10 @@ package org.elasticsearch.action.index; import org.elasticsearch.ElasticsearchGenerationException; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.RoutingMissingException; -import org.elasticsearch.action.TimestampParsingException; import org.elasticsearch.action.support.replication.ReplicatedWriteRequest; import org.elasticsearch.client.Requests; import org.elasticsearch.cluster.metadata.MappingMetaData; @@ -41,7 +41,6 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; -import org.elasticsearch.index.mapper.TimestampFieldMapper; import java.io.IOException; import java.nio.charset.StandardCharsets; @@ -75,10 +74,6 @@ public class IndexRequest extends ReplicatedWriteRequest implement private String routing; @Nullable private String parent; - @Nullable - private String timestamp; - @Nullable - private TimeValue ttl; private BytesReference source; @@ -164,12 +159,6 @@ public class IndexRequest extends ReplicatedWriteRequest implement validationException = addValidationError("version type [force] may no longer be used", validationException); } - if (ttl != null) { - if (ttl.millis() < 0) { - validationException = addValidationError("ttl must not be negative", validationException); - } - } - if (id != null && id.getBytes(StandardCharsets.UTF_8).length > 512) { validationException = addValidationError("id is too long, must be no longer than 512 bytes but was: " + id.getBytes(StandardCharsets.UTF_8).length, validationException); @@ -265,49 +254,6 @@ public class IndexRequest extends ReplicatedWriteRequest implement return this.parent; } - /** - * Sets the timestamp either as millis since the epoch, or, in the configured date format. - */ - public IndexRequest timestamp(String timestamp) { - this.timestamp = timestamp; - return this; - } - - public String timestamp() { - return this.timestamp; - } - - /** - * Sets the ttl value as a time value expression. - */ - public IndexRequest ttl(String ttl) { - this.ttl = TimeValue.parseTimeValue(ttl, null, "ttl"); - return this; - } - - /** - * Sets the ttl as a {@link TimeValue} instance. - */ - public IndexRequest ttl(TimeValue ttl) { - this.ttl = ttl; - return this; - } - - /** - * Sets the relative ttl value in milliseconds. It musts be greater than 0 as it makes little sense otherwise. 
- */ - public IndexRequest ttl(long ttl) { - this.ttl = new TimeValue(ttl); - return this; - } - - /** - * Returns the ttl as a {@link TimeValue} - */ - public TimeValue ttl() { - return this.ttl; - } - /** * Sets the ingest pipeline to be executed before indexing the document */ @@ -537,11 +483,6 @@ public class IndexRequest extends ReplicatedWriteRequest implement public void process(@Nullable MappingMetaData mappingMd, boolean allowIdGeneration, String concreteIndex) { - // resolve timestamp if provided externally - if (timestamp != null) { - timestamp = MappingMetaData.Timestamp.parseStringTimestamp(timestamp, - mappingMd != null ? mappingMd.timestamp().dateTimeFormatter() : TimestampFieldMapper.Defaults.DATE_TIME_FORMATTER); - } if (mappingMd != null) { // might as well check for routing here if (mappingMd.routing().required() && routing == null) { @@ -563,30 +504,6 @@ public class IndexRequest extends ReplicatedWriteRequest implement autoGeneratedTimestamp = Math.max(0, System.currentTimeMillis()); // extra paranoia id(UUIDs.base64UUID()); } - - // generate timestamp if not provided, we always have one post this stage... - if (timestamp == null) { - String defaultTimestamp = TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP; - if (mappingMd != null && mappingMd.timestamp() != null) { - // If we explicitly ask to reject null timestamp - if (mappingMd.timestamp().ignoreMissing() != null && mappingMd.timestamp().ignoreMissing() == false) { - throw new TimestampParsingException("timestamp is required by mapping"); - } - defaultTimestamp = mappingMd.timestamp().defaultTimestamp(); - } - - if (defaultTimestamp.equals(TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP)) { - timestamp = Long.toString(System.currentTimeMillis()); - } else { - // if we are here, the defaultTimestamp is not - // TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP but - // this can only happen if defaultTimestamp was - // assigned again because mappingMd and - // mappingMd#timestamp() are not null - assert mappingMd != null; - timestamp = MappingMetaData.Timestamp.parseStringTimestamp(defaultTimestamp, mappingMd.timestamp().dateTimeFormatter()); - } - } } /* resolve the routing if needed */ @@ -601,8 +518,10 @@ public class IndexRequest extends ReplicatedWriteRequest implement id = in.readOptionalString(); routing = in.readOptionalString(); parent = in.readOptionalString(); - timestamp = in.readOptionalString(); - ttl = in.readOptionalWriteable(TimeValue::new); + if (in.getVersion().before(Version.V_6_0_0_alpha1_UNRELEASED)) { + in.readOptionalString(); // timestamp + in.readOptionalWriteable(TimeValue::new); // ttl + } source = in.readBytesReference(); opType = OpType.fromId(in.readByte()); version = in.readLong(); @@ -619,8 +538,10 @@ public class IndexRequest extends ReplicatedWriteRequest implement out.writeOptionalString(id); out.writeOptionalString(routing); out.writeOptionalString(parent); - out.writeOptionalString(timestamp); - out.writeOptionalWriteable(ttl); + if (out.getVersion().before(Version.V_6_0_0_alpha1_UNRELEASED)) { + out.writeOptionalString(null); + out.writeOptionalWriteable(null); + } out.writeBytesReference(source); out.writeByte(opType.getId()); out.writeLong(version); diff --git a/core/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java index 310ef3fb928..6f706419826 100644 --- a/core/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java +++ 
b/core/src/main/java/org/elasticsearch/action/index/IndexRequestBuilder.java @@ -25,7 +25,6 @@ import org.elasticsearch.action.support.replication.ReplicationRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; @@ -231,38 +230,6 @@ public class IndexRequestBuilder extends ReplicationRequestBuilder subRequests() { - return this.requests; - } - @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; diff --git a/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsRequest.java b/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsRequest.java index da9dae6759d..d54448f1c8f 100644 --- a/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsRequest.java +++ b/core/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsRequest.java @@ -23,7 +23,6 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.CompositeIndicesRequest; -import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.RealtimeRequest; import org.elasticsearch.action.ValidateActions; import org.elasticsearch.common.Nullable; @@ -76,11 +75,6 @@ public class MultiTermVectorsRequest extends ActionRequest implements Iterable subRequests() { - return requests; - } - @Override public Iterator iterator() { return Collections.unmodifiableCollection(requests).iterator(); diff --git a/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java b/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java index 0e37b6ff064..7f5482afb0d 100644 --- a/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java +++ b/core/src/main/java/org/elasticsearch/action/update/UpdateHelper.java @@ -31,7 +31,6 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; @@ -42,8 +41,6 @@ import org.elasticsearch.index.get.GetField; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.mapper.ParentFieldMapper; import org.elasticsearch.index.mapper.RoutingFieldMapper; -import org.elasticsearch.index.mapper.TTLFieldMapper; -import org.elasticsearch.index.mapper.TimestampFieldMapper; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.script.ExecutableScript; @@ -55,7 +52,6 @@ import org.elasticsearch.search.lookup.SourceLookup; import java.io.IOException; import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.function.LongSupplier; @@ -76,7 +72,7 @@ public class UpdateHelper extends AbstractComponent { */ public Result prepare(UpdateRequest request, IndexShard indexShard, 
LongSupplier nowInMillis) { final GetResult getResult = indexShard.getService().get(request.type(), request.id(), - new String[]{RoutingFieldMapper.NAME, ParentFieldMapper.NAME, TTLFieldMapper.NAME, TimestampFieldMapper.NAME}, + new String[]{RoutingFieldMapper.NAME, ParentFieldMapper.NAME}, true, request.version(), request.versionType(), FetchSourceContext.FETCH_SOURCE); return prepare(indexShard.shardId(), request, getResult, nowInMillis); } @@ -86,13 +82,11 @@ public class UpdateHelper extends AbstractComponent { */ @SuppressWarnings("unchecked") protected Result prepare(ShardId shardId, UpdateRequest request, final GetResult getResult, LongSupplier nowInMillis) { - long getDateNS = System.nanoTime(); if (!getResult.isExists()) { if (request.upsertRequest() == null && !request.docAsUpsert()) { throw new DocumentMissingException(shardId, request.type(), request.id()); } IndexRequest indexRequest = request.docAsUpsert() ? request.doc() : request.upsertRequest(); - TimeValue ttl = indexRequest.ttl(); if (request.scriptedUpsert() && request.script() != null) { // Run the script to perform the create logic IndexRequest upsert = request.upsertRequest(); @@ -103,10 +97,6 @@ public class UpdateHelper extends AbstractComponent { ctx.put("_source", upsertDoc); ctx.put("_now", nowInMillis.getAsLong()); ctx = executeScript(request.script, ctx); - //Allow the script to set TTL using ctx._ttl - if (ttl == null) { - ttl = getTTLFromScriptContext(ctx); - } //Allow the script to abort the create by setting "op" to "none" String scriptOpChoice = (String) ctx.get("op"); @@ -129,7 +119,6 @@ public class UpdateHelper extends AbstractComponent { indexRequest.index(request.index()).type(request.type()).id(request.id()) // it has to be a "create!" .create(true) - .ttl(ttl) .setRefreshPolicy(request.getRefreshPolicy()) .routing(request.routing()) .parent(request.parent()) @@ -155,8 +144,6 @@ public class UpdateHelper extends AbstractComponent { Tuple> sourceAndContent = XContentHelper.convertToMap(getResult.internalSourceRef(), true); String operation = null; - String timestamp = null; - TimeValue ttl = null; final Map updatedSourceAsMap; final XContentType updateSourceContentType = sourceAndContent.v1(); String routing = getResult.getFields().containsKey(RoutingFieldMapper.NAME) ? getResult.field(RoutingFieldMapper.NAME).getValue().toString() : null; @@ -165,10 +152,6 @@ public class UpdateHelper extends AbstractComponent { if (request.script() == null && request.doc() != null) { IndexRequest indexRequest = request.doc(); updatedSourceAsMap = sourceAndContent.v2(); - if (indexRequest.ttl() != null) { - ttl = indexRequest.ttl(); - } - timestamp = indexRequest.timestamp(); if (indexRequest.routing() != null) { routing = indexRequest.routing(); } @@ -184,16 +167,12 @@ public class UpdateHelper extends AbstractComponent { } } else { Map ctx = new HashMap<>(16); - Long originalTtl = getResult.getFields().containsKey(TTLFieldMapper.NAME) ? (Long) getResult.field(TTLFieldMapper.NAME).getValue() : null; - Long originalTimestamp = getResult.getFields().containsKey(TimestampFieldMapper.NAME) ? 
(Long) getResult.field(TimestampFieldMapper.NAME).getValue() : null; ctx.put("_index", getResult.getIndex()); ctx.put("_type", getResult.getType()); ctx.put("_id", getResult.getId()); ctx.put("_version", getResult.getVersion()); ctx.put("_routing", routing); ctx.put("_parent", parent); - ctx.put("_timestamp", originalTimestamp); - ctx.put("_ttl", originalTtl); ctx.put("_source", sourceAndContent.v2()); ctx.put("_now", nowInMillis.getAsLong()); @@ -201,34 +180,14 @@ public class UpdateHelper extends AbstractComponent { operation = (String) ctx.get("op"); - Object fetchedTimestamp = ctx.get("_timestamp"); - if (fetchedTimestamp != null) { - timestamp = fetchedTimestamp.toString(); - } else if (originalTimestamp != null) { - // No timestamp has been given in the update script, so we keep the previous timestamp if there is one - timestamp = originalTimestamp.toString(); - } - - ttl = getTTLFromScriptContext(ctx); - updatedSourceAsMap = (Map) ctx.get("_source"); } - // apply script to update the source - // No TTL has been given in the update script so we keep previous TTL value if there is one - if (ttl == null) { - Long ttlAsLong = getResult.getFields().containsKey(TTLFieldMapper.NAME) ? (Long) getResult.field(TTLFieldMapper.NAME).getValue() : null; - if (ttlAsLong != null) { - ttl = new TimeValue(ttlAsLong - TimeValue.nsecToMSec(System.nanoTime() - getDateNS));// It is an approximation of exact TTL value, could be improved - } - } - if (operation == null || "index".equals(operation)) { final IndexRequest indexRequest = Requests.indexRequest(request.index()).type(request.type()).id(request.id()).routing(routing).parent(parent) .source(updatedSourceAsMap, updateSourceContentType) .version(updateVersion).versionType(request.versionType()) .waitForActiveShards(request.waitForActiveShards()) - .timestamp(timestamp).ttl(ttl) .setRefreshPolicy(request.getRefreshPolicy()); return new Result(indexRequest, DocWriteResponse.Result.UPDATED, updatedSourceAsMap, updateSourceContentType); } else if ("delete".equals(operation)) { @@ -263,17 +222,6 @@ public class UpdateHelper extends AbstractComponent { return ctx; } - private TimeValue getTTLFromScriptContext(Map ctx) { - Object fetchedTTL = ctx.get("_ttl"); - if (fetchedTTL != null) { - if (fetchedTTL instanceof Number) { - return new TimeValue(((Number) fetchedTTL).longValue()); - } - return TimeValue.parseTimeValue((String) fetchedTTL, null, "_ttl"); - } - return null; - } - /** * Applies {@link UpdateRequest#fetchSource()} to the _source of the updated document to be returned in a update response. 
* For BWC this function also extracts the {@link UpdateRequest#fields()} from the updated document to be returned in a update response diff --git a/core/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java b/core/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java index bbbc9bafd8f..50d84a24129 100644 --- a/core/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java +++ b/core/src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java @@ -28,7 +28,6 @@ import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; @@ -355,33 +354,4 @@ public class UpdateRequestBuilder extends InstanceShardOperationRequestBuilder) () -> new ParameterizedMessage("failed to connect to node [{}], removed from nodes list", listedNode), e); hostFailureListener.onNodeDisconnected(listedNode, e); @@ -469,7 +470,7 @@ final class TransportClientNodesService extends AbstractComponent implements Clo } else { // its a listed node, light connect to it... logger.trace("connecting to listed node (light) [{}]", listedNode); - transportService.connectToNodeLight(listedNode); + transportService.connectToNode(listedNode, ConnectionProfile.LIGHT_PROFILE); } } catch (Exception e) { logger.debug( diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java index 25836d54a1b..a1f217e1377 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java @@ -224,12 +224,15 @@ public class IndexMetaData implements Diffable, FromXContentBuild Setting.boolSetting(SETTING_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE, false, Property.Dynamic, Property.IndexScope); public static final String INDEX_UUID_NA_VALUE = "_na_"; + public static final String INDEX_ROUTING_REQUIRE_GROUP_PREFIX = "index.routing.allocation.require"; + public static final String INDEX_ROUTING_INCLUDE_GROUP_PREFIX = "index.routing.allocation.include"; + public static final String INDEX_ROUTING_EXCLUDE_GROUP_PREFIX = "index.routing.allocation.exclude"; public static final Setting INDEX_ROUTING_REQUIRE_GROUP_SETTING = - Setting.groupSetting("index.routing.allocation.require.", Property.Dynamic, Property.IndexScope); + Setting.groupSetting(INDEX_ROUTING_REQUIRE_GROUP_PREFIX + ".", Property.Dynamic, Property.IndexScope); public static final Setting INDEX_ROUTING_INCLUDE_GROUP_SETTING = - Setting.groupSetting("index.routing.allocation.include.", Property.Dynamic, Property.IndexScope); + Setting.groupSetting(INDEX_ROUTING_INCLUDE_GROUP_PREFIX + ".", Property.Dynamic, Property.IndexScope); public static final Setting INDEX_ROUTING_EXCLUDE_GROUP_SETTING = - Setting.groupSetting("index.routing.allocation.exclude.", Property.Dynamic, Property.IndexScope); + Setting.groupSetting(INDEX_ROUTING_EXCLUDE_GROUP_PREFIX + ".", Property.Dynamic, Property.IndexScope); public static final Setting INDEX_ROUTING_INITIAL_RECOVERY_GROUP_SETTING = Setting.groupSetting("index.routing.allocation.initial_recovery."); // this is only setable internally not a registered setting!! 
diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java index 0798dff1c93..3ea61385f1c 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java @@ -19,19 +19,17 @@ package org.elasticsearch.cluster.metadata; -import org.elasticsearch.action.TimestampParsingException; +import org.elasticsearch.Version; import org.elasticsearch.cluster.AbstractDiffable; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.joda.FormatDateTimeFormatter; -import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.TimestampFieldMapper; import java.io.IOException; import java.util.Map; @@ -75,103 +73,17 @@ public class MappingMetaData extends AbstractDiffable { } } - public static class Timestamp { - - public static String parseStringTimestamp(String timestampAsString, FormatDateTimeFormatter dateTimeFormatter) throws TimestampParsingException { - try { - return Long.toString(dateTimeFormatter.parser().parseMillis(timestampAsString)); - } catch (RuntimeException e) { - throw new TimestampParsingException(timestampAsString, e); - } - } - - - public static final Timestamp EMPTY = new Timestamp(false, TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT, - TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP, null); - - private final boolean enabled; - - private final String format; - - private final FormatDateTimeFormatter dateTimeFormatter; - - private final String defaultTimestamp; - - private final Boolean ignoreMissing; - - public Timestamp(boolean enabled, String format, String defaultTimestamp, Boolean ignoreMissing) { - this.enabled = enabled; - this.format = format; - this.dateTimeFormatter = Joda.forPattern(format); - this.defaultTimestamp = defaultTimestamp; - this.ignoreMissing = ignoreMissing; - } - - public boolean enabled() { - return enabled; - } - - public String format() { - return this.format; - } - - public String defaultTimestamp() { - return this.defaultTimestamp; - } - - public boolean hasDefaultTimestamp() { - return this.defaultTimestamp != null; - } - - public Boolean ignoreMissing() { - return ignoreMissing; - } - - public FormatDateTimeFormatter dateTimeFormatter() { - return this.dateTimeFormatter; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - Timestamp timestamp = (Timestamp) o; - - if (enabled != timestamp.enabled) return false; - if (format != null ? !format.equals(timestamp.format) : timestamp.format != null) return false; - if (defaultTimestamp != null ? !defaultTimestamp.equals(timestamp.defaultTimestamp) : timestamp.defaultTimestamp != null) return false; - if (ignoreMissing != null ? !ignoreMissing.equals(timestamp.ignoreMissing) : timestamp.ignoreMissing != null) return false; - - return true; - } - - @Override - public int hashCode() { - int result = (enabled ? 
1 : 0); - result = 31 * result + (format != null ? format.hashCode() : 0); - result = 31 * result + (dateTimeFormatter != null ? dateTimeFormatter.hashCode() : 0); - result = 31 * result + (defaultTimestamp != null ? defaultTimestamp.hashCode() : 0); - result = 31 * result + (ignoreMissing != null ? ignoreMissing.hashCode() : 0); - return result; - } - } - private final String type; private final CompressedXContent source; private Routing routing; - private Timestamp timestamp; private boolean hasParentField; public MappingMetaData(DocumentMapper docMapper) { this.type = docMapper.type(); this.source = docMapper.mappingSource(); this.routing = new Routing(docMapper.routingFieldMapper().required()); - this.timestamp = new Timestamp(docMapper.timestampFieldMapper().enabled(), - docMapper.timestampFieldMapper().fieldType().dateTimeFormatter().format(), docMapper.timestampFieldMapper().defaultTimestamp(), - docMapper.timestampFieldMapper().ignoreMissing()); this.hasParentField = docMapper.parentFieldMapper().active(); } @@ -227,29 +139,6 @@ public class MappingMetaData extends AbstractDiffable { } else { this.routing = Routing.EMPTY; } - if (withoutType.containsKey("_timestamp")) { - boolean enabled = false; - String format = TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT; - String defaultTimestamp = TimestampFieldMapper.Defaults.DEFAULT_TIMESTAMP; - Boolean ignoreMissing = null; - Map timestampNode = (Map) withoutType.get("_timestamp"); - for (Map.Entry entry : timestampNode.entrySet()) { - String fieldName = entry.getKey(); - Object fieldNode = entry.getValue(); - if (fieldName.equals("enabled")) { - enabled = lenientNodeBooleanValue(fieldNode); - } else if (fieldName.equals("format")) { - format = fieldNode.toString(); - } else if (fieldName.equals("default") && fieldNode != null) { - defaultTimestamp = fieldNode.toString(); - } else if (fieldName.equals("ignore_missing")) { - ignoreMissing = lenientNodeBooleanValue(fieldNode); - } - } - this.timestamp = new Timestamp(enabled, format, defaultTimestamp, ignoreMissing); - } else { - this.timestamp = Timestamp.EMPTY; - } if (withoutType.containsKey("_parent")) { this.hasParentField = true; } else { @@ -257,11 +146,10 @@ public class MappingMetaData extends AbstractDiffable { } } - public MappingMetaData(String type, CompressedXContent source, Routing routing, Timestamp timestamp, boolean hasParentField) { + public MappingMetaData(String type, CompressedXContent source, Routing routing, boolean hasParentField) { this.type = type; this.source = source; this.routing = routing; - this.timestamp = timestamp; this.hasParentField = hasParentField; } @@ -269,9 +157,6 @@ public class MappingMetaData extends AbstractDiffable { if (routing == Routing.EMPTY) { routing = defaultMapping.routing(); } - if (timestamp == Timestamp.EMPTY) { - timestamp = defaultMapping.timestamp(); - } } public String type() { @@ -309,21 +194,19 @@ public class MappingMetaData extends AbstractDiffable { return this.routing; } - public Timestamp timestamp() { - return this.timestamp; - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(type()); source().writeTo(out); // routing out.writeBoolean(routing().required()); - // timestamp - out.writeBoolean(timestamp().enabled()); - out.writeString(timestamp().format()); - out.writeOptionalString(timestamp().defaultTimestamp()); - out.writeOptionalBoolean(timestamp().ignoreMissing()); + if (out.getVersion().before(Version.V_6_0_0_alpha1_UNRELEASED)) { + // timestamp + out.writeBoolean(false); // 
enabled + out.writeString(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.format()); + out.writeOptionalString(null); + out.writeOptionalBoolean(null); + } out.writeBoolean(hasParentField()); } @@ -336,7 +219,6 @@ public class MappingMetaData extends AbstractDiffable { if (!routing.equals(that.routing)) return false; if (!source.equals(that.source)) return false; - if (!timestamp.equals(that.timestamp)) return false; if (!type.equals(that.type)) return false; return true; @@ -347,7 +229,6 @@ public class MappingMetaData extends AbstractDiffable { int result = type.hashCode(); result = 31 * result + source.hashCode(); result = 31 * result + routing.hashCode(); - result = 31 * result + timestamp.hashCode(); return result; } @@ -356,18 +237,19 @@ public class MappingMetaData extends AbstractDiffable { CompressedXContent source = CompressedXContent.readCompressedString(in); // routing Routing routing = new Routing(in.readBoolean()); - // timestamp + if (in.getVersion().before(Version.V_6_0_0_alpha1_UNRELEASED)) { + // timestamp + boolean enabled = in.readBoolean(); + if (enabled) { + throw new IllegalArgumentException("_timestamp may not be enabled"); + } + in.readString(); // format + in.readOptionalString(); // defaultTimestamp + in.readOptionalBoolean(); // ignoreMissing + } - boolean enabled = in.readBoolean(); - String format = in.readString(); - String defaultTimestamp = in.readOptionalString(); - Boolean ignoreMissing = null; - - ignoreMissing = in.readOptionalBoolean(); - - final Timestamp timestamp = new Timestamp(enabled, format, defaultTimestamp, ignoreMissing); final boolean hasParentField = in.readBoolean(); - return new MappingMetaData(type, source, routing, timestamp, hasParentField); + return new MappingMetaData(type, source, routing, hasParentField); } } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java index 41b1923c43f..9e9923a92d5 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java @@ -55,7 +55,6 @@ import org.elasticsearch.gateway.MetaDataStateFormat; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.indices.recovery.RecoverySettings; -import org.elasticsearch.indices.ttl.IndicesTTLService; import org.elasticsearch.ingest.IngestMetadata; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.ScriptMetaData; @@ -761,7 +760,6 @@ public class MetaData implements Iterable, Diffable, Fr /** All known time cluster settings. 
*/ public static final Set CLUSTER_TIME_SETTINGS = unmodifiableSet(newHashSet( - IndicesTTLService.INDICES_TTL_INTERVAL_SETTING.getKey(), RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC_SETTING.getKey(), RecoverySettings.INDICES_RECOVERY_RETRY_DELAY_NETWORK_SETTING.getKey(), RecoverySettings.INDICES_RECOVERY_ACTIVITY_TIMEOUT_SETTING.getKey(), diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java index c19dcdd0ecb..b1ffccf6aeb 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java @@ -316,7 +316,7 @@ public class MetaDataCreateIndexService extends AbstractComponent { if (indexSettingsBuilder.get(SETTING_VERSION_CREATED) == null) { DiscoveryNodes nodes = currentState.nodes(); - final Version createdVersion = Version.smallest(Version.CURRENT, nodes.getSmallestNonClientNodeVersion()); + final Version createdVersion = Version.min(Version.CURRENT, nodes.getSmallestNonClientNodeVersion()); indexSettingsBuilder.put(SETTING_VERSION_CREATED, createdVersion); } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java index 689eff0da61..2a2c6c65b96 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexStateService.java @@ -20,6 +20,7 @@ package org.elasticsearch.cluster.metadata; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.close.CloseIndexClusterStateUpdateRequest; import org.elasticsearch.action.admin.indices.open.OpenIndexClusterStateUpdateRequest; @@ -160,12 +161,14 @@ public class MetaDataIndexStateService extends AbstractComponent { MetaData.Builder mdBuilder = MetaData.builder(currentState.metaData()); ClusterBlocks.Builder blocksBuilder = ClusterBlocks.builder() .blocks(currentState.blocks()); + final Version minIndexCompatibilityVersion = currentState.getNodes().getMaxNodeVersion() + .minimumIndexCompatibilityVersion(); for (IndexMetaData closedMetaData : indicesToOpen) { final String indexName = closedMetaData.getIndex().getName(); IndexMetaData indexMetaData = IndexMetaData.builder(closedMetaData).state(IndexMetaData.State.OPEN).build(); // The index might be closed because we couldn't import it due to old incompatible version // We need to check that this index can be upgraded to the current version - indexMetaData = metaDataIndexUpgradeService.upgradeIndexMetaData(indexMetaData); + indexMetaData = metaDataIndexUpgradeService.upgradeIndexMetaData(indexMetaData, minIndexCompatibilityVersion); try { indicesService.verifyIndexMetadata(indexMetaData, indexMetaData); } catch (Exception e) { diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java index b261b8850c6..2a8b80b9e68 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeService.java @@ -67,13 +67,13 @@ public class 
MetaDataIndexUpgradeService extends AbstractComponent { * If the index does not need upgrade it returns the index metadata unchanged, otherwise it returns a modified index metadata. If index * cannot be updated the method throws an exception. */ - public IndexMetaData upgradeIndexMetaData(IndexMetaData indexMetaData) { + public IndexMetaData upgradeIndexMetaData(IndexMetaData indexMetaData, Version minimumIndexCompatibilityVersion) { // Throws an exception if there are too-old segments: if (isUpgraded(indexMetaData)) { assert indexMetaData == archiveBrokenIndexSettings(indexMetaData) : "all settings must have been upgraded before"; return indexMetaData; } - checkSupportedVersion(indexMetaData); + checkSupportedVersion(indexMetaData, minimumIndexCompatibilityVersion); IndexMetaData newMetaData = indexMetaData; // we have to run this first otherwise in we try to create IndexSettings // with broken settings and fail in checkMappingsCompatibility @@ -92,21 +92,26 @@ public class MetaDataIndexUpgradeService extends AbstractComponent { } /** - * Elasticsearch 5.0 no longer supports indices with pre Lucene v5.0 (Elasticsearch v2.0.0.beta1) segments. All indices - * that were created before Elasticsearch v2.0.0.beta1 should be reindexed in Elasticsearch 2.x - * before they can be opened by this version of elasticsearch. */ - private void checkSupportedVersion(IndexMetaData indexMetaData) { - if (indexMetaData.getState() == IndexMetaData.State.OPEN && isSupportedVersion(indexMetaData) == false) { - throw new IllegalStateException("The index [" + indexMetaData.getIndex() + "] was created before v2.0.0.beta1." - + " It should be reindexed in Elasticsearch 2.x before upgrading to " + Version.CURRENT + "."); + * Elasticsearch v6.0 no longer supports indices created pre v5.0. All indices + * that were created before Elasticsearch v5.0 should be re-indexed in Elasticsearch 5.x + * before they can be opened by this version of elasticsearch. + */ + private void checkSupportedVersion(IndexMetaData indexMetaData, Version minimumIndexCompatibilityVersion) { + if (indexMetaData.getState() == IndexMetaData.State.OPEN && isSupportedVersion(indexMetaData, + minimumIndexCompatibilityVersion) == false) { + throw new IllegalStateException("The index [" + indexMetaData.getIndex() + "] was created with version [" + + indexMetaData.getCreationVersion() + "] but the minimum compatible version is [" + + + minimumIndexCompatibilityVersion + "]. 
It should be re-indexed in Elasticsearch " + minimumIndexCompatibilityVersion.major + + ".x before upgrading to " + Version.CURRENT + "."); } } /* * Returns true if this index can be supported by the current version of elasticsearch */ - private static boolean isSupportedVersion(IndexMetaData indexMetaData) { - return indexMetaData.getCreationVersion().onOrAfter(Version.V_2_0_0_beta1); + private static boolean isSupportedVersion(IndexMetaData indexMetaData, Version minimumIndexCompatibilityVersion) { + return indexMetaData.getCreationVersion().onOrAfter(minimumIndexCompatibilityVersion); } /** @@ -173,4 +178,4 @@ public class MetaDataIndexUpgradeService extends AbstractComponent { return indexMetaData; } } -} \ No newline at end of file +} diff --git a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java index e90ada51022..a8e59006af7 100644 --- a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java +++ b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java @@ -135,7 +135,8 @@ public class DiscoveryNode implements Writeable, ToXContent { */ public DiscoveryNode(String nodeName, String nodeId, TransportAddress address, Map attributes, Set roles, Version version) { - this(nodeName, nodeId, UUIDs.randomBase64UUID(), address.getAddress(), address.getAddress(), address, attributes, roles, version); + this(nodeName, nodeId, UUIDs.randomBase64UUID(), address.address().getHostString(), address.getAddress(), address, attributes, + roles, version); } /** diff --git a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java index e557b52c1d4..6d80a9573ba 100644 --- a/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java +++ b/core/src/main/java/org/elasticsearch/cluster/node/DiscoveryNodes.java @@ -56,10 +56,13 @@ public class DiscoveryNodes extends AbstractDiffable implements private final String masterNodeId; private final String localNodeId; private final Version minNonClientNodeVersion; + private final Version maxNodeVersion; + private final Version minNodeVersion; private DiscoveryNodes(ImmutableOpenMap nodes, ImmutableOpenMap dataNodes, ImmutableOpenMap masterNodes, ImmutableOpenMap ingestNodes, - String masterNodeId, String localNodeId, Version minNonClientNodeVersion) { + String masterNodeId, String localNodeId, Version minNonClientNodeVersion, Version maxNodeVersion, + Version minNodeVersion) { this.nodes = nodes; this.dataNodes = dataNodes; this.masterNodes = masterNodes; @@ -67,6 +70,8 @@ public class DiscoveryNodes extends AbstractDiffable implements this.masterNodeId = masterNodeId; this.localNodeId = localNodeId; this.minNonClientNodeVersion = minNonClientNodeVersion; + this.minNodeVersion = minNodeVersion; + this.maxNodeVersion = maxNodeVersion; } @Override @@ -235,6 +240,24 @@ public class DiscoveryNodes extends AbstractDiffable implements return minNonClientNodeVersion; } + /** + * Returns the version of the node with the oldest version in the cluster. 
+ * + * @return the oldest version in the cluster + */ + public Version getMinNodeVersion() { + return minNodeVersion; + } + + /** + * Returns the version of the node with the youngest version in the cluster + * + * @return the youngest version in the cluster + */ + public Version getMaxNodeVersion() { + return maxNodeVersion; + } + /** * Resolve a node with a given id * @@ -631,25 +654,27 @@ public class DiscoveryNodes extends AbstractDiffable implements ImmutableOpenMap.Builder masterNodesBuilder = ImmutableOpenMap.builder(); ImmutableOpenMap.Builder ingestNodesBuilder = ImmutableOpenMap.builder(); Version minNodeVersion = Version.CURRENT; + Version maxNodeVersion = Version.CURRENT; Version minNonClientNodeVersion = Version.CURRENT; for (ObjectObjectCursor nodeEntry : nodes) { if (nodeEntry.value.isDataNode()) { dataNodesBuilder.put(nodeEntry.key, nodeEntry.value); - minNonClientNodeVersion = Version.smallest(minNonClientNodeVersion, nodeEntry.value.getVersion()); + minNonClientNodeVersion = Version.min(minNonClientNodeVersion, nodeEntry.value.getVersion()); } if (nodeEntry.value.isMasterNode()) { masterNodesBuilder.put(nodeEntry.key, nodeEntry.value); - minNonClientNodeVersion = Version.smallest(minNonClientNodeVersion, nodeEntry.value.getVersion()); + minNonClientNodeVersion = Version.min(minNonClientNodeVersion, nodeEntry.value.getVersion()); } if (nodeEntry.value.isIngestNode()) { ingestNodesBuilder.put(nodeEntry.key, nodeEntry.value); } - minNodeVersion = Version.smallest(minNodeVersion, nodeEntry.value.getVersion()); + minNodeVersion = Version.min(minNodeVersion, nodeEntry.value.getVersion()); + maxNodeVersion = Version.max(maxNodeVersion, nodeEntry.value.getVersion()); } return new DiscoveryNodes( nodes.build(), dataNodesBuilder.build(), masterNodesBuilder.build(), ingestNodesBuilder.build(), - masterNodeId, localNodeId, minNonClientNodeVersion + masterNodeId, localNodeId, minNonClientNodeVersion, maxNodeVersion, minNodeVersion ); } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationExplanation.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationExplanation.java deleted file mode 100644 index e2b5de9b52d..00000000000 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/AllocationExplanation.java +++ /dev/null @@ -1,153 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.cluster.routing.allocation; - -import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Streamable; -import org.elasticsearch.index.shard.ShardId; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -/** - * Instances of this class keeps explanations of decisions that have been made by allocation. - * An {@link AllocationExplanation} consists of a set of per node explanations. - * Since {@link NodeExplanation}s are related to shards an {@link AllocationExplanation} maps - * a shards id to a set of {@link NodeExplanation}s. - */ -public class AllocationExplanation implements Streamable { - - public static final AllocationExplanation EMPTY = new AllocationExplanation(); - - /** - * Instances of this class keep messages and informations about nodes of an allocation - */ - public static class NodeExplanation { - private final DiscoveryNode node; - - private final String description; - - /** - * Creates a new {@link NodeExplanation} - * - * @param node node referenced by this {@link NodeExplanation} - * @param description a message associated with the given node - */ - public NodeExplanation(DiscoveryNode node, String description) { - this.node = node; - this.description = description; - } - - /** - * The node referenced by the explanation - * @return referenced node - */ - public DiscoveryNode node() { - return node; - } - - /** - * Get the explanation for the node - * @return explanation for the node - */ - public String description() { - return description; - } - } - - private final Map> explanations = new HashMap<>(); - - /** - * Create and add a node explanation to this explanation referencing a shard - * @param shardId id the of the referenced shard - * @param nodeExplanation Explanation itself - * @return AllocationExplanation involving the explanation - */ - public AllocationExplanation add(ShardId shardId, NodeExplanation nodeExplanation) { - List list = explanations.get(shardId); - if (list == null) { - list = new ArrayList<>(); - explanations.put(shardId, list); - } - list.add(nodeExplanation); - return this; - } - - /** - * List of explanations involved by this AllocationExplanation - * @return Map of shard ids and corresponding explanations - */ - public Map> explanations() { - return this.explanations; - } - - /** - * Read an {@link AllocationExplanation} from an {@link StreamInput} - * @param in {@link StreamInput} to read from - * @return a new {@link AllocationExplanation} read from the stream - * @throws IOException if something bad happened while reading - */ - public static AllocationExplanation readAllocationExplanation(StreamInput in) throws IOException { - AllocationExplanation e = new AllocationExplanation(); - e.readFrom(in); - return e; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - int size = in.readVInt(); - for (int i = 0; i < size; i++) { - ShardId shardId = ShardId.readShardId(in); - int size2 = in.readVInt(); - List ne = new ArrayList<>(size2); - for (int j = 0; j < size2; j++) { - DiscoveryNode node = null; - if (in.readBoolean()) { - node = new DiscoveryNode(in); - } - ne.add(new NodeExplanation(node, in.readString())); - } - explanations.put(shardId, ne); - } - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - 
out.writeVInt(explanations.size()); - for (Map.Entry> entry : explanations.entrySet()) { - entry.getKey().writeTo(out); - out.writeVInt(entry.getValue().size()); - for (NodeExplanation nodeExplanation : entry.getValue()) { - if (nodeExplanation.node() == null) { - out.writeBoolean(false); - } else { - out.writeBoolean(true); - nodeExplanation.node().writeTo(out); - } - out.writeString(nodeExplanation.description()); - } - } - } -} diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdSettings.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdSettings.java index 81b9042fb33..b87add57ce7 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdSettings.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdSettings.java @@ -49,6 +49,8 @@ public class DiskThresholdSettings { Setting.positiveTimeSetting("cluster.routing.allocation.disk.reroute_interval", TimeValue.timeValueSeconds(60), Setting.Property.Dynamic, Setting.Property.NodeScope); + private volatile String lowWatermarkRaw; + private volatile String highWatermarkRaw; private volatile Double freeDiskThresholdLow; private volatile Double freeDiskThresholdHigh; private volatile ByteSizeValue freeBytesThresholdLow; @@ -86,6 +88,7 @@ public class DiskThresholdSettings { private void setLowWatermark(String lowWatermark) { // Watermark is expressed in terms of used data, but we need "free" data watermark + this.lowWatermarkRaw = lowWatermark; this.freeDiskThresholdLow = 100.0 - thresholdPercentageFromWatermark(lowWatermark); this.freeBytesThresholdLow = thresholdBytesFromWatermark(lowWatermark, CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey()); @@ -93,11 +96,26 @@ public class DiskThresholdSettings { private void setHighWatermark(String highWatermark) { // Watermark is expressed in terms of used data, but we need "free" data watermark + this.highWatermarkRaw = highWatermark; this.freeDiskThresholdHigh = 100.0 - thresholdPercentageFromWatermark(highWatermark); this.freeBytesThresholdHigh = thresholdBytesFromWatermark(highWatermark, CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey()); } + /** + * Gets the raw (uninterpreted) low watermark value as found in the settings. + */ + public String getLowWatermarkRaw() { + return lowWatermarkRaw; + } + + /** + * Gets the raw (uninterpreted) high watermark value as found in the settings. 
+ */ + public String getHighWatermarkRaw() { + return highWatermarkRaw; + } + public Double getFreeDiskThresholdLow() { return freeDiskThresholdLow; } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/RoutingAllocation.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/RoutingAllocation.java index 886b42f57d6..1899ea1cc55 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/RoutingAllocation.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/RoutingAllocation.java @@ -31,7 +31,6 @@ import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.snapshots.RestoreService; import org.elasticsearch.snapshots.RestoreService.RestoreInProgressUpdater; import java.util.HashMap; @@ -61,8 +60,6 @@ public class RoutingAllocation { private final ImmutableOpenMap customs; - private final AllocationExplanation explanation = new AllocationExplanation(); - private final ClusterInfo clusterInfo; private Map> ignoredShardToNodes = null; @@ -162,14 +159,6 @@ public class RoutingAllocation { return customs; } - /** - * Get explanations of current routing - * @return explanation of routing - */ - public AllocationExplanation explanation() { - return explanation; - } - public void ignoreDisable(boolean ignoreDisable) { this.ignoreDisable = ignoreDisable; } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java index f78dc784d9d..93c45e7832f 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java @@ -87,22 +87,6 @@ public class AwarenessAllocationDecider extends AllocationDecider { private volatile Map forcedAwarenessAttributes; - /** - * Creates a new {@link AwarenessAllocationDecider} instance - */ - public AwarenessAllocationDecider() { - this(Settings.Builder.EMPTY_SETTINGS); - } - - /** - * Creates a new {@link AwarenessAllocationDecider} instance from given settings - * - * @param settings {@link Settings} to use - */ - public AwarenessAllocationDecider(Settings settings) { - this(settings, new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS)); - } - public AwarenessAllocationDecider(Settings settings, ClusterSettings clusterSettings) { super(settings); this.awarenessAttributes = CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING.get(settings); @@ -140,7 +124,9 @@ public class AwarenessAllocationDecider extends AllocationDecider { private Decision underCapacity(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation, boolean moveToNode) { if (awarenessAttributes.length == 0) { - return allocation.decision(Decision.YES, NAME, "allocation awareness is not enabled"); + return allocation.decision(Decision.YES, NAME, + "allocation awareness is not enabled, set [%s] to enable it", + CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING.getKey()); } IndexMetaData indexMetaData = allocation.metaData().getIndexSafe(shardRouting.index()); @@ -148,7 +134,10 @@ public class AwarenessAllocationDecider extends AllocationDecider { for 
(String awarenessAttribute : awarenessAttributes) { // the node the shard exists on must be associated with an awareness attribute if (!node.node().getAttributes().containsKey(awarenessAttribute)) { - return allocation.decision(Decision.NO, NAME, "node does not contain the awareness attribute: [%s]", awarenessAttribute); + return allocation.decision(Decision.NO, NAME, + "node does not contain the awareness attribute [%s]; required attributes [%s=%s]", + awarenessAttribute, CLUSTER_ROUTING_ALLOCATION_AWARENESS_ATTRIBUTE_SETTING.getKey(), + allocation.debugDecision() ? Strings.arrayToCommaDelimitedString(awarenessAttributes) : null); } // build attr_value -> nodes map @@ -206,15 +195,14 @@ public class AwarenessAllocationDecider extends AllocationDecider { // if we are above with leftover, then we know we are not good, even with mod if (currentNodeCount > (requiredCountPerAttribute + leftoverPerAttribute)) { return allocation.decision(Decision.NO, NAME, - "there are too many shards on the node for attribute [%s], there are [%d] total shards for the index " + - " and [%d] total attributes values, expected the node count [%d] to be lower or equal to the required " + - "number of shards per attribute [%d] plus leftover [%d]", + "there are too many copies of the shard allocated to nodes with attribute [%s], there are [%d] total configured " + + "shard copies for this shard id and [%d] total attribute values, expected the allocated shard count per " + + "attribute [%d] to be less than or equal to the upper bound of the required number of shards per attribute [%d]", awarenessAttribute, shardCount, numberOfAttributes, currentNodeCount, - requiredCountPerAttribute, - leftoverPerAttribute); + requiredCountPerAttribute + leftoverPerAttribute); } // all is well, we are below or same as average if (currentNodeCount <= requiredCountPerAttribute) { diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java index c343d4254c8..4e4fb58799b 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ClusterRebalanceAllocationDecider.java @@ -48,14 +48,15 @@ import org.elasticsearch.common.settings.Settings; public class ClusterRebalanceAllocationDecider extends AllocationDecider { public static final String NAME = "cluster_rebalance"; + private static final String CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE = "cluster.routing.allocation.allow_rebalance"; public static final Setting CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING = - new Setting<>("cluster.routing.allocation.allow_rebalance", ClusterRebalanceType.INDICES_ALL_ACTIVE.name().toLowerCase(Locale.ROOT), + new Setting<>(CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, ClusterRebalanceType.INDICES_ALL_ACTIVE.toString(), ClusterRebalanceType::parseString, Property.Dynamic, Property.NodeScope); /** * An enum representation for the configured re-balance type. 
*/ - public static enum ClusterRebalanceType { + public enum ClusterRebalanceType { /** * Re-balancing is allowed once a shard replication group is active */ @@ -80,6 +81,11 @@ public class ClusterRebalanceAllocationDecider extends AllocationDecider { throw new IllegalArgumentException("Illegal value for " + CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING + ": " + typeString); } + + @Override + public String toString() { + return name().toLowerCase(Locale.ROOT); + } } private volatile ClusterRebalanceType type; @@ -94,8 +100,7 @@ public class ClusterRebalanceAllocationDecider extends AllocationDecider { CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getRaw(settings)); type = ClusterRebalanceType.INDICES_ALL_ACTIVE; } - logger.debug("using [{}] with [{}]", CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING.getKey(), - type.toString().toLowerCase(Locale.ROOT)); + logger.debug("using [{}] with [{}]", CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, type); clusterSettings.addSettingsUpdateConsumer(CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE_SETTING, this::setType); } @@ -115,12 +120,14 @@ public class ClusterRebalanceAllocationDecider extends AllocationDecider { // check if there are unassigned primaries. if ( allocation.routingNodes().hasUnassignedPrimaries() ) { return allocation.decision(Decision.NO, NAME, - "the cluster has unassigned primary shards and rebalance type is set to [%s]", type); + "the cluster has unassigned primary shards and [%s] is set to [%s]", + CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, type); } // check if there are initializing primaries that don't have a relocatingNodeId entry. if ( allocation.routingNodes().hasInactivePrimaries() ) { return allocation.decision(Decision.NO, NAME, - "the cluster has inactive primary shards and rebalance type is set to [%s]", type); + "the cluster has inactive primary shards and [%s] is set to [%s]", + CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, type); } return allocation.decision(Decision.YES, NAME, "all primary shards are active"); @@ -129,16 +136,18 @@ public class ClusterRebalanceAllocationDecider extends AllocationDecider { // check if there are unassigned shards. if (allocation.routingNodes().hasUnassignedShards() ) { return allocation.decision(Decision.NO, NAME, - "the cluster has unassigned shards and rebalance type is set to [%s]", type); + "the cluster has unassigned shards and [%s] is set to [%s]", + CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, type); } // in case all indices are assigned, are there initializing shards which // are not relocating? 
if ( allocation.routingNodes().hasInactiveShards() ) { return allocation.decision(Decision.NO, NAME, - "the cluster has inactive shards and rebalance type is set to [%s]", type); + "the cluster has inactive shards and [%s] is set to [%s]", + CLUSTER_ROUTING_ALLOCATION_ALLOW_REBALANCE, type); } } // type == Type.ALWAYS - return allocation.decision(Decision.YES, NAME, "all shards are active, rebalance type is [%s]", type); + return allocation.decision(Decision.YES, NAME, "all shards are active"); } } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java index dd3ece10dd5..6ec123ddab3 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ConcurrentRebalanceAllocationDecider.java @@ -66,9 +66,11 @@ public class ConcurrentRebalanceAllocationDecider extends AllocationDecider { } int relocatingShards = allocation.routingNodes().getRelocatingShardCount(); if (relocatingShards >= clusterConcurrentRebalance) { - return allocation.decision(Decision.NO, NAME, - "too many shards are concurrently rebalancing [%d], limit: [%d]", - relocatingShards, clusterConcurrentRebalance); + return allocation.decision(Decision.THROTTLE, NAME, + "reached the limit of concurrently rebalancing shards [%d], [%s=%d]", + relocatingShards, + CLUSTER_ROUTING_ALLOCATION_CLUSTER_CONCURRENT_REBALANCE_SETTING.getKey(), + clusterConcurrentRebalance); } return allocation.decision(Decision.YES, NAME, "below threshold [%d] for concurrent rebalances, current rebalance shard count [%d]", diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java index 53d3dd29034..5eb1ae1751e 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java @@ -40,6 +40,9 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; +import static org.elasticsearch.cluster.routing.allocation.DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING; +import static org.elasticsearch.cluster.routing.allocation.DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING; + /** * The {@link DiskThresholdDecider} checks that the node a shard is potentially * being allocated to has enough disk space. 
@@ -135,8 +138,10 @@ public class DiskThresholdDecider extends AllocationDecider { diskThresholdSettings.getFreeBytesThresholdLow(), freeBytes, node.nodeId()); } return allocation.decision(Decision.NO, NAME, - "the node is above the low watermark and has less than required [%s] free, free: [%s]", - diskThresholdSettings.getFreeBytesThresholdLow(), new ByteSizeValue(freeBytes)); + "the node is above the low watermark [%s=%s], having less than the minimum required [%s] free space, actual free: [%s]", + CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), + diskThresholdSettings.getLowWatermarkRaw(), + diskThresholdSettings.getFreeBytesThresholdLow(), new ByteSizeValue(freeBytes)); } else if (freeBytes > diskThresholdSettings.getFreeBytesThresholdHigh().getBytes()) { // Allow the shard to be allocated because it is primary that // has never been allocated if it's under the high watermark @@ -146,7 +151,8 @@ public class DiskThresholdDecider extends AllocationDecider { diskThresholdSettings.getFreeBytesThresholdLow(), freeBytes, node.nodeId()); } return allocation.decision(Decision.YES, NAME, - "the node is above the low watermark, but this primary shard has never been allocated before"); + "the node is above the low watermark, but less than the high watermark, and this primary shard has " + + "never been allocated before"); } else { // Even though the primary has never been allocated, the node is // above the high watermark, so don't allow allocating the shard @@ -156,9 +162,11 @@ public class DiskThresholdDecider extends AllocationDecider { diskThresholdSettings.getFreeBytesThresholdHigh(), freeBytes, node.nodeId()); } return allocation.decision(Decision.NO, NAME, - "the node is above the high watermark even though this shard has never been allocated " + - "and has less than required [%s] free on node, free: [%s]", - diskThresholdSettings.getFreeBytesThresholdHigh(), new ByteSizeValue(freeBytes)); + "the node is above the high watermark [%s=%s], having less than the minimum required [%s] free space, " + + "actual free: [%s]", + CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), + diskThresholdSettings.getHighWatermarkRaw(), + diskThresholdSettings.getFreeBytesThresholdHigh(), new ByteSizeValue(freeBytes)); } } @@ -172,8 +180,10 @@ public class DiskThresholdDecider extends AllocationDecider { Strings.format1Decimals(usedDiskPercentage, "%"), node.nodeId()); } return allocation.decision(Decision.NO, NAME, - "the node is above the low watermark and has more than allowed [%s%%] used disk, free: [%s%%]", - usedDiskThresholdLow, freeDiskPercentage); + "the node is above the low watermark [%s=%s], using more disk space than the maximum allowed [%s%%], " + + "actual free: [%s%%]", + CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), + diskThresholdSettings.getLowWatermarkRaw(), usedDiskThresholdLow, freeDiskPercentage); } else if (freeDiskPercentage > diskThresholdSettings.getFreeDiskThresholdHigh()) { // Allow the shard to be allocated because it is primary that // has never been allocated if it's under the high watermark @@ -184,7 +194,8 @@ public class DiskThresholdDecider extends AllocationDecider { Strings.format1Decimals(usedDiskPercentage, "%"), node.nodeId()); } return allocation.decision(Decision.YES, NAME, - "the node is above the low watermark, but this primary shard has never been allocated before"); + "the node is above the low watermark, but less than the high watermark, and this primary shard has " + + "never been allocated before"); } else { // 
Even though the primary has never been allocated, the node is // above the high watermark, so don't allow allocating the shard @@ -195,9 +206,10 @@ public class DiskThresholdDecider extends AllocationDecider { Strings.format1Decimals(freeDiskPercentage, "%"), node.nodeId()); } return allocation.decision(Decision.NO, NAME, - "the node is above the high watermark even though this shard has never been allocated " + - "and has more than allowed [%s%%] used disk, free: [%s%%]", - usedDiskThresholdHigh, freeDiskPercentage); + "the node is above the high watermark [%s=%s], using more disk space than the maximum allowed [%s%%], " + + "actual free: [%s%%]", + CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), + diskThresholdSettings.getHighWatermarkRaw(), usedDiskThresholdHigh, freeDiskPercentage); } } @@ -210,9 +222,11 @@ public class DiskThresholdDecider extends AllocationDecider { "{} free bytes threshold ({} bytes free), preventing allocation", node.nodeId(), diskThresholdSettings.getFreeBytesThresholdHigh(), freeBytesAfterShard); return allocation.decision(Decision.NO, NAME, - "after allocating the shard to this node, it would be above the high watermark " + - "and have less than required [%s] free, free: [%s]", - diskThresholdSettings.getFreeBytesThresholdLow(), new ByteSizeValue(freeBytesAfterShard)); + "allocating the shard to this node will bring the node above the high watermark [%s=%s] " + + "and cause it to have less than the minimum required [%s] of free space (free bytes after shard added: [%s])", + CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), + diskThresholdSettings.getHighWatermarkRaw(), + diskThresholdSettings.getFreeBytesThresholdHigh(), new ByteSizeValue(freeBytesAfterShard)); } if (freeSpaceAfterShard < diskThresholdSettings.getFreeDiskThresholdHigh()) { logger.warn("after allocating, node [{}] would have more than the allowed " + @@ -220,9 +234,10 @@ public class DiskThresholdDecider extends AllocationDecider { node.nodeId(), Strings.format1Decimals(diskThresholdSettings.getFreeDiskThresholdHigh(), "%"), Strings.format1Decimals(freeSpaceAfterShard, "%")); return allocation.decision(Decision.NO, NAME, - "after allocating the shard to this node, it would be above the high watermark " + - "and have more than allowed [%s%%] used disk, free: [%s%%]", - usedDiskThresholdLow, freeSpaceAfterShard); + "allocating the shard to this node will bring the node above the high watermark [%s=%s] " + + "and cause it to use more disk space than the maximum allowed [%s%%] (free space after shard added: [%s%%])", + CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), + diskThresholdSettings.getHighWatermarkRaw(), usedDiskThresholdHigh, freeSpaceAfterShard); } return allocation.decision(Decision.YES, NAME, @@ -264,9 +279,11 @@ public class DiskThresholdDecider extends AllocationDecider { diskThresholdSettings.getFreeBytesThresholdHigh(), freeBytes, node.nodeId()); } return allocation.decision(Decision.NO, NAME, - "after allocating this shard this node would be above the high watermark " + - "and there would be less than required [%s] free on node, free: [%s]", - diskThresholdSettings.getFreeBytesThresholdHigh(), new ByteSizeValue(freeBytes)); + "the shard cannot remain on this node because it is above the high watermark [%s=%s] " + + "and there is less than the required [%s] free space on node, actual free: [%s]", + CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), + diskThresholdSettings.getHighWatermarkRaw(), + 
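The DiskThresholdDecider hunks rework the watermark explanations so a NO decision reports the setting key, the raw configured watermark, the minimum required free space, and the actual free space. The following is a standalone sketch of that explanation shape with made-up numbers and a simplified threshold check; it is not the decider's own arithmetic.

import java.util.Locale;

public class DiskWatermarkMessageSketch {

    static String lowWatermarkNo(String rawLowWatermark, long requiredFreeBytes, long actualFreeBytes) {
        return String.format(Locale.ROOT,
            "the node is above the low watermark [%s=%s], having less than the minimum required [%d bytes] "
                + "free space, actual free: [%d bytes]",
            "cluster.routing.allocation.disk.watermark.low", rawLowWatermark,
            requiredFreeBytes, actualFreeBytes);
    }

    public static void main(String[] args) {
        long total = 100L * 1024 * 1024 * 1024;   // 100gb disk
        long free  = 10L * 1024 * 1024 * 1024;    // 10gb free
        long required = (long) (total * 0.15);    // a "85%" low watermark leaves 15% that must stay free
        if (free < required) {
            System.out.println(lowWatermarkNo("85%", required, free));
        }
    }
}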
diskThresholdSettings.getFreeBytesThresholdHigh(), new ByteSizeValue(freeBytes)); } if (freeDiskPercentage < diskThresholdSettings.getFreeDiskThresholdHigh()) { if (logger.isDebugEnabled()) { @@ -274,9 +291,11 @@ public class DiskThresholdDecider extends AllocationDecider { diskThresholdSettings.getFreeDiskThresholdHigh(), freeDiskPercentage, node.nodeId()); } return allocation.decision(Decision.NO, NAME, - "after allocating this shard this node would be above the high watermark " + - "and there would be less than required [%s%%] free disk on node, free: [%s%%]", - diskThresholdSettings.getFreeDiskThresholdHigh(), freeDiskPercentage); + "the shard cannot remain on this node because it is above the high watermark [%s=%s] " + + "and there is less than the required [%s%%] free disk on node, actual free: [%s%%]", + CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), + diskThresholdSettings.getHighWatermarkRaw(), + diskThresholdSettings.getFreeDiskThresholdHigh(), freeDiskPercentage); } return allocation.decision(Decision.YES, NAME, diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java index 1a38e3742fc..2bb5012da30 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/EnableAllocationDecider.java @@ -98,7 +98,8 @@ public class EnableAllocationDecider extends AllocationDecider { @Override public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { if (allocation.ignoreDisable()) { - return allocation.decision(Decision.YES, NAME, "allocation is explicitly ignoring any disabling of allocation"); + return allocation.decision(Decision.YES, NAME, + "explicitly ignoring any disabling of allocation due to manual allocation commands via the reroute API"); } final IndexMetaData indexMetaData = allocation.metaData().getIndexSafe(shardRouting.index()); diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java index a42db129da9..21b6b3d1354 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/FilterAllocationDecider.java @@ -64,12 +64,15 @@ public class FilterAllocationDecider extends AllocationDecider { public static final String NAME = "filter"; + private static final String CLUSTER_ROUTING_REQUIRE_GROUP_PREFIX = "cluster.routing.allocation.require"; + private static final String CLUSTER_ROUTING_INCLUDE_GROUP_PREFIX = "cluster.routing.allocation.include"; + private static final String CLUSTER_ROUTING_EXCLUDE_GROUP_PREFIX = "cluster.routing.allocation.exclude"; public static final Setting CLUSTER_ROUTING_REQUIRE_GROUP_SETTING = - Setting.groupSetting("cluster.routing.allocation.require.", Property.Dynamic, Property.NodeScope); + Setting.groupSetting(CLUSTER_ROUTING_REQUIRE_GROUP_PREFIX + ".", Property.Dynamic, Property.NodeScope); public static final Setting CLUSTER_ROUTING_INCLUDE_GROUP_SETTING = - Setting.groupSetting("cluster.routing.allocation.include.", Property.Dynamic, Property.NodeScope); + Setting.groupSetting(CLUSTER_ROUTING_INCLUDE_GROUP_PREFIX + 
".", Property.Dynamic, Property.NodeScope); public static final Setting CLUSTER_ROUTING_EXCLUDE_GROUP_SETTING = - Setting.groupSetting("cluster.routing.allocation.exclude.", Property.Dynamic, Property.NodeScope); + Setting.groupSetting(CLUSTER_ROUTING_EXCLUDE_GROUP_PREFIX + ".", Property.Dynamic, Property.NodeScope); private volatile DiscoveryNodeFilters clusterRequireFilters; private volatile DiscoveryNodeFilters clusterIncludeFilters; @@ -96,8 +99,10 @@ public class FilterAllocationDecider extends AllocationDecider { if (initialRecoveryFilters != null && RecoverySource.isInitialRecovery(shardRouting.recoverySource().getType()) && initialRecoveryFilters.match(node.node()) == false) { - return allocation.decision(Decision.NO, NAME, "node does not match index initial recovery filters [%s]", - indexMd.includeFilters()); + String explanation = (shardRouting.recoverySource().getType() == RecoverySource.Type.LOCAL_SHARDS) ? + "initial allocation of the shrunken index is only allowed on nodes [%s] that hold a copy of every shard in the index" : + "initial allocation of the index is only allowed on nodes [%s]"; + return allocation.decision(Decision.NO, NAME, explanation, initialRecoveryFilters); } } return shouldFilter(shardRouting, node, allocation); @@ -136,17 +141,20 @@ public class FilterAllocationDecider extends AllocationDecider { private Decision shouldIndexFilter(IndexMetaData indexMd, RoutingNode node, RoutingAllocation allocation) { if (indexMd.requireFilters() != null) { if (!indexMd.requireFilters().match(node.node())) { - return allocation.decision(Decision.NO, NAME, "node does not match index required filters [%s]", indexMd.requireFilters()); + return allocation.decision(Decision.NO, NAME, "node does not match [%s] filters [%s]", + IndexMetaData.INDEX_ROUTING_REQUIRE_GROUP_PREFIX, indexMd.requireFilters()); } } if (indexMd.includeFilters() != null) { if (!indexMd.includeFilters().match(node.node())) { - return allocation.decision(Decision.NO, NAME, "node does not match index include filters [%s]", indexMd.includeFilters()); + return allocation.decision(Decision.NO, NAME, "node does not match [%s] filters [%s]", + IndexMetaData.INDEX_ROUTING_INCLUDE_GROUP_PREFIX, indexMd.includeFilters()); } } if (indexMd.excludeFilters() != null) { if (indexMd.excludeFilters().match(node.node())) { - return allocation.decision(Decision.NO, NAME, "node matches index exclude filters [%s]", indexMd.excludeFilters()); + return allocation.decision(Decision.NO, NAME, "node matches [%s] filters [%s]", + IndexMetaData.INDEX_ROUTING_EXCLUDE_GROUP_SETTING.getKey(), indexMd.excludeFilters()); } } return null; @@ -155,17 +163,20 @@ public class FilterAllocationDecider extends AllocationDecider { private Decision shouldClusterFilter(RoutingNode node, RoutingAllocation allocation) { if (clusterRequireFilters != null) { if (!clusterRequireFilters.match(node.node())) { - return allocation.decision(Decision.NO, NAME, "node does not match global required filters [%s]", clusterRequireFilters); + return allocation.decision(Decision.NO, NAME, "node does not match [%s] filters [%s]", + CLUSTER_ROUTING_REQUIRE_GROUP_PREFIX, clusterRequireFilters); } } if (clusterIncludeFilters != null) { if (!clusterIncludeFilters.match(node.node())) { - return allocation.decision(Decision.NO, NAME, "node does not match global include filters [%s]", clusterIncludeFilters); + return allocation.decision(Decision.NO, NAME, "node does not [%s] filters [%s]", + CLUSTER_ROUTING_INCLUDE_GROUP_PREFIX, clusterIncludeFilters); } } if 
(clusterExcludeFilters != null) { if (clusterExcludeFilters.match(node.node())) { - return allocation.decision(Decision.NO, NAME, "node matches global exclude filters [%s]", clusterExcludeFilters); + return allocation.decision(Decision.NO, NAME, "node matches [%s] filters [%s]", + CLUSTER_ROUTING_EXCLUDE_GROUP_PREFIX, clusterExcludeFilters); } } return null; diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/RebalanceOnlyWhenActiveAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/RebalanceOnlyWhenActiveAllocationDecider.java index d8042f18a27..c4cd2ecf50d 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/RebalanceOnlyWhenActiveAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/RebalanceOnlyWhenActiveAllocationDecider.java @@ -37,8 +37,8 @@ public class RebalanceOnlyWhenActiveAllocationDecider extends AllocationDecider @Override public Decision canRebalance(ShardRouting shardRouting, RoutingAllocation allocation) { if (!allocation.routingNodes().allReplicasActive(shardRouting.shardId(), allocation.metaData())) { - return allocation.decision(Decision.NO, NAME, "rebalancing can not occur if not all replicas are active in the cluster"); + return allocation.decision(Decision.NO, NAME, "rebalancing is not allowed until all replicas in the cluster are active"); } - return allocation.decision(Decision.YES, NAME, "all replicas are active in the cluster, rebalancing can occur"); + return allocation.decision(Decision.YES, NAME, "rebalancing is allowed as all replicas are active in the cluster"); } } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SameShardAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SameShardAllocationDecider.java index 3f2921dfcdc..afd742dd041 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SameShardAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SameShardAllocationDecider.java @@ -61,8 +61,15 @@ public class SameShardAllocationDecider extends AllocationDecider { Iterable assignedShards = allocation.routingNodes().assignedShards(shardRouting.shardId()); for (ShardRouting assignedShard : assignedShards) { if (node.nodeId().equals(assignedShard.currentNodeId())) { - return allocation.decision(Decision.NO, NAME, - "the shard cannot be allocated on the same node id [%s] on which it already exists", node.nodeId()); + if (assignedShard.isSameAllocation(shardRouting)) { + return allocation.decision(Decision.NO, NAME, + "the shard cannot be allocated to the node on which it already exists [%s]", + shardRouting.toString()); + } else { + return allocation.decision(Decision.NO, NAME, + "the shard cannot be allocated to the same node on which a copy of the shard [%s] already exists", + assignedShard.toString()); + } } } if (sameHost) { @@ -72,27 +79,32 @@ public class SameShardAllocationDecider extends AllocationDecider { continue; } // check if its on the same host as the one we want to allocate to - boolean checkNodeOnSameHost = false; + boolean checkNodeOnSameHostName = false; + boolean checkNodeOnSameHostAddress = false; if (Strings.hasLength(checkNode.node().getHostAddress()) && Strings.hasLength(node.node().getHostAddress())) { if (checkNode.node().getHostAddress().equals(node.node().getHostAddress())) { - checkNodeOnSameHost = true; + 
checkNodeOnSameHostAddress = true; } } else if (Strings.hasLength(checkNode.node().getHostName()) && Strings.hasLength(node.node().getHostName())) { if (checkNode.node().getHostName().equals(node.node().getHostName())) { - checkNodeOnSameHost = true; + checkNodeOnSameHostName = true; } } - if (checkNodeOnSameHost) { + if (checkNodeOnSameHostAddress || checkNodeOnSameHostName) { for (ShardRouting assignedShard : assignedShards) { if (checkNode.nodeId().equals(assignedShard.currentNodeId())) { + String hostType = checkNodeOnSameHostAddress ? "address" : "name"; + String host = checkNodeOnSameHostAddress ? node.node().getHostAddress() : node.node().getHostName(); return allocation.decision(Decision.NO, NAME, - "shard cannot be allocated on the same host [%s] on which it already exists", node.nodeId()); + "the shard cannot be allocated on host %s [%s], where it already exists on node [%s]; " + + "set [%s] to false to allow multiple nodes on the same host to hold the same shard copies", + hostType, host, node.nodeId(), CLUSTER_ROUTING_ALLOCATION_SAME_HOST_SETTING.getKey()); } } } } } } - return allocation.decision(Decision.YES, NAME, "shard is not allocated to same node or host"); + return allocation.decision(Decision.YES, NAME, "the shard does not exist on the same " + (sameHost ? "host" : "node")); } } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java index aa4fe3d593d..15f2cf20c1a 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ShardsLimitAllocationDecider.java @@ -107,17 +107,18 @@ public class ShardsLimitAllocationDecider extends AllocationDecider { } } if (clusterShardLimit > 0 && nodeShardCount >= clusterShardLimit) { - return allocation.decision(Decision.NO, NAME, "too many shards for this node [%d], cluster-level limit per node: [%d]", - nodeShardCount, clusterShardLimit); + return allocation.decision(Decision.NO, NAME, + "too many shards [%d] allocated to this node, [%s=%d]", + nodeShardCount, CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING.getKey(), clusterShardLimit); } if (indexShardLimit > 0 && indexShardCount >= indexShardLimit) { return allocation.decision(Decision.NO, NAME, - "too many shards for this index [%s] on node [%d], index-level limit per node: [%d]", - shardRouting.index(), indexShardCount, indexShardLimit); + "too many shards [%d] allocated to this node for index [%s], [%s=%d]", + indexShardCount, shardRouting.getIndexName(), INDEX_TOTAL_SHARDS_PER_NODE_SETTING.getKey(), indexShardLimit); } return allocation.decision(Decision.YES, NAME, - "the shard count is under index limit [%d] and cluster level node limit [%d] of total shards per node", - indexShardLimit, clusterShardLimit); + "the shard count [%d] for this node is under the index limit [%d] and cluster level node limit [%d]", + nodeShardCount, indexShardLimit, clusterShardLimit); } @Override @@ -148,17 +149,18 @@ public class ShardsLimitAllocationDecider extends AllocationDecider { // Subtle difference between the `canAllocate` and `canRemain` is that // this checks > while canAllocate checks >= if (clusterShardLimit > 0 && nodeShardCount > clusterShardLimit) { - return allocation.decision(Decision.NO, NAME, "too many shards for this node [%d], cluster-level limit per node: [%d]", - nodeShardCount, 
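The SameShardAllocationDecider hunk splits the old single "same host" flag into address and name matches so the explanation can say which attribute collided and point at the controlling setting. A standalone sketch of that distinction, with illustrative host values:

import java.util.Locale;

public class SameHostSketch {

    static String explain(String hostAddressA, String hostNameA,
                          String hostAddressB, String hostNameB, String nodeId) {
        boolean sameAddress = !hostAddressA.isEmpty() && hostAddressA.equals(hostAddressB);
        boolean sameName = !sameAddress && !hostNameA.isEmpty() && hostNameA.equals(hostNameB);
        if (sameAddress || sameName) {
            String hostType = sameAddress ? "address" : "name";
            String host = sameAddress ? hostAddressB : hostNameB;
            return String.format(Locale.ROOT,
                "the shard cannot be allocated on host %s [%s], where it already exists on node [%s]; "
                    + "set [%s] to false to allow multiple nodes on the same host to hold the same shard copies",
                hostType, host, nodeId, "cluster.routing.allocation.same_shard.host");
        }
        return "the shard does not exist on the same host";
    }

    public static void main(String[] args) {
        System.out.println(explain("10.0.0.5", "es-node-1", "10.0.0.5", "es-node-2", "node_2"));
    }
}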
clusterShardLimit); + return allocation.decision(Decision.NO, NAME, + "too many shards [%d] allocated to this node, [%s=%d]", + nodeShardCount, CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING.getKey(), clusterShardLimit); } if (indexShardLimit > 0 && indexShardCount > indexShardLimit) { return allocation.decision(Decision.NO, NAME, - "too many shards for this index [%s] on node [%d], index-level limit per node: [%d]", - shardRouting.index(), indexShardCount, indexShardLimit); + "too many shards [%d] allocated to this node for index [%s], [%s=%d]", + indexShardCount, shardRouting.getIndexName(), INDEX_TOTAL_SHARDS_PER_NODE_SETTING.getKey(), indexShardLimit); } return allocation.decision(Decision.YES, NAME, - "the shard count is under index limit [%d] and cluster level node limit [%d] of total shards per node", - indexShardLimit, clusterShardLimit); + "the shard count [%d] for this node is under the index limit [%d] and cluster level node limit [%d]", + nodeShardCount, indexShardLimit, clusterShardLimit); } @Override @@ -182,10 +184,12 @@ public class ShardsLimitAllocationDecider extends AllocationDecider { nodeShardCount++; } if (clusterShardLimit >= 0 && nodeShardCount >= clusterShardLimit) { - return allocation.decision(Decision.NO, NAME, "too many shards for this node [%d], cluster-level limit per node: [%d]", - nodeShardCount, clusterShardLimit); + return allocation.decision(Decision.NO, NAME, + "too many shards [%d] allocated to this node, [%s=%d]", + nodeShardCount, CLUSTER_TOTAL_SHARDS_PER_NODE_SETTING.getKey(), clusterShardLimit); } - return allocation.decision(Decision.YES, NAME, "the shard count is under node limit [%d] of total shards per node", - clusterShardLimit); + return allocation.decision(Decision.YES, NAME, + "the shard count [%d] for this node is under the cluster level node limit [%d]", + nodeShardCount, clusterShardLimit); } } diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java index 3c20f1ec062..18ee6395bd4 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/SnapshotInProgressAllocationDecider.java @@ -77,15 +77,16 @@ public class SnapshotInProgressAllocationDecider extends AllocationDecider { if (shardSnapshotStatus != null && !shardSnapshotStatus.state().completed() && shardSnapshotStatus.nodeId() != null && shardSnapshotStatus.nodeId().equals(shardRouting.currentNodeId())) { if (logger.isTraceEnabled()) { - logger.trace("Preventing snapshotted shard [{}] to be moved from node [{}]", + logger.trace("Preventing snapshotted shard [{}] from being moved away from node [{}]", shardRouting.shardId(), shardSnapshotStatus.nodeId()); } - return allocation.decision(Decision.NO, NAME, "snapshot for shard [%s] is currently running on node [%s]", - shardRouting.shardId(), shardSnapshotStatus.nodeId()); + return allocation.decision(Decision.THROTTLE, NAME, + "waiting for snapshotting of shard [%s] to complete on this node [%s]", + shardRouting.shardId(), shardSnapshotStatus.nodeId()); } } } - return allocation.decision(Decision.YES, NAME, "the shard is not primary or relocation is disabled"); + return allocation.decision(Decision.YES, NAME, "the shard is not being snapshotted"); } } diff --git 
a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java index df2e1d12234..a59f543ac3f 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/ThrottlingAllocationDecider.java @@ -126,8 +126,9 @@ public class ThrottlingAllocationDecider extends AllocationDecider { } if (primariesInRecovery >= primariesInitialRecoveries) { // TODO: Should index creation not be throttled for primary shards? - return allocation.decision(THROTTLE, NAME, "too many primaries are currently recovering [%d], limit: [%d]", - primariesInRecovery, primariesInitialRecoveries); + return allocation.decision(THROTTLE, NAME, "reached the limit of ongoing initial primary recoveries [%d], [%s=%d]", + primariesInRecovery, CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING.getKey(), + primariesInitialRecoveries); } else { return allocation.decision(YES, NAME, "below primary recovery limit of [%d]", primariesInitialRecoveries); } @@ -138,8 +139,11 @@ public class ThrottlingAllocationDecider extends AllocationDecider { // Allocating a shard to this node will increase the incoming recoveries int currentInRecoveries = allocation.routingNodes().getIncomingRecoveries(node.nodeId()); if (currentInRecoveries >= concurrentIncomingRecoveries) { - return allocation.decision(THROTTLE, NAME, "too many incoming shards are currently recovering [%d], limit: [%d]", - currentInRecoveries, concurrentIncomingRecoveries); + return allocation.decision(THROTTLE, NAME, + "reached the limit of incoming shard recoveries [%d], [%s=%d] (can also be set via [%s])", + currentInRecoveries, CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_INCOMING_RECOVERIES_SETTING.getKey(), + concurrentIncomingRecoveries, + CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING.getKey()); } else { // search for corresponding recovery source (= primary shard) and check number of outgoing recoveries on that node ShardRouting primaryShard = allocation.routingNodes().activePrimary(shardRouting.shardId()); @@ -148,8 +152,13 @@ public class ThrottlingAllocationDecider extends AllocationDecider { } int primaryNodeOutRecoveries = allocation.routingNodes().getOutgoingRecoveries(primaryShard.currentNodeId()); if (primaryNodeOutRecoveries >= concurrentOutgoingRecoveries) { - return allocation.decision(THROTTLE, NAME, "too many outgoing shards are currently recovering [%d], limit: [%d]", - primaryNodeOutRecoveries, concurrentOutgoingRecoveries); + return allocation.decision(THROTTLE, NAME, + "reached the limit of outgoing shard recoveries [%d] on the node [%s] which holds the primary, " + + "[%s=%d] (can also be set via [%s])", + primaryNodeOutRecoveries, node.nodeId(), + CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_OUTGOING_RECOVERIES_SETTING.getKey(), + concurrentOutgoingRecoveries, + CLUSTER_ROUTING_ALLOCATION_NODE_CONCURRENT_RECOVERIES_SETTING.getKey()); } else { return allocation.decision(YES, NAME, "below shard recovery limit of outgoing: [%d < %d] incoming: [%d < %d]", primaryNodeOutRecoveries, diff --git a/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java b/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java index d33616cbe60..2046b1a6e14 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java +++ 
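The ThrottlingAllocationDecider hunk (and the ConcurrentRebalance one earlier) return THROTTLE rather than NO when a concurrency limit is hit, signalling "retry later" instead of "never", and the message names the limiting setting. A plain-JDK sketch of that decision shape, with an assumed limit value:

import java.util.Locale;

public class RecoveryThrottleSketch {

    enum Decision { YES, THROTTLE, NO }

    static String decide(int incomingRecoveries, int limit) {
        if (incomingRecoveries >= limit) {
            return Decision.THROTTLE + ": " + String.format(Locale.ROOT,
                "reached the limit of incoming shard recoveries [%d], [%s=%d]",
                incomingRecoveries, "cluster.routing.allocation.node_concurrent_incoming_recoveries", limit);
        }
        return Decision.YES + ": below the incoming recovery limit of [" + limit + "]";
    }

    public static void main(String[] args) {
        System.out.println(decide(2, 2));   // THROTTLE with the setting key in the explanation
        System.out.println(decide(1, 2));   // YES
    }
}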
b/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java @@ -26,7 +26,6 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.Token; -import org.elasticsearch.index.mapper.GeoPointFieldMapper; import java.io.IOException; @@ -41,9 +40,9 @@ public class GeoUtils { /** Minimum valid longitude in degrees. */ public static final double MIN_LON = -180.0; - public static final String LATITUDE = GeoPointFieldMapper.Names.LAT; - public static final String LONGITUDE = GeoPointFieldMapper.Names.LON; - public static final String GEOHASH = GeoPointFieldMapper.Names.GEOHASH; + public static final String LATITUDE = "lat"; + public static final String LONGITUDE = "lon"; + public static final String GEOHASH = "geohash"; /** Earth ellipsoid major axis defined by WGS 84 in meters */ public static final double EARTH_SEMI_MAJOR_AXIS = 6378137.0; // meters (WGS 84) diff --git a/core/src/main/java/org/elasticsearch/common/io/stream/BytesStreamOutput.java b/core/src/main/java/org/elasticsearch/common/io/stream/BytesStreamOutput.java index 21de0c421b7..e65e8efb27b 100644 --- a/core/src/main/java/org/elasticsearch/common/io/stream/BytesStreamOutput.java +++ b/core/src/main/java/org/elasticsearch/common/io/stream/BytesStreamOutput.java @@ -75,7 +75,7 @@ public class BytesStreamOutput extends StreamOutput implements BytesStream { } @Override - public void writeBytes(byte[] b, int offset, int length) throws IOException { + public void writeBytes(byte[] b, int offset, int length) { // nothing to copy if (length == 0) { return; diff --git a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index fabce2d7562..f6d0ffa0bd2 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -73,7 +73,6 @@ import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.indices.store.IndicesStore; -import org.elasticsearch.indices.ttl.IndicesTTLService; import org.elasticsearch.monitor.fs.FsService; import org.elasticsearch.monitor.jvm.JvmGcMonitorService; import org.elasticsearch.monitor.jvm.JvmService; @@ -185,7 +184,6 @@ public final class ClusterSettings extends AbstractScopedSettings { IndicesQueryCache.INDICES_CACHE_QUERY_SIZE_SETTING, IndicesQueryCache.INDICES_CACHE_QUERY_COUNT_SETTING, IndicesQueryCache.INDICES_QUERIES_CACHE_ALL_SEGMENTS_SETTING, - IndicesTTLService.INDICES_TTL_INTERVAL_SETTING, MappingUpdatedAction.INDICES_MAPPING_DYNAMIC_TIMEOUT_SETTING, MetaData.SETTING_READ_ONLY_SETTING, RecoverySettings.INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING, diff --git a/core/src/main/java/org/elasticsearch/common/transport/TransportAddress.java b/core/src/main/java/org/elasticsearch/common/transport/TransportAddress.java index 516f3286d35..4881398823d 100644 --- a/core/src/main/java/org/elasticsearch/common/transport/TransportAddress.java +++ b/core/src/main/java/org/elasticsearch/common/transport/TransportAddress.java @@ -78,7 +78,13 @@ public final class TransportAddress implements Writeable { final int len = in.readByte(); final byte[] a = new byte[len]; // 4 bytes (IPv4) or 16 bytes 
(IPv6) in.readFully(a); - InetAddress inetAddress = InetAddress.getByAddress(a); + final InetAddress inetAddress; + if (in.getVersion().onOrAfter(Version.V_5_0_3_UNRELEASED)) { + String host = in.readString(); + inetAddress = InetAddress.getByAddress(host, a); + } else { + inetAddress = InetAddress.getByAddress(a); + } int port = in.readInt(); this.address = new InetSocketAddress(inetAddress, port); } @@ -91,6 +97,9 @@ public final class TransportAddress implements Writeable { byte[] bytes = address.getAddress().getAddress(); // 4 bytes (IPv4) or 16 bytes (IPv6) out.writeByte((byte) bytes.length); // 1 byte out.write(bytes, 0, bytes.length); + if (out.getVersion().onOrAfter(Version.V_5_0_3_UNRELEASED)) { + out.writeString(address.getHostString()); + } // don't serialize scope ids over the network!!!! // these only make sense with respect to the local machine, and will only formulate // the address incorrectly remotely. diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/MembershipAction.java b/core/src/main/java/org/elasticsearch/discovery/zen/MembershipAction.java index 7ff8f935927..c8527168198 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/MembershipAction.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/MembershipAction.java @@ -19,14 +19,16 @@ package org.elasticsearch.discovery.zen; +import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.discovery.zen.DiscoveryNodesProvider; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.EmptyTransportResponseHandler; import org.elasticsearch.transport.TransportChannel; @@ -37,6 +39,7 @@ import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; public class MembershipAction extends AbstractComponent { @@ -58,21 +61,20 @@ public class MembershipAction extends AbstractComponent { private final TransportService transportService; - private final DiscoveryNodesProvider nodesProvider; - private final MembershipListener listener; public MembershipAction(Settings settings, TransportService transportService, - DiscoveryNodesProvider nodesProvider, MembershipListener listener) { + Supplier localNodeSupplier, MembershipListener listener) { super(settings); this.transportService = transportService; - this.nodesProvider = nodesProvider; this.listener = listener; + transportService.registerRequestHandler(DISCOVERY_JOIN_ACTION_NAME, JoinRequest::new, ThreadPool.Names.GENERIC, new JoinRequestRequestHandler()); - transportService.registerRequestHandler(DISCOVERY_JOIN_VALIDATE_ACTION_NAME, ValidateJoinRequest::new, - ThreadPool.Names.GENERIC, new ValidateJoinRequestRequestHandler()); + transportService.registerRequestHandler(DISCOVERY_JOIN_VALIDATE_ACTION_NAME, + () -> new ValidateJoinRequest(localNodeSupplier), ThreadPool.Names.GENERIC, + new ValidateJoinRequestRequestHandler()); transportService.registerRequestHandler(DISCOVERY_LEAVE_ACTION_NAME, LeaveRequest::new, ThreadPool.Names.GENERIC, new 
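The TransportAddress hunk adds a host string to the wire format, guarded by a version check on both the write and read side so old and new nodes stay compatible. The sketch below shows the same pattern with plain java.io streams and a made-up version constant; it is not the StreamInput/StreamOutput code itself.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.net.InetAddress;

public class VersionedAddressSketch {

    static final int V_WITH_HOST_STRING = 50003;   // stand-in for the first version carrying the host string

    static byte[] write(int wireVersion, InetAddress address) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bytes);
        byte[] raw = address.getAddress();          // 4 bytes (IPv4) or 16 bytes (IPv6)
        out.writeByte(raw.length);
        out.write(raw);
        if (wireVersion >= V_WITH_HOST_STRING) {
            out.writeUTF(address.getHostName());    // the extra field added by this change
        }
        return bytes.toByteArray();
    }

    static InetAddress read(int wireVersion, byte[] data) throws IOException {
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(data));
        byte[] raw = new byte[in.readByte()];
        in.readFully(raw);
        if (wireVersion >= V_WITH_HOST_STRING) {
            return InetAddress.getByAddress(in.readUTF(), raw);   // keep the host name, skip reverse DNS
        }
        return InetAddress.getByAddress(raw);
    }

    public static void main(String[] args) throws IOException {
        InetAddress original = InetAddress.getByAddress("my-host", new byte[] {10, 0, 0, 1});
        System.out.println(read(V_WITH_HOST_STRING, write(V_WITH_HOST_STRING, original)));   // my-host/10.0.0.1
    }
}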
LeaveRequestRequestHandler()); } @@ -152,20 +154,23 @@ public class MembershipAction extends AbstractComponent { } } - class ValidateJoinRequest extends TransportRequest { + static class ValidateJoinRequest extends TransportRequest { + private final Supplier localNode; private ClusterState state; - ValidateJoinRequest() { + ValidateJoinRequest(Supplier localNode) { + this.localNode = localNode; } ValidateJoinRequest(ClusterState state) { this.state = state; + this.localNode = state.nodes()::getLocalNode; } @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - this.state = ClusterState.Builder.readFrom(in, nodesProvider.nodes().getLocalNode()); + this.state = ClusterState.Builder.readFrom(in, localNode.get()); } @Override @@ -175,15 +180,31 @@ public class MembershipAction extends AbstractComponent { } } - class ValidateJoinRequestRequestHandler implements TransportRequestHandler { + static class ValidateJoinRequestRequestHandler implements TransportRequestHandler { @Override public void messageReceived(ValidateJoinRequest request, TransportChannel channel) throws Exception { + ensureIndexCompatibility(Version.CURRENT.minimumIndexCompatibilityVersion(), request.state.getMetaData()); // for now, the mere fact that we can serialize the cluster state acts as validation.... channel.sendResponse(TransportResponse.Empty.INSTANCE); } } + /** + * Ensures that all indices are compatible with the supported index version. + * @throws IllegalStateException if any index is incompatible with the given version + */ + static void ensureIndexCompatibility(final Version supportedIndexVersion, MetaData metaData) { + // we ensure that all indices in the cluster we join are compatible with us no matter if they are + // closed or not we can't read mappings of these indices so we need to reject the join... 
+ for (IndexMetaData idxMetaData : metaData) { + if (idxMetaData.getCreationVersion().before(supportedIndexVersion)) { + throw new IllegalStateException("index " + idxMetaData.getIndex() + " version not supported: " + + idxMetaData.getCreationVersion() + " minimum compatible index version is: " + supportedIndexVersion); + } + } + } + public static class LeaveRequest extends TransportRequest { private DiscoveryNode node; diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/NodeJoinController.java b/core/src/main/java/org/elasticsearch/discovery/zen/NodeJoinController.java index 6d77e2f48fe..4dce87aa86e 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/NodeJoinController.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/NodeJoinController.java @@ -410,7 +410,6 @@ public class NodeJoinController extends AbstractComponent { @Override public BatchResult execute(ClusterState currentState, List joiningNodes) throws Exception { final BatchResult.Builder results = BatchResult.builder(); - final DiscoveryNodes currentNodes = currentState.nodes(); boolean nodesChanged = false; ClusterState.Builder newState; @@ -435,8 +434,10 @@ public class NodeJoinController extends AbstractComponent { assert nodesBuilder.isLocalNodeElectedMaster(); + Version minNodeVersion = Version.CURRENT; // processing any joins for (final DiscoveryNode node : joiningNodes) { + minNodeVersion = Version.min(minNodeVersion, node.getVersion()); if (node.equals(BECOME_MASTER_TASK) || node.equals(FINISH_ELECTION_TASK)) { // noop } else if (currentNodes.nodeExists(node)) { @@ -452,7 +453,9 @@ public class NodeJoinController extends AbstractComponent { } results.success(node); } - + // we do this validation quite late to prevent race conditions between nodes joining and importing dangling indices + // we have to reject nodes that don't support all indices we have in this cluster + MembershipAction.ensureIndexCompatibility(minNodeVersion.minimumIndexCompatibilityVersion(), currentState.getMetaData()); if (nodesChanged) { newState.nodes(nodesBuilder); return results.build(allocationService.reroute(newState.build(), "node_join")); diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java b/core/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java index eec9548dd08..bb5ecf6b123 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java @@ -64,6 +64,7 @@ import java.util.Collection; import java.util.HashSet; import java.util.Iterator; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.Queue; @@ -468,7 +469,7 @@ public class UnicastZenPing extends AbstractComponent implements ZenPing { // connect to the node, see if we manage to do it, if not, bail if (!nodeFoundByAddress) { logger.trace("[{}] connecting (light) to {}", sendPingsHandler.id(), finalNodeToSend); - transportService.connectToNodeLightAndHandshake(finalNodeToSend, timeout.getMillis()); + transportService.connectToNodeAndHandshake(finalNodeToSend, timeout.getMillis()); } else { logger.trace("[{}] connecting to {}", sendPingsHandler.id(), finalNodeToSend); transportService.connectToNode(finalNodeToSend); @@ -584,7 +585,6 @@ public class UnicastZenPing extends AbstractComponent implements ZenPing { List pingResponses = CollectionUtils.iterableAsArrayList(temporalResponses); pingResponses.add(createPingResponse(contextProvider.nodes())); - 
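The new ensureIndexCompatibility check above rejects a join whenever any index in the cluster was created before the version the joining node can still read. A standalone sketch of that validation, with index versions reduced to plain integers for illustration:

import java.util.Map;

public class IndexCompatibilitySketch {

    static void ensureIndexCompatibility(int minimumSupportedIndexVersion, Map<String, Integer> indexCreationVersions) {
        for (Map.Entry<String, Integer> index : indexCreationVersions.entrySet()) {
            if (index.getValue() < minimumSupportedIndexVersion) {
                throw new IllegalStateException("index " + index.getKey() + " version not supported: "
                    + index.getValue() + " minimum compatible index version is: " + minimumSupportedIndexVersion);
            }
        }
    }

    public static void main(String[] args) {
        ensureIndexCompatibility(5, Map.of("logs-2017", 5));   // passes
        ensureIndexCompatibility(5, Map.of("logs-2015", 2));   // throws IllegalStateException
    }
}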
UnicastPingResponse unicastPingResponse = new UnicastPingResponse(); unicastPingResponse.id = request.id; unicastPingResponse.pingResponses = pingResponses.toArray(new PingResponse[pingResponses.size()]); @@ -596,8 +596,18 @@ public class UnicastZenPing extends AbstractComponent implements ZenPing { @Override public void messageReceived(UnicastPingRequest request, TransportChannel channel) throws Exception { - channel.sendResponse(handlePingRequest(request)); + if (request.pingResponse.clusterName().equals(clusterName)) { + channel.sendResponse(handlePingRequest(request)); + } else { + throw new IllegalStateException( + String.format( + Locale.ROOT, + "mismatched cluster names; request: [%s], local: [%s]", + request.pingResponse.clusterName().value(), + clusterName.value())); + } } + } public static class UnicastPingRequest extends TransportRequest { diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java index 272a75f4e7a..5634f5d89a0 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java @@ -43,12 +43,10 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.component.Lifecycle; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.inject.internal.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lease.Releasables; -import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; @@ -185,7 +183,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover new NewPendingClusterStateListener(), discoverySettings, clusterService.getClusterName()); - this.membership = new MembershipAction(settings, transportService, this, new MembershipListener()); + this.membership = new MembershipAction(settings, transportService, this::localNode, new MembershipListener()); this.joinThreadControl = new JoinThreadControl(threadPool); transportService.registerRequestHandler( @@ -303,7 +301,6 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover if (!clusterChangedEvent.state().getNodes().isLocalNodeElectedMaster()) { throw new IllegalStateException("Shouldn't publish state when not master"); } - try { publishClusterState.publish(clusterChangedEvent, electMaster.minimumMasterNodes(), ackListener); } catch (FailedToCommitClusterStateException t) { @@ -851,12 +848,12 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover } void handleJoinRequest(final DiscoveryNode node, final ClusterState state, final MembershipAction.JoinCallback callback) { - if (!transportService.addressSupported(node.getAddress().getClass())) { - // TODO, what should we do now? Maybe inform that node that its crap? - logger.warn("received a wrong address type from [{}], ignoring...", node); - } else if (nodeJoinController == null) { + if (nodeJoinController == null) { throw new IllegalStateException("discovery module is not yet started"); } else { + // we do this in a couple of places including the cluster update thread. 
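The UnicastZenPing hunk above stops answering pings from a different cluster: a mismatched cluster name now raises an exception instead of returning a ping response. A plain-JDK sketch of that guard, with illustrative cluster names:

import java.util.Locale;

public class PingClusterNameSketch {

    static String handlePing(String requestClusterName, String localClusterName) {
        if (!requestClusterName.equals(localClusterName)) {
            throw new IllegalStateException(String.format(Locale.ROOT,
                "mismatched cluster names; request: [%s], local: [%s]", requestClusterName, localClusterName));
        }
        return "ping accepted for cluster [" + localClusterName + "]";
    }

    public static void main(String[] args) {
        System.out.println(handlePing("prod-cluster", "prod-cluster"));
        System.out.println(handlePing("staging-cluster", "prod-cluster"));   // throws IllegalStateException
    }
}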
This one here is really just best effort + // to ensure we fail as fast as possible. + MembershipAction.ensureIndexCompatibility(node.getVersion().minimumIndexCompatibilityVersion(), state.getMetaData()); // try and connect to the node, if it fails, we can raise an exception back to the client... transportService.connectToNode(node); diff --git a/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java b/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java index b609d0bacae..98b39cd2c8a 100644 --- a/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java +++ b/core/src/main/java/org/elasticsearch/gateway/GatewayMetaState.java @@ -245,7 +245,8 @@ public class GatewayMetaState extends AbstractComponent implements ClusterStateL boolean changed = false; final MetaData.Builder upgradedMetaData = MetaData.builder(metaData); for (IndexMetaData indexMetaData : metaData) { - IndexMetaData newMetaData = metaDataIndexUpgradeService.upgradeIndexMetaData(indexMetaData); + IndexMetaData newMetaData = metaDataIndexUpgradeService.upgradeIndexMetaData(indexMetaData, + Version.CURRENT.minimumIndexCompatibilityVersion()); changed |= indexMetaData != newMetaData; upgradedMetaData.put(newMetaData, false); } diff --git a/core/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java b/core/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java index dcaccb88269..b1891191500 100644 --- a/core/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java +++ b/core/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java @@ -21,6 +21,7 @@ package org.elasticsearch.gateway; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; +import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.block.ClusterBlocks; @@ -28,6 +29,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.MetaDataIndexUpgradeService; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.ClusterService; @@ -126,10 +128,18 @@ public class LocalAllocateDangledIndices extends AbstractComponent { MetaData.Builder metaData = MetaData.builder(currentState.metaData()); ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); RoutingTable.Builder routingTableBuilder = RoutingTable.builder(currentState.routingTable()); - + final Version minIndexCompatibilityVersion = currentState.getNodes().getMaxNodeVersion() + .minimumIndexCompatibilityVersion(); boolean importNeeded = false; StringBuilder sb = new StringBuilder(); for (IndexMetaData indexMetaData : request.indices) { + if (indexMetaData.getCreationVersion().before(minIndexCompatibilityVersion)) { + logger.warn("ignoring dangled index [{}] on node [{}]" + + " since it's created version [{}] is not supported by at least one node in the cluster minVersion [{}]", + indexMetaData.getIndex(), request.fromNode, indexMetaData.getCreationVersion(), + minIndexCompatibilityVersion); + continue; + } if (currentState.metaData().hasIndex(indexMetaData.getIndex().getName())) { continue; } @@ -144,7 +154,8 @@ public 
class LocalAllocateDangledIndices extends AbstractComponent { try { // The dangled index might be from an older version, we need to make sure it's compatible // with the current version and upgrade it if needed. - upgradedIndexMetaData = metaDataIndexUpgradeService.upgradeIndexMetaData(indexMetaData); + upgradedIndexMetaData = metaDataIndexUpgradeService.upgradeIndexMetaData(indexMetaData, + minIndexCompatibilityVersion); } catch (Exception ex) { // upgrade failed - adding index as closed logger.warn((Supplier) () -> new ParameterizedMessage("found dangled index [{}] on node [{}]. This index cannot be upgraded to the latest version, adding as closed", indexMetaData.getIndex(), request.fromNode), ex); diff --git a/core/src/main/java/org/elasticsearch/index/IndexModule.java b/core/src/main/java/org/elasticsearch/index/IndexModule.java index e25ec31ffb0..8389335d889 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexModule.java +++ b/core/src/main/java/org/elasticsearch/index/IndexModule.java @@ -295,9 +295,7 @@ public final class IndexModule { NIOFS, MMAPFS, SIMPLEFS, - FS, - @Deprecated - DEFAULT; + FS; public String getSettingsKey() { return this.name().toLowerCase(Locale.ROOT); diff --git a/core/src/main/java/org/elasticsearch/index/IndexSettings.java b/core/src/main/java/org/elasticsearch/index/IndexSettings.java index bc8f05275eb..dbe6d3505a4 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexSettings.java +++ b/core/src/main/java/org/elasticsearch/index/IndexSettings.java @@ -24,7 +24,6 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; @@ -40,7 +39,6 @@ import java.util.Locale; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; import java.util.function.Function; -import java.util.function.Predicate; /** * This class encapsulates all index level settings and handles settings updates. @@ -147,7 +145,6 @@ public final class IndexSettings { private final boolean queryStringAnalyzeWildcard; private final boolean queryStringAllowLeadingWildcard; private final boolean defaultAllowUnmappedFields; - private final Predicate indexNameMatcher; private volatile Translog.Durability durability; private final TimeValue syncInterval; private volatile TimeValue refreshInterval; @@ -214,7 +211,7 @@ public final class IndexSettings { * @param nodeSettings the nodes settings this index is allocated on. */ public IndexSettings(final IndexMetaData indexMetaData, final Settings nodeSettings) { - this(indexMetaData, nodeSettings, (index) -> Regex.simpleMatch(index, indexMetaData.getIndex().getName()), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS); + this(indexMetaData, nodeSettings, IndexScopedSettings.DEFAULT_SCOPED_SETTINGS); } /** @@ -223,9 +220,8 @@ public final class IndexSettings { * * @param indexMetaData the index metadata this settings object is associated with * @param nodeSettings the nodes settings this index is allocated on. 
- * @param indexNameMatcher a matcher that can resolve an expression to the index name or index alias */ - public IndexSettings(final IndexMetaData indexMetaData, final Settings nodeSettings, final Predicate indexNameMatcher, IndexScopedSettings indexScopedSettings) { + public IndexSettings(final IndexMetaData indexMetaData, final Settings nodeSettings, IndexScopedSettings indexScopedSettings) { scopedSettings = indexScopedSettings.copy(nodeSettings, indexMetaData); this.nodeSettings = nodeSettings; this.settings = Settings.builder().put(nodeSettings).put(indexMetaData.getSettings()).build(); @@ -243,7 +239,6 @@ public final class IndexSettings { this.queryStringAllowLeadingWildcard = QUERY_STRING_ALLOW_LEADING_WILDCARD.get(nodeSettings); this.parseFieldMatcher = new ParseFieldMatcher(settings); this.defaultAllowUnmappedFields = scopedSettings.get(ALLOW_UNMAPPED); - this.indexNameMatcher = indexNameMatcher; this.durability = scopedSettings.get(INDEX_TRANSLOG_DURABILITY_SETTING); syncInterval = INDEX_TRANSLOG_SYNC_INTERVAL_SETTING.get(settings); refreshInterval = scopedSettings.get(INDEX_REFRESH_INTERVAL_SETTING); @@ -258,7 +253,6 @@ public final class IndexSettings { maxRefreshListeners = scopedSettings.get(MAX_REFRESH_LISTENERS_PER_SHARD); maxSlicesPerScroll = scopedSettings.get(MAX_SLICES_PER_SCROLL); this.mergePolicyConfig = new MergePolicyConfig(logger, this); - assert indexNameMatcher.test(indexMetaData.getIndex().getName()); scopedSettings.addSettingsUpdateConsumer(MergePolicyConfig.INDEX_COMPOUND_FORMAT_SETTING, mergePolicyConfig::setNoCFSRatio); scopedSettings.addSettingsUpdateConsumer(MergePolicyConfig.INDEX_MERGE_POLICY_EXPUNGE_DELETES_ALLOWED_SETTING, mergePolicyConfig::setExpungeDeletesAllowed); @@ -282,7 +276,6 @@ public final class IndexSettings { scopedSettings.addSettingsUpdateConsumer(INDEX_REFRESH_INTERVAL_SETTING, this::setRefreshInterval); scopedSettings.addSettingsUpdateConsumer(MAX_REFRESH_LISTENERS_PER_SHARD, this::setMaxRefreshListeners); scopedSettings.addSettingsUpdateConsumer(MAX_SLICES_PER_SCROLL, this::setMaxSlicesPerScroll); - } private void setTranslogFlushThresholdSize(ByteSizeValue byteSizeValue) { @@ -400,13 +393,6 @@ public final class IndexSettings { */ public ParseFieldMatcher getParseFieldMatcher() { return parseFieldMatcher; } - /** - * Returns true if the given expression matches the index name or one of it's aliases - */ - public boolean matchesIndexName(String expression) { - return indexNameMatcher.test(expression); - } - /** * Updates the settings and index metadata and notifies all registered settings consumers with the new settings iff at least one setting has changed. * diff --git a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java index 6dddf6eb57f..87f9692f625 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java @@ -158,11 +158,12 @@ public final class AnalysisRegistry implements Closeable { final Map tokenFiltersSettings = indexSettings.getSettings().getGroups(INDEX_ANALYSIS_FILTER); Map> tokenFilters = new HashMap<>(this.tokenFilters); /* - * synonym is different than everything else since it needs access to the tokenizer factories for this index. + * synonym and synonym_graph are different than everything else since they need access to the tokenizer factories for the index. 
* instead of building the infrastructure for plugins we rather make it a real exception to not pollute the general interface and * hide internal data-structures as much as possible. */ tokenFilters.put("synonym", requriesAnalysisSettings((is, env, name, settings) -> new SynonymTokenFilterFactory(is, env, this, name, settings))); + tokenFilters.put("synonym_graph", requriesAnalysisSettings((is, env, name, settings) -> new SynonymGraphFilterFactory(is, env, this, name, settings))); return buildMapping(false, "tokenfilter", indexSettings, tokenFiltersSettings, Collections.unmodifiableMap(tokenFilters), prebuiltAnalysis.tokenFilterFactories); } @@ -213,12 +214,14 @@ public final class AnalysisRegistry implements Closeable { Settings currentSettings = tokenFilterSettings.get(tokenFilter); String typeName = currentSettings.get("type"); /* - * synonym is different than everything else since it needs access to the tokenizer factories for this index. + * synonym and synonym_graph are different than everything else since they need access to the tokenizer factories for the index. * instead of building the infrastructure for plugins we rather make it a real exception to not pollute the general interface and * hide internal data-structures as much as possible. */ if ("synonym".equals(typeName)) { return requriesAnalysisSettings((is, env, name, settings) -> new SynonymTokenFilterFactory(is, env, this, name, settings)); + } else if ("synonym_graph".equals(typeName)) { + return requriesAnalysisSettings((is, env, name, settings) -> new SynonymGraphFilterFactory(is, env, this, name, settings)); } else { return getAnalysisProvider("tokenfilter", tokenFilters, tokenFilter, typeName); } diff --git a/core/src/main/java/org/elasticsearch/index/analysis/SynonymGraphFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/SynonymGraphFilterFactory.java new file mode 100644 index 00000000000..da9b11b9785 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/analysis/SynonymGraphFilterFactory.java @@ -0,0 +1,41 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.analysis; + +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.synonym.SynonymGraphFilter; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.index.IndexSettings; + +import java.io.IOException; + +public class SynonymGraphFilterFactory extends SynonymTokenFilterFactory { + public SynonymGraphFilterFactory(IndexSettings indexSettings, Environment env, AnalysisRegistry analysisRegistry, + String name, Settings settings) throws IOException { + super(indexSettings, env, analysisRegistry, name, settings); + } + + @Override + public TokenStream create(TokenStream tokenStream) { + // fst is null means no synonyms + return synonymMap.fst == null ? tokenStream : new SynonymGraphFilter(tokenStream, synonymMap, ignoreCase); + } +} diff --git a/core/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java index 11f1303328c..d32c66e0dfe 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java @@ -40,8 +40,8 @@ import java.util.List; public class SynonymTokenFilterFactory extends AbstractTokenFilterFactory { - private final SynonymMap synonymMap; - private final boolean ignoreCase; + protected final SynonymMap synonymMap; + protected final boolean ignoreCase; public SynonymTokenFilterFactory(IndexSettings indexSettings, Environment env, AnalysisRegistry analysisRegistry, String name, Settings settings) throws IOException { diff --git a/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java b/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java index 54f15feaa74..490c837e85a 100644 --- a/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java +++ b/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java @@ -26,7 +26,6 @@ import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat; import org.apache.lucene.codecs.lucene62.Lucene62Codec; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.index.mapper.CompletionFieldMapper; -import org.elasticsearch.index.mapper.CompletionFieldMapper2x; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; @@ -60,9 +59,6 @@ public class PerFieldMappingPostingFormatCodec extends Lucene62Codec { logger.warn("no index mapper found for field: [{}] returning default postings format", field); } else if (fieldType instanceof CompletionFieldMapper.CompletionFieldType) { return CompletionFieldMapper.CompletionFieldType.postingsFormat(); - } else if (fieldType instanceof CompletionFieldMapper2x.CompletionFieldType) { - return ((CompletionFieldMapper2x.CompletionFieldType) fieldType).postingsFormat( - super.getPostingsFormatForField(field)); } return super.getPostingsFormatForField(field); } diff --git a/core/src/main/java/org/elasticsearch/index/engine/Engine.java b/core/src/main/java/org/elasticsearch/index/engine/Engine.java index bd742ee6288..20def3bd893 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/core/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -69,7 +69,6 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.store.Store; 
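A minimal sketch, not part of this patch, of how an index could declare the newly registered synonym_graph token filter through the existing analysis settings; the filter name, analyzer name and synonym rules below are illustrative only, and the snippet assumes the usual org.elasticsearch.common.settings.Settings import:

    Settings analysisSettings = Settings.builder()
        // define a graph-aware synonym filter under an arbitrary name; "synonym_graph" is the
        // type string wired up in AnalysisRegistry above
        .put("index.analysis.filter.my_graph_synonyms.type", "synonym_graph")
        .putArray("index.analysis.filter.my_graph_synonyms.synonyms", "ny, new york", "sf, san francisco")
        // reference it from a custom analyzer, typically used at search time
        .put("index.analysis.analyzer.my_synonym_analyzer.type", "custom")
        .put("index.analysis.analyzer.my_synonym_analyzer.tokenizer", "standard")
        .putArray("index.analysis.analyzer.my_synonym_analyzer.filter", "lowercase", "my_graph_synonyms")
        .build();

Because SynonymGraphFilterFactory extends SynonymTokenFilterFactory (whose synonymMap and ignoreCase fields are made protected above), the graph variant reuses the same rule parsing and only swaps the Lucene filter it creates.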
import org.elasticsearch.index.translog.Translog; -import javax.net.ssl.SNIServerName; import java.io.Closeable; import java.io.FileNotFoundException; import java.io.IOException; @@ -1031,14 +1030,6 @@ public abstract class Engine implements Closeable { return this.doc.routing(); } - public long timestamp() { - return this.doc.timestamp(); - } - - public long ttl() { - return this.doc.ttl(); - } - public String parent() { return this.doc.parent(); } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractGeoPointDVIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractGeoPointDVIndexFieldData.java index 23e770121a7..b35706961ba 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractGeoPointDVIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractGeoPointDVIndexFieldData.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.fielddata.plain; import org.apache.lucene.index.DocValues; import org.apache.lucene.index.LeafReaderContext; -import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; @@ -52,19 +51,14 @@ public abstract class AbstractGeoPointDVIndexFieldData extends DocValuesIndexFie * Lucene 5.4 GeoPointFieldType */ public static class GeoPointDVIndexFieldData extends AbstractGeoPointDVIndexFieldData { - final boolean indexCreatedBefore2x; - public GeoPointDVIndexFieldData(Index index, String fieldName, final boolean indexCreatedBefore2x) { + public GeoPointDVIndexFieldData(Index index, String fieldName) { super(index, fieldName); - this.indexCreatedBefore2x = indexCreatedBefore2x; } @Override public AtomicGeoPointFieldData load(LeafReaderContext context) { try { - if (indexCreatedBefore2x) { - return new GeoPointLegacyDVAtomicFieldData(DocValues.getBinary(context.reader(), fieldName)); - } return new GeoPointDVAtomicFieldData(DocValues.getSortedNumeric(context.reader(), fieldName)); } catch (IOException e) { throw new IllegalStateException("Cannot load doc values", e); @@ -81,13 +75,8 @@ public abstract class AbstractGeoPointDVIndexFieldData extends DocValuesIndexFie @Override public IndexFieldData build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) { - if (indexSettings.getIndexVersionCreated().before(Version.V_2_2_0) - && fieldType.hasDocValues() == false) { - return new GeoPointArrayIndexFieldData(indexSettings, fieldType.name(), cache, breakerService); - } // Ignore breaker - return new GeoPointDVIndexFieldData(indexSettings.getIndex(), fieldType.name(), - indexSettings.getIndexVersionCreated().before(Version.V_2_2_0)); + return new GeoPointDVIndexFieldData(indexSettings.getIndex(), fieldType.name()); } } } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointArrayAtomicFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointArrayAtomicFieldData.java deleted file mode 100644 index 0627e341a04..00000000000 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointArrayAtomicFieldData.java +++ /dev/null @@ -1,145 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.index.fielddata.plain; - -import org.apache.lucene.index.DocValues; -import org.apache.lucene.index.RandomAccessOrds; -import org.apache.lucene.index.SortedDocValues; -import org.apache.lucene.util.Accountable; -import org.apache.lucene.util.Accountables; -import org.apache.lucene.util.BitSet; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.util.LongArray; -import org.elasticsearch.index.fielddata.FieldData; -import org.elasticsearch.index.fielddata.GeoPointValues; -import org.elasticsearch.index.fielddata.MultiGeoPointValues; -import org.elasticsearch.index.fielddata.ordinals.Ordinals; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.List; - -public abstract class GeoPointArrayAtomicFieldData extends AbstractAtomicGeoPointFieldData { - @Override - public void close() { - } - - static class WithOrdinals extends GeoPointArrayAtomicFieldData { - private final LongArray indexedPoints; - private final Ordinals ordinals; - private final int maxDoc; - - public WithOrdinals(LongArray indexedPoints, Ordinals ordinals, int maxDoc) { - super(); - this.indexedPoints = indexedPoints; - this.ordinals = ordinals; - this.maxDoc = maxDoc; - } - - @Override - public long ramBytesUsed() { - return Integer.BYTES + indexedPoints.ramBytesUsed(); - } - - @Override - public Collection getChildResources() { - List resources = new ArrayList<>(); - resources.add(Accountables.namedAccountable("indexedPoints", indexedPoints)); - return Collections.unmodifiableList(resources); - } - - @Override - public MultiGeoPointValues getGeoPointValues() { - final RandomAccessOrds ords = ordinals.ordinals(); - final SortedDocValues singleOrds = DocValues.unwrapSingleton(ords); - final GeoPoint point = new GeoPoint(Double.NaN, Double.NaN); - if (singleOrds != null) { - final GeoPointValues values = new GeoPointValues() { - @Override - public GeoPoint get(int docID) { - final int ord = singleOrds.getOrd(docID); - if (ord >= 0) { - return point.resetFromIndexHash(indexedPoints.get(ord)); - } - return point.reset(Double.NaN, Double.NaN); - } - }; - return FieldData.singleton(values, DocValues.docsWithValue(singleOrds, maxDoc)); - } - return new MultiGeoPointValues() { - @Override - public GeoPoint valueAt(int index) { - return point.resetFromIndexHash(indexedPoints.get(ords.ordAt(index))); - } - - @Override - public void setDocument(int docId) { - ords.setDocument(docId); - } - - @Override - public int count() { - return ords.cardinality(); - } - }; - } - } - - public static class Single extends GeoPointArrayAtomicFieldData { - private final LongArray indexedPoint; - private final BitSet set; - - public Single(LongArray indexedPoint, BitSet set) { - this.indexedPoint = indexedPoint; - this.set = set; - } - - @Override - public long ramBytesUsed() { - return Integer.BYTES + indexedPoint.ramBytesUsed() - + (set == null ? 
0 : set.ramBytesUsed()); - } - - @Override - public Collection getChildResources() { - List resources = new ArrayList<>(); - resources.add(Accountables.namedAccountable("indexedPoints", indexedPoint)); - if (set != null) { - resources.add(Accountables.namedAccountable("missing bitset", set)); - } - return Collections.unmodifiableList(resources); - } - - @Override - public MultiGeoPointValues getGeoPointValues() { - final GeoPoint point = new GeoPoint(); - final GeoPointValues values = new GeoPointValues() { - @Override - public GeoPoint get(int docID) { - if (set == null || set.get(docID)) { - return point.resetFromIndexHash(indexedPoint.get(docID)); - } - return point.reset(Double.NaN, Double.NaN); - } - }; - return FieldData.singleton(values, set); - } - } -} diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointArrayIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointArrayIndexFieldData.java deleted file mode 100644 index 18313f32745..00000000000 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointArrayIndexFieldData.java +++ /dev/null @@ -1,180 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.fielddata.plain; - -import org.apache.lucene.index.LeafReader; -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.RandomAccessOrds; -import org.apache.lucene.index.Terms; -import org.apache.lucene.index.TermsEnum; -import org.apache.lucene.spatial.geopoint.document.GeoPointField; -import org.apache.lucene.util.BitSet; -import org.elasticsearch.Version; -import org.elasticsearch.common.breaker.CircuitBreaker; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.DoubleArray; -import org.elasticsearch.common.util.LongArray; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.fielddata.AtomicGeoPointFieldData; -import org.elasticsearch.index.fielddata.FieldData; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.IndexFieldDataCache; -import org.elasticsearch.index.fielddata.ordinals.Ordinals; -import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.indices.breaker.CircuitBreakerService; - -/** - * Loads FieldData for an array of GeoPoints supporting both long encoded points and backward compatible double arrays - */ -public class GeoPointArrayIndexFieldData extends AbstractIndexGeoPointFieldData { - private final CircuitBreakerService breakerService; - - public GeoPointArrayIndexFieldData(IndexSettings indexSettings, String fieldName, - IndexFieldDataCache cache, CircuitBreakerService breakerService) { - super(indexSettings, fieldName, cache); - this.breakerService = breakerService; - } - - @Override - public AtomicGeoPointFieldData loadDirect(LeafReaderContext context) throws Exception { - LeafReader reader = context.reader(); - - Terms terms = reader.terms(getFieldName()); - AtomicGeoPointFieldData data = null; - // TODO: Use an actual estimator to estimate before loading. - NonEstimatingEstimator estimator = new NonEstimatingEstimator(breakerService.getBreaker(CircuitBreaker.FIELDDATA)); - if (terms == null) { - data = AbstractAtomicGeoPointFieldData.empty(reader.maxDoc()); - estimator.afterLoad(null, data.ramBytesUsed()); - return data; - } - return (indexSettings.getIndexVersionCreated().before(Version.V_2_2_0)) ? 
- loadLegacyFieldData(reader, estimator, terms, data) : loadFieldData22(reader, estimator, terms, data); - } - - /** - * long encoded geopoint field data - */ - private AtomicGeoPointFieldData loadFieldData22(LeafReader reader, NonEstimatingEstimator estimator, Terms terms, - AtomicGeoPointFieldData data) throws Exception { - LongArray indexedPoints = BigArrays.NON_RECYCLING_INSTANCE.newLongArray(128); - final float acceptableTransientOverheadRatio = OrdinalsBuilder.DEFAULT_ACCEPTABLE_OVERHEAD_RATIO; - boolean success = false; - try (OrdinalsBuilder builder = new OrdinalsBuilder(reader.maxDoc(), acceptableTransientOverheadRatio)) { - final TermsEnum termsEnum; - final GeoPointField.TermEncoding termEncoding; - if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_2_3_0)) { - termEncoding = GeoPointField.TermEncoding.PREFIX; - termsEnum = OrdinalsBuilder.wrapGeoPointTerms(terms.iterator()); - } else { - termEncoding = GeoPointField.TermEncoding.NUMERIC; - termsEnum = OrdinalsBuilder.wrapNumeric64Bit(terms.iterator()); - } - - final GeoPointTermsEnum iter = new GeoPointTermsEnum(builder.buildFromTerms(termsEnum), termEncoding); - - Long hashedPoint; - long numTerms = 0; - while ((hashedPoint = iter.next()) != null) { - indexedPoints = BigArrays.NON_RECYCLING_INSTANCE.resize(indexedPoints, numTerms + 1); - indexedPoints.set(numTerms++, hashedPoint); - } - indexedPoints = BigArrays.NON_RECYCLING_INSTANCE.resize(indexedPoints, numTerms); - - Ordinals build = builder.build(); - RandomAccessOrds ordinals = build.ordinals(); - if (FieldData.isMultiValued(ordinals) == false) { - int maxDoc = reader.maxDoc(); - LongArray sIndexedPoint = BigArrays.NON_RECYCLING_INSTANCE.newLongArray(reader.maxDoc()); - for (int i=0; i getChildResources() { - List resources = new ArrayList<>(); - resources.add(Accountables.namedAccountable("latitude", lat)); - resources.add(Accountables.namedAccountable("longitude", lon)); - return Collections.unmodifiableList(resources); - } - - @Override - public MultiGeoPointValues getGeoPointValues() { - final RandomAccessOrds ords = ordinals.ordinals(); - final SortedDocValues singleOrds = DocValues.unwrapSingleton(ords); - if (singleOrds != null) { - final GeoPoint point = new GeoPoint(); - final GeoPointValues values = new GeoPointValues() { - @Override - public GeoPoint get(int docID) { - final int ord = singleOrds.getOrd(docID); - if (ord >= 0) { - return point.reset(lat.get(ord), lon.get(ord)); - } - return point.reset(Double.NaN, Double.NaN); - } - }; - return FieldData.singleton(values, DocValues.docsWithValue(singleOrds, maxDoc)); - } else { - final GeoPoint point = new GeoPoint(); - return new MultiGeoPointValues() { - - @Override - public GeoPoint valueAt(int index) { - final long ord = ords.ordAt(index); - if (ord >= 0) { - return point.reset(lat.get(ord), lon.get(ord)); - } - return point.reset(Double.NaN, Double.NaN); - } - - @Override - public void setDocument(int docId) { - ords.setDocument(docId); - } - - @Override - public int count() { - return ords.cardinality(); - } - }; - } - } - } - - /** - * Assumes unset values are marked in bitset, and docId is used as the index to the value array. 
- */ - public static class Single extends GeoPointArrayLegacyAtomicFieldData { - - private final DoubleArray lon, lat; - private final BitSet set; - - public Single(DoubleArray lon, DoubleArray lat, BitSet set) { - this.lon = lon; - this.lat = lat; - this.set = set; - } - - @Override - public long ramBytesUsed() { - return Integer.BYTES + lon.ramBytesUsed() + lat.ramBytesUsed() + (set == null ? 0 : set.ramBytesUsed()); - } - - @Override - public Collection getChildResources() { - List resources = new ArrayList<>(); - resources.add(Accountables.namedAccountable("latitude", lat)); - resources.add(Accountables.namedAccountable("longitude", lon)); - if (set != null) { - resources.add(Accountables.namedAccountable("missing bitset", set)); - } - return Collections.unmodifiableList(resources); - } - - @Override - public MultiGeoPointValues getGeoPointValues() { - final GeoPoint point = new GeoPoint(); - final GeoPointValues values = new GeoPointValues() { - @Override - public GeoPoint get(int docID) { - if (set == null || set.get(docID)) { - return point.reset(lat.get(docID), lon.get(docID)); - } - return point.reset(Double.NaN, Double.NaN); - } - }; - return FieldData.singleton(values, set); - } - } - -} \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointLegacyDVAtomicFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointLegacyDVAtomicFieldData.java deleted file mode 100644 index c51f2b96982..00000000000 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointLegacyDVAtomicFieldData.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.fielddata.plain; - -import org.apache.lucene.index.BinaryDocValues; -import org.apache.lucene.util.Accountable; -import org.apache.lucene.util.ArrayUtil; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.RamUsageEstimator; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.util.ByteUtils; -import org.elasticsearch.index.fielddata.MultiGeoPointValues; - -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; - -final class GeoPointLegacyDVAtomicFieldData extends AbstractAtomicGeoPointFieldData { - - private static final int COORDINATE_SIZE = 8; // number of bytes per coordinate - private static final int GEOPOINT_SIZE = COORDINATE_SIZE * 2; // lat + lon - - private final BinaryDocValues values; - - GeoPointLegacyDVAtomicFieldData(BinaryDocValues values) { - super(); - this.values = values; - } - - @Override - public long ramBytesUsed() { - return 0; // not exposed by Lucene - } - - @Override - public Collection getChildResources() { - return Collections.emptyList(); - } - - @Override - public void close() { - // no-op - } - - @Override - public MultiGeoPointValues getGeoPointValues() { - return new MultiGeoPointValues() { - - int count; - GeoPoint[] points = new GeoPoint[0]; - - @Override - public void setDocument(int docId) { - final BytesRef bytes = values.get(docId); - assert bytes.length % GEOPOINT_SIZE == 0; - count = (bytes.length >>> 4); - if (count > points.length) { - final int previousLength = points.length; - points = Arrays.copyOf(points, ArrayUtil.oversize(count, RamUsageEstimator.NUM_BYTES_OBJECT_REF)); - for (int i = previousLength; i < points.length; ++i) { - points[i] = new GeoPoint(Double.NaN, Double.NaN); - } - } - for (int i = 0; i < count; ++i) { - final double lat = ByteUtils.readDoubleLE(bytes.bytes, bytes.offset + i * GEOPOINT_SIZE); - final double lon = ByteUtils.readDoubleLE(bytes.bytes, bytes.offset + i * GEOPOINT_SIZE + COORDINATE_SIZE); - points[i].reset(lat, lon); - } - } - - @Override - public int count() { - return count; - } - - @Override - public GeoPoint valueAt(int index) { - return points[index]; - } - - }; - } -} diff --git a/core/src/main/java/org/elasticsearch/index/fieldvisitor/FieldsVisitor.java b/core/src/main/java/org/elasticsearch/index/fieldvisitor/FieldsVisitor.java index 1316183f862..372e7caf921 100644 --- a/core/src/main/java/org/elasticsearch/index/fieldvisitor/FieldsVisitor.java +++ b/core/src/main/java/org/elasticsearch/index/fieldvisitor/FieldsVisitor.java @@ -28,8 +28,6 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParentFieldMapper; import org.elasticsearch.index.mapper.RoutingFieldMapper; import org.elasticsearch.index.mapper.SourceFieldMapper; -import org.elasticsearch.index.mapper.TTLFieldMapper; -import org.elasticsearch.index.mapper.TimestampFieldMapper; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.UidFieldMapper; @@ -52,8 +50,6 @@ import static org.elasticsearch.common.util.set.Sets.newHashSet; public class FieldsVisitor extends StoredFieldVisitor { private static final Set BASE_REQUIRED_FIELDS = unmodifiableSet(newHashSet( UidFieldMapper.NAME, - TimestampFieldMapper.NAME, - TTLFieldMapper.NAME, RoutingFieldMapper.NAME, ParentFieldMapper.NAME)); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/AllFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/AllFieldMapper.java index 90d9fbda1b5..49b8fb085d6 
100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/AllFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/AllFieldMapper.java @@ -19,8 +19,8 @@ package org.elasticsearch.index.mapper; -import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.Term; import org.apache.lucene.search.Query; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -208,7 +208,7 @@ public class AllFieldMapper extends MetadataFieldMapper { } @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { + protected void parseCreateField(ParseContext context, List fields) throws IOException { if (!enabledState.enabled) { return; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/BaseGeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/BaseGeoPointFieldMapper.java index fa82176c6ab..c4f9e7a1cb3 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/BaseGeoPointFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/BaseGeoPointFieldMapper.java @@ -21,14 +21,12 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.Query; import org.elasticsearch.common.geo.GeoHashUtils; -import org.apache.lucene.util.LegacyNumericUtils; -import org.elasticsearch.Version; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.logging.DeprecationLogger; @@ -45,7 +43,6 @@ import org.elasticsearch.search.DocValueFormat; import org.joda.time.DateTimeZone; import java.io.IOException; -import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -77,16 +74,6 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr public abstract static class Builder extends FieldMapper.Builder { - protected boolean enableLatLon = Defaults.ENABLE_LATLON; - - protected Integer precisionStep; - - protected boolean enableGeoHash = Defaults.ENABLE_GEOHASH; - - protected boolean enableGeoHashPrefix = Defaults.ENABLE_GEOHASH_PREFIX; - - protected int geoHashPrecision = Defaults.GEO_HASH_PRECISION; - protected Boolean ignoreMalformed; public Builder(String name, MappedFieldType fieldType) { @@ -98,31 +85,6 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr return (GeoPointFieldType)fieldType; } - public T enableLatLon(boolean enableLatLon) { - this.enableLatLon = enableLatLon; - return builder; - } - - public T precisionStep(int precisionStep) { - this.precisionStep = precisionStep; - return builder; - } - - public T enableGeoHash(boolean enableGeoHash) { - this.enableGeoHash = enableGeoHash; - return builder; - } - - public T geoHashPrefix(boolean enableGeoHashPrefix) { - this.enableGeoHashPrefix = enableGeoHashPrefix; - return builder; - } - - public T geoHashPrecision(int precision) { - this.geoHashPrecision = precision; - return builder; - } - public T ignoreMalformed(boolean ignoreMalformed) { this.ignoreMalformed = ignoreMalformed; return builder; @@ -143,112 +105,21 @@ public abstract class 
BaseGeoPointFieldMapper extends FieldMapper implements Arr FieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, CopyTo copyTo); public Y build(Mapper.BuilderContext context) { - // version 5.0 cuts over to LatLonPoint and no longer indexes geohash, or lat/lon separately - if (context.indexCreatedVersion().before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - return buildLegacy(context); - } return build(context, name, fieldType, defaultFieldType, context.indexSettings(), null, null, null, multiFieldsBuilder.build(this, context), ignoreMalformed(context), copyTo); } - - private Y buildLegacy(Mapper.BuilderContext context) { - LegacyGeoPointFieldType geoPointFieldType = (LegacyGeoPointFieldType)fieldType; - - FieldMapper latMapper = null; - FieldMapper lonMapper = null; - - context.path().add(name); - if (enableLatLon) { - if (context.indexCreatedVersion().before(Version.V_5_0_0_alpha2)) { - LegacyNumberFieldMapper.Builder latMapperBuilder = new LegacyDoubleFieldMapper.Builder(Names.LAT).includeInAll(false); - LegacyNumberFieldMapper.Builder lonMapperBuilder = new LegacyDoubleFieldMapper.Builder(Names.LON).includeInAll(false); - if (precisionStep != null) { - latMapperBuilder.precisionStep(precisionStep); - lonMapperBuilder.precisionStep(precisionStep); - } - latMapper = (LegacyDoubleFieldMapper) latMapperBuilder.includeInAll(false).store(fieldType.stored()).docValues(false).build(context); - lonMapper = (LegacyDoubleFieldMapper) lonMapperBuilder.includeInAll(false).store(fieldType.stored()).docValues(false).build(context); - } else { - latMapper = new NumberFieldMapper.Builder(Names.LAT, NumberFieldMapper.NumberType.DOUBLE) - .includeInAll(false).store(fieldType.stored()).docValues(false).build(context); - lonMapper = new NumberFieldMapper.Builder(Names.LON, NumberFieldMapper.NumberType.DOUBLE) - .includeInAll(false).store(fieldType.stored()).docValues(false).build(context); - } - geoPointFieldType.setLatLonEnabled(latMapper.fieldType(), lonMapper.fieldType()); - } - FieldMapper geoHashMapper = null; - if (enableGeoHash || enableGeoHashPrefix) { - // TODO: possible also implicitly enable geohash if geohash precision is set - if (context.indexCreatedVersion().onOrAfter(Version.V_5_0_0_alpha1)) { - geoHashMapper = new KeywordFieldMapper.Builder(Names.GEOHASH) - .index(true).includeInAll(false).store(fieldType.stored()).build(context); - } else { - geoHashMapper = new StringFieldMapper.Builder(Names.GEOHASH) - .tokenized(false).index(true).omitNorms(true).indexOptions(IndexOptions.DOCS) - .includeInAll(false).store(fieldType.stored()).build(context); - } - geoPointFieldType.setGeoHashEnabled(geoHashMapper.fieldType(), geoHashPrecision, enableGeoHashPrefix); - } - context.path().remove(); - - return build(context, name, fieldType, defaultFieldType, context.indexSettings(), - latMapper, lonMapper, geoHashMapper, multiFieldsBuilder.build(this, context), ignoreMalformed(context), copyTo); - } } public abstract static class TypeParser implements Mapper.TypeParser { @Override public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - Builder builder; - Version indexVersionCreated = parserContext.indexVersionCreated(); - if (indexVersionCreated.before(Version.V_2_2_0)) { - builder = new LegacyGeoPointFieldMapper.Builder(name); - } else if (indexVersionCreated.onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - builder = new LatLonPointFieldMapper.Builder(name); - } else { - builder = new 
GeoPointFieldMapper.Builder(name); - } + Builder builder = new LatLonPointFieldMapper.Builder(name); parseField(builder, name, node, parserContext); for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { Map.Entry entry = iterator.next(); String propName = entry.getKey(); Object propNode = entry.getValue(); - if (indexVersionCreated.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - if (propName.equals("lat_lon")) { - deprecationLogger.deprecated(CONTENT_TYPE + " lat_lon parameter is deprecated and will be removed " - + "in the next major release"); - builder.enableLatLon(XContentMapValues.lenientNodeBooleanValue(propNode)); - iterator.remove(); - } else if (propName.equals("precision_step")) { - deprecationLogger.deprecated(CONTENT_TYPE + " precision_step parameter is deprecated and will be removed " - + "in the next major release"); - builder.precisionStep(XContentMapValues.nodeIntegerValue(propNode)); - iterator.remove(); - } else if (propName.equals("geohash")) { - deprecationLogger.deprecated(CONTENT_TYPE + " geohash parameter is deprecated and will be removed " - + "in the next major release"); - builder.enableGeoHash(XContentMapValues.lenientNodeBooleanValue(propNode)); - iterator.remove(); - } else if (propName.equals("geohash_prefix")) { - deprecationLogger.deprecated(CONTENT_TYPE + " geohash_prefix parameter is deprecated and will be removed " - + "in the next major release"); - builder.geoHashPrefix(XContentMapValues.lenientNodeBooleanValue(propNode)); - if (XContentMapValues.lenientNodeBooleanValue(propNode)) { - builder.enableGeoHash(true); - } - iterator.remove(); - } else if (propName.equals("geohash_precision")) { - deprecationLogger.deprecated(CONTENT_TYPE + " geohash_precision parameter is deprecated and will be removed " - + "in the next major release"); - if (propNode instanceof Integer) { - builder.geoHashPrecision(XContentMapValues.nodeIntegerValue(propNode)); - } else { - builder.geoHashPrecision(GeoUtils.geoHashLevelsForPrecision(propNode.toString())); - } - iterator.remove(); - } - } if (propName.equals(Names.IGNORE_MALFORMED)) { builder.ignoreMalformed(XContentMapValues.lenientNodeBooleanValue(propNode)); @@ -256,19 +127,13 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr } } - if (builder instanceof LegacyGeoPointFieldMapper.Builder) { - return LegacyGeoPointFieldMapper.parse((LegacyGeoPointFieldMapper.Builder) builder, node, parserContext); - } else if (builder instanceof LatLonPointFieldMapper.Builder) { - return (LatLonPointFieldMapper.Builder) builder; - } - - return (GeoPointFieldMapper.Builder) builder; + return builder; } } - public abstract static class GeoPointFieldType extends MappedFieldType { - GeoPointFieldType() { - } + public static class GeoPointFieldType extends MappedFieldType { + + GeoPointFieldType() {} GeoPointFieldType(GeoPointFieldType ref) { super(ref); @@ -278,110 +143,10 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr public String typeName() { return CONTENT_TYPE; } - } - - public static class LegacyGeoPointFieldType extends GeoPointFieldType { - protected MappedFieldType geoHashFieldType; - protected int geoHashPrecision; - protected boolean geoHashPrefixEnabled; - - protected MappedFieldType latFieldType; - protected MappedFieldType lonFieldType; - - LegacyGeoPointFieldType() {} - - LegacyGeoPointFieldType(LegacyGeoPointFieldType ref) { - super(ref); - this.geoHashFieldType = ref.geoHashFieldType; // copying ref is ok, this can never be 
modified - this.geoHashPrecision = ref.geoHashPrecision; - this.geoHashPrefixEnabled = ref.geoHashPrefixEnabled; - this.latFieldType = ref.latFieldType; // copying ref is ok, this can never be modified - this.lonFieldType = ref.lonFieldType; // copying ref is ok, this can never be modified - } @Override public MappedFieldType clone() { - return new LegacyGeoPointFieldType(this); - } - - @Override - public boolean equals(Object o) { - if (!super.equals(o)) return false; - LegacyGeoPointFieldType that = (LegacyGeoPointFieldType) o; - return geoHashPrecision == that.geoHashPrecision && - geoHashPrefixEnabled == that.geoHashPrefixEnabled && - java.util.Objects.equals(geoHashFieldType, that.geoHashFieldType) && - java.util.Objects.equals(latFieldType, that.latFieldType) && - java.util.Objects.equals(lonFieldType, that.lonFieldType); - } - - @Override - public int hashCode() { - return java.util.Objects.hash(super.hashCode(), geoHashFieldType, geoHashPrecision, geoHashPrefixEnabled, latFieldType, - lonFieldType); - } - - @Override - public void checkCompatibility(MappedFieldType fieldType, List conflicts, boolean strict) { - super.checkCompatibility(fieldType, conflicts, strict); - LegacyGeoPointFieldType other = (LegacyGeoPointFieldType)fieldType; - if (isLatLonEnabled() != other.isLatLonEnabled()) { - conflicts.add("mapper [" + name() + "] has different [lat_lon]"); - } - if (isLatLonEnabled() && other.isLatLonEnabled() && - latFieldType().numericPrecisionStep() != other.latFieldType().numericPrecisionStep()) { - conflicts.add("mapper [" + name() + "] has different [precision_step]"); - } - if (isGeoHashEnabled() != other.isGeoHashEnabled()) { - conflicts.add("mapper [" + name() + "] has different [geohash]"); - } - if (geoHashPrecision() != other.geoHashPrecision()) { - conflicts.add("mapper [" + name() + "] has different [geohash_precision]"); - } - if (isGeoHashPrefixEnabled() != other.isGeoHashPrefixEnabled()) { - conflicts.add("mapper [" + name() + "] has different [geohash_prefix]"); - } - } - - public boolean isGeoHashEnabled() { - return geoHashFieldType != null; - } - - public MappedFieldType geoHashFieldType() { - return geoHashFieldType; - } - - public int geoHashPrecision() { - return geoHashPrecision; - } - - public boolean isGeoHashPrefixEnabled() { - return geoHashPrefixEnabled; - } - - public void setGeoHashEnabled(MappedFieldType geoHashFieldType, int geoHashPrecision, boolean geoHashPrefixEnabled) { - checkIfFrozen(); - this.geoHashFieldType = geoHashFieldType; - this.geoHashPrecision = geoHashPrecision; - this.geoHashPrefixEnabled = geoHashPrefixEnabled; - } - - public boolean isLatLonEnabled() { - return latFieldType != null; - } - - public MappedFieldType latFieldType() { - return latFieldType; - } - - public MappedFieldType lonFieldType() { - return lonFieldType; - } - - public void setLatLonEnabled(MappedFieldType latFieldType, MappedFieldType lonFieldType) { - checkIfFrozen(); - this.latFieldType = latFieldType; - this.lonFieldType = lonFieldType; + return new GeoPointFieldType(this); } @Override @@ -407,30 +172,15 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr } } - protected FieldMapper latMapper; - - protected FieldMapper lonMapper; - - protected FieldMapper geoHashMapper; - protected Explicit ignoreMalformed; protected BaseGeoPointFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings, FieldMapper latMapper, FieldMapper lonMapper, FieldMapper geoHashMapper, MultiFields 
multiFields, Explicit ignoreMalformed, CopyTo copyTo) { super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo); - this.latMapper = latMapper; - this.lonMapper = lonMapper; - this.geoHashMapper = geoHashMapper; this.ignoreMalformed = ignoreMalformed; } - - - public LegacyGeoPointFieldType legacyFieldType() { - return (LegacyGeoPointFieldType) super.fieldType(); - } - @Override protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { super.doMerge(mergeWith, updateAllTypes); @@ -440,47 +190,17 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr } } - @Override - public Iterator iterator() { - if (this instanceof LatLonPointFieldMapper == false) { - return Iterators.concat(super.iterator(), legacyIterator()); - } - return super.iterator(); - } - - public Iterator legacyIterator() { - List extras = new ArrayList<>(); - if (legacyFieldType().isGeoHashEnabled()) { - extras.add(geoHashMapper); - } - if (legacyFieldType().isLatLonEnabled()) { - extras.add(latMapper); - extras.add(lonMapper); - } - return extras.iterator(); - } - @Override protected String contentType() { return CONTENT_TYPE; } @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { + protected void parseCreateField(ParseContext context, List fields) throws IOException { throw new UnsupportedOperationException("Parsing is implemented in parse(), this method should NEVER be called"); } protected void parse(ParseContext context, GeoPoint point, String geoHash) throws IOException { - if (legacyFieldType().isGeoHashEnabled()) { - if (geoHash == null) { - geoHash = GeoHashUtils.stringEncode(point.lon(), point.lat()); - } - addGeoHashField(context, geoHash); - } - if (legacyFieldType().isLatLonEnabled()) { - latMapper.parse(context.createExternalValueContext(point.lat())); - lonMapper.parse(context.createExternalValueContext(point.lon())); - } multiFields.parse(this, context.createExternalValueContext(point)); } @@ -551,17 +271,6 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr return null; } - private void addGeoHashField(ParseContext context, String geoHash) throws IOException { - LegacyGeoPointFieldType ft = (LegacyGeoPointFieldType)fieldType; - int len = Math.min(ft.geoHashPrecision(), geoHash.length()); - int min = ft.isGeoHashPrefixEnabled() ? 
1 : len; - - for (int i = len; i >= min; i--) { - // side effect of this call is adding the field - geoHashMapper.parse(context.createExternalValueContext(geoHash.substring(0, i))); - } - } - private void parsePointFromString(ParseContext context, GeoPoint sparse, String point) throws IOException { if (point.indexOf(',') < 0) { parse(context, sparse.resetFromGeoHash(point), point); @@ -573,51 +282,9 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr @Override protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { super.doXContentBody(builder, includeDefaults, params); - if (this instanceof LatLonPointFieldMapper == false) { - legacyDoXContentBody(builder, includeDefaults, params); - } if (includeDefaults || ignoreMalformed.explicit()) { builder.field(Names.IGNORE_MALFORMED, ignoreMalformed.value()); } } - protected void legacyDoXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { - LegacyGeoPointFieldType ft = (LegacyGeoPointFieldType) fieldType; - if (includeDefaults || ft.isLatLonEnabled() != GeoPointFieldMapper.Defaults.ENABLE_LATLON) { - builder.field("lat_lon", ft.isLatLonEnabled()); - } - if (ft.isLatLonEnabled() && (includeDefaults || ft.latFieldType().numericPrecisionStep() != LegacyNumericUtils.PRECISION_STEP_DEFAULT)) { - builder.field("precision_step", ft.latFieldType().numericPrecisionStep()); - } - if (includeDefaults || ft.isGeoHashEnabled() != Defaults.ENABLE_GEOHASH) { - builder.field("geohash", ft.isGeoHashEnabled()); - } - if (includeDefaults || ft.isGeoHashPrefixEnabled() != Defaults.ENABLE_GEOHASH_PREFIX) { - builder.field("geohash_prefix", ft.isGeoHashPrefixEnabled()); - } - if (ft.isGeoHashEnabled() && (includeDefaults || ft.geoHashPrecision() != Defaults.GEO_HASH_PRECISION)) { - builder.field("geohash_precision", ft.geoHashPrecision()); - } - } - - @Override - public FieldMapper updateFieldType(Map fullNameToFieldType) { - BaseGeoPointFieldMapper updated = (BaseGeoPointFieldMapper) super.updateFieldType(fullNameToFieldType); - FieldMapper geoUpdated = geoHashMapper == null ? null : geoHashMapper.updateFieldType(fullNameToFieldType); - FieldMapper latUpdated = latMapper == null ? null : latMapper.updateFieldType(fullNameToFieldType); - FieldMapper lonUpdated = lonMapper == null ? 
null : lonMapper.updateFieldType(fullNameToFieldType); - if (updated == this - && geoUpdated == geoHashMapper - && latUpdated == latMapper - && lonUpdated == lonMapper) { - return this; - } - if (updated == this) { - updated = (BaseGeoPointFieldMapper) updated.clone(); - } - updated.geoHashMapper = geoUpdated; - updated.latMapper = latUpdated; - updated.lonMapper = lonUpdated; - return updated; - } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java index 374540d03fe..c32f6219a3c 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java @@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper; import com.carrotsearch.hppc.ObjectArrayList; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.Query; import org.apache.lucene.store.ByteArrayDataOutput; import org.apache.lucene.util.BytesRef; @@ -137,7 +138,7 @@ public class BinaryFieldMapper extends FieldMapper { } @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { + protected void parseCreateField(ParseContext context, List fields) throws IOException { if (!fieldType().stored() && !fieldType().hasDocValues()) { return; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java index 8e087f72f0a..fc9520beeab 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java @@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.document.Field; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.util.BytesRef; @@ -217,7 +218,7 @@ public class BooleanFieldMapper extends FieldMapper { } @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { + protected void parseCreateField(ParseContext context, List fields) throws IOException { if (fieldType().indexOptions() == IndexOptions.NONE && !fieldType().stored() && !fieldType().hasDocValues()) { return; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java index 5a4edd39ac1..c499c30c607 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.document.Field; +import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.Term; import org.apache.lucene.search.suggest.document.Completion50PostingsFormat; import org.apache.lucene.search.suggest.document.CompletionAnalyzer; @@ -114,9 +115,6 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp @Override public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws 
MapperParsingException { - if (parserContext.indexVersionCreated().before(Version.V_5_0_0_alpha1)) { - return new CompletionFieldMapper2x.TypeParser().parse(name, node, parserContext); - } CompletionFieldMapper.Builder builder = new CompletionFieldMapper.Builder(name); NamedAnalyzer indexAnalyzer = null; NamedAnalyzer searchAnalyzer = null; @@ -590,7 +588,7 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp } @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { + protected void parseCreateField(ParseContext context, List fields) throws IOException { // no-op } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper2x.java b/core/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper2x.java deleted file mode 100644 index 73a797954af..00000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper2x.java +++ /dev/null @@ -1,602 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.index.mapper; - -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.document.Field; -import org.apache.lucene.search.suggest.analyzing.XAnalyzingSuggester; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.Version; -import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentParser.NumberType; -import org.elasticsearch.common.xcontent.XContentParser.Token; -import org.elasticsearch.index.analysis.NamedAnalyzer; -import org.elasticsearch.search.suggest.completion2x.AnalyzingCompletionLookupProvider; -import org.elasticsearch.search.suggest.completion2x.Completion090PostingsFormat; -import org.elasticsearch.search.suggest.completion2x.CompletionTokenStream; -import org.elasticsearch.search.suggest.completion2x.context.ContextBuilder; -import org.elasticsearch.search.suggest.completion2x.context.ContextMapping; -import org.elasticsearch.search.suggest.completion2x.context.ContextMapping.ContextConfig; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.Objects; -import java.util.Set; -import java.util.SortedMap; -import java.util.TreeMap; - -import static org.elasticsearch.index.mapper.TypeParsers.parseMultiField; - -public class CompletionFieldMapper2x extends FieldMapper { - - public static final String CONTENT_TYPE = "completion"; - - public static class Defaults { - public static final CompletionFieldType FIELD_TYPE = new CompletionFieldType(); - - static { - FIELD_TYPE.setOmitNorms(true); - FIELD_TYPE.freeze(); - } - - public static final boolean DEFAULT_PRESERVE_SEPARATORS = true; - public static final boolean DEFAULT_POSITION_INCREMENTS = true; - public static final boolean DEFAULT_HAS_PAYLOADS = false; - public static final int DEFAULT_MAX_INPUT_LENGTH = 50; - } - - public static class Fields { - // Mapping field names - public static final String ANALYZER = "analyzer"; - public static final ParseField SEARCH_ANALYZER = new ParseField("search_analyzer"); - public static final ParseField PRESERVE_SEPARATORS = new ParseField("preserve_separators"); - public static final ParseField PRESERVE_POSITION_INCREMENTS = new ParseField("preserve_position_increments"); - public static final String PAYLOADS = "payloads"; - public static final String TYPE = "type"; - public static final ParseField MAX_INPUT_LENGTH = new ParseField("max_input_length", "max_input_len"); - // Content field names - public static final String CONTENT_FIELD_NAME_INPUT = "input"; - public static final String CONTENT_FIELD_NAME_OUTPUT = "output"; - public static final String CONTENT_FIELD_NAME_PAYLOAD = "payload"; - public static final String CONTENT_FIELD_NAME_WEIGHT = "weight"; - public static final String CONTEXT = "context"; - } - - public static final Set ALLOWED_CONTENT_FIELD_NAMES; - static { - ALLOWED_CONTENT_FIELD_NAMES = new HashSet<>(); - ALLOWED_CONTENT_FIELD_NAMES.add(Fields.CONTENT_FIELD_NAME_INPUT); - ALLOWED_CONTENT_FIELD_NAMES.add(Fields.CONTENT_FIELD_NAME_OUTPUT); - 
ALLOWED_CONTENT_FIELD_NAMES.add(Fields.CONTENT_FIELD_NAME_PAYLOAD); - ALLOWED_CONTENT_FIELD_NAMES.add(Fields.CONTENT_FIELD_NAME_WEIGHT); - ALLOWED_CONTENT_FIELD_NAMES.add(Fields.CONTEXT); - } - - public static class Builder extends FieldMapper.Builder { - - private boolean preserveSeparators = Defaults.DEFAULT_PRESERVE_SEPARATORS; - private boolean payloads = Defaults.DEFAULT_HAS_PAYLOADS; - private boolean preservePositionIncrements = Defaults.DEFAULT_POSITION_INCREMENTS; - private int maxInputLength = Defaults.DEFAULT_MAX_INPUT_LENGTH; - private SortedMap contextMapping = ContextMapping.EMPTY_MAPPING; - - public Builder(String name) { - super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE); - builder = this; - } - - public Builder payloads(boolean payloads) { - this.payloads = payloads; - return this; - } - - public Builder preserveSeparators(boolean preserveSeparators) { - this.preserveSeparators = preserveSeparators; - return this; - } - - public Builder preservePositionIncrements(boolean preservePositionIncrements) { - this.preservePositionIncrements = preservePositionIncrements; - return this; - } - - public Builder maxInputLength(int maxInputLength) { - if (maxInputLength <= 0) { - throw new IllegalArgumentException( - Fields.MAX_INPUT_LENGTH.getPreferredName() + " must be > 0 but was [" + maxInputLength + "]"); - } - this.maxInputLength = maxInputLength; - return this; - } - - public Builder contextMapping(SortedMap contextMapping) { - this.contextMapping = contextMapping; - return this; - } - - @Override - public CompletionFieldMapper2x build(Mapper.BuilderContext context) { - setupFieldType(context); - CompletionFieldType completionFieldType = (CompletionFieldType) fieldType; - completionFieldType.setProvider( - new AnalyzingCompletionLookupProvider(preserveSeparators, preservePositionIncrements, payloads)); - completionFieldType.setContextMapping(contextMapping); - return new CompletionFieldMapper2x(name, fieldType, maxInputLength, context.indexSettings(), - multiFieldsBuilder.build(this, context), copyTo); - } - - } - - public static class TypeParser implements Mapper.TypeParser { - - @Override - public Mapper.Builder parse(String name, Map node, ParserContext parserContext) - throws MapperParsingException { - CompletionFieldMapper2x.Builder builder = new Builder(name); - NamedAnalyzer indexAnalyzer = null; - NamedAnalyzer searchAnalyzer = null; - for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext(); ) { - Map.Entry entry = iterator.next(); - String fieldName = entry.getKey(); - Object fieldNode = entry.getValue(); - if (fieldName.equals("type")) { - continue; - } - if (Fields.ANALYZER.equals(fieldName) || // index_analyzer is for backcompat, remove for v3.0 - fieldName.equals("index_analyzer") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { - - indexAnalyzer = getNamedAnalyzer(parserContext, fieldNode.toString()); - iterator.remove(); - } else if (parserContext.parseFieldMatcher().match(fieldName, Fields.SEARCH_ANALYZER)) { - searchAnalyzer = getNamedAnalyzer(parserContext, fieldNode.toString()); - iterator.remove(); - } else if (fieldName.equals(Fields.PAYLOADS)) { - builder.payloads(Boolean.parseBoolean(fieldNode.toString())); - iterator.remove(); - } else if (parserContext.parseFieldMatcher().match(fieldName, Fields.PRESERVE_SEPARATORS)) { - builder.preserveSeparators(Boolean.parseBoolean(fieldNode.toString())); - iterator.remove(); - } else if (parserContext.parseFieldMatcher().match(fieldName, 
Fields.PRESERVE_POSITION_INCREMENTS)) { - builder.preservePositionIncrements(Boolean.parseBoolean(fieldNode.toString())); - iterator.remove(); - } else if (parserContext.parseFieldMatcher().match(fieldName, Fields.MAX_INPUT_LENGTH)) { - builder.maxInputLength(Integer.parseInt(fieldNode.toString())); - iterator.remove(); - } else if (parseMultiField(builder, name, parserContext, fieldName, fieldNode)) { - iterator.remove(); - } else if (fieldName.equals(Fields.CONTEXT)) { - builder.contextMapping(ContextBuilder.loadMappings(fieldNode, parserContext.indexVersionCreated())); - iterator.remove(); - } - } - - if (indexAnalyzer == null) { - if (searchAnalyzer != null) { - throw new MapperParsingException( - "analyzer on completion field [" + name + "] must be set when search_analyzer is set"); - } - indexAnalyzer = searchAnalyzer = parserContext.getIndexAnalyzers().get("simple"); - } else if (searchAnalyzer == null) { - searchAnalyzer = indexAnalyzer; - } - builder.indexAnalyzer(indexAnalyzer); - builder.searchAnalyzer(searchAnalyzer); - - return builder; - } - - private NamedAnalyzer getNamedAnalyzer(ParserContext parserContext, String name) { - NamedAnalyzer analyzer = parserContext.getIndexAnalyzers().get(name); - if (analyzer == null) { - throw new IllegalArgumentException("Can't find default or mapped analyzer with name [" + name + "]"); - } - return analyzer; - } - } - - public static final class CompletionFieldType extends TermBasedFieldType { - private PostingsFormat postingsFormat; - private AnalyzingCompletionLookupProvider analyzingSuggestLookupProvider; - private SortedMap contextMapping = ContextMapping.EMPTY_MAPPING; - - public CompletionFieldType() { - } - - protected CompletionFieldType(CompletionFieldType ref) { - super(ref); - this.postingsFormat = ref.postingsFormat; - this.analyzingSuggestLookupProvider = ref.analyzingSuggestLookupProvider; - this.contextMapping = ref.contextMapping; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (!(o instanceof CompletionFieldType)) return false; - if (!super.equals(o)) return false; - CompletionFieldType fieldType = (CompletionFieldType) o; - return analyzingSuggestLookupProvider.getPreserveSep() == fieldType.analyzingSuggestLookupProvider.getPreserveSep() - && analyzingSuggestLookupProvider.getPreservePositionsIncrements() == - fieldType.analyzingSuggestLookupProvider.getPreservePositionsIncrements() && - analyzingSuggestLookupProvider.hasPayloads() == fieldType.analyzingSuggestLookupProvider.hasPayloads() && - Objects.equals(getContextMapping(), fieldType.getContextMapping()); - } - - @Override - public int hashCode() { - return Objects.hash(super.hashCode(), - analyzingSuggestLookupProvider.getPreserveSep(), - analyzingSuggestLookupProvider.getPreservePositionsIncrements(), - analyzingSuggestLookupProvider.hasPayloads(), - getContextMapping()); - } - - @Override - public CompletionFieldType clone() { - return new CompletionFieldType(this); - } - - @Override - public String typeName() { - return CONTENT_TYPE; - } - - @Override - public void checkCompatibility(MappedFieldType fieldType, List conflicts, boolean strict) { - super.checkCompatibility(fieldType, conflicts, strict); - CompletionFieldType other = (CompletionFieldType) fieldType; - if (analyzingSuggestLookupProvider.hasPayloads() != other.analyzingSuggestLookupProvider.hasPayloads()) { - conflicts.add("mapper [" + name() + "] has different [payload] values"); - } - if (analyzingSuggestLookupProvider.getPreservePositionsIncrements() != - 
other.analyzingSuggestLookupProvider.getPreservePositionsIncrements()) { - conflicts.add("mapper [" + name() + "] has different [preserve_position_increments] values"); - } - if (analyzingSuggestLookupProvider.getPreserveSep() != other.analyzingSuggestLookupProvider.getPreserveSep()) { - conflicts.add("mapper [" + name() + "] has different [preserve_separators] values"); - } - if (!ContextMapping.mappingsAreEqual(getContextMapping(), other.getContextMapping())) { - conflicts.add("mapper [" + name() + "] has different [context_mapping] values"); - } - } - - public void setProvider(AnalyzingCompletionLookupProvider provider) { - checkIfFrozen(); - this.analyzingSuggestLookupProvider = provider; - } - - public synchronized PostingsFormat postingsFormat(PostingsFormat in) { - if (in instanceof Completion090PostingsFormat) { - throw new IllegalStateException("Double wrapping of " + Completion090PostingsFormat.class); - } - if (postingsFormat == null) { - postingsFormat = new Completion090PostingsFormat(in, analyzingSuggestLookupProvider); - } - return postingsFormat; - } - - public void setContextMapping(SortedMap contextMapping) { - checkIfFrozen(); - this.contextMapping = contextMapping; - } - - /** - * Get the context mapping associated with this completion field - */ - public SortedMap getContextMapping() { - return contextMapping; - } - - /** - * @return true if a context mapping has been defined - */ - public boolean requiresContext() { - return contextMapping.isEmpty() == false; - } - } - - private static final BytesRef EMPTY = new BytesRef(); - - private int maxInputLength; - - public CompletionFieldMapper2x(String simpleName, MappedFieldType fieldType, int maxInputLength, - Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(simpleName, fieldType, Defaults.FIELD_TYPE, indexSettings, multiFields, copyTo); - this.maxInputLength = maxInputLength; - } - - @Override - public CompletionFieldType fieldType() { - return (CompletionFieldType) super.fieldType(); - } - - /** - * Parses and indexes inputs - * Parsing: - * Acceptable format: - * "STRING" - interpreted as field value (input) - * "ARRAY" - each element can be one of "OBJECT" (see below) - * "OBJECT" - { "input": STRING|ARRAY, "weight": STRING|INT, "contexts": ARRAY|OBJECT } - */ - @Override - public Mapper parse(ParseContext context) throws IOException { - XContentParser parser = context.parser(); - XContentParser.Token token = parser.currentToken(); - if (token == XContentParser.Token.VALUE_NULL) { - throw new MapperParsingException("completion field [" + fieldType().name() + "] does not support null values"); - } - - String surfaceForm = null; - BytesRef payload = null; - long weight = -1; - List inputs = new ArrayList<>(4); - - SortedMap contextConfig = null; - - if (token == XContentParser.Token.VALUE_STRING) { - inputs.add(parser.text()); - multiFields.parse(this, context); - } else { - String currentFieldName = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - if (!ALLOWED_CONTENT_FIELD_NAMES.contains(currentFieldName)) { - throw new IllegalArgumentException( - "Unknown field name[" + currentFieldName + "], must be one of " + ALLOWED_CONTENT_FIELD_NAMES); - } - } else if (Fields.CONTEXT.equals(currentFieldName)) { - SortedMap configs = new TreeMap<>(); - if (token == Token.START_OBJECT) { - while ((token = parser.nextToken()) != Token.END_OBJECT) { - String name = 
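
Concretely, the three accepted value shapes described in the comment above look roughly like this when indexing a document. The field name `suggest`, the context name `genre`, and the exact JSON keys are illustrative, based on the 2.x completion suggester conventions rather than spelled out in this diff:

```java
/** Illustrative JSON payloads for the three accepted completion value shapes. */
final class CompletionValueShapes {
    // plain string: the value is both the input and the surface form
    static final String AS_STRING = "{\"suggest\": \"Nirvana\"}";

    // array of strings: several inputs sharing the same (implicit) surface form
    static final String AS_ARRAY = "{\"suggest\": [\"Nirvana\", \"Nevermind\"]}";

    // object form: explicit input, output (surface form), weight and 2.x-style context values
    static final String AS_OBJECT = "{\"suggest\": {"
        + "\"input\": [\"Nevermind\", \"Nirvana\"],"
        + "\"output\": \"Nirvana - Nevermind\","
        + "\"weight\": 34,"
        + "\"context\": {\"genre\": \"rock\"}}}";
}
```
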
parser.currentName(); - ContextMapping mapping = fieldType().getContextMapping().get(name); - if (mapping == null) { - throw new ElasticsearchParseException("context [{}] is not defined", name); - } else { - token = parser.nextToken(); - configs.put(name, mapping.parseContext(context, parser)); - } - } - contextConfig = new TreeMap<>(); - for (ContextMapping mapping : fieldType().getContextMapping().values()) { - ContextConfig config = configs.get(mapping.name()); - contextConfig.put(mapping.name(), config == null ? mapping.defaultConfig() : config); - } - } else { - throw new ElasticsearchParseException("context must be an object"); - } - } else if (Fields.CONTENT_FIELD_NAME_PAYLOAD.equals(currentFieldName)) { - if (!isStoringPayloads()) { - throw new MapperException("Payloads disabled in mapping"); - } - if (token == XContentParser.Token.START_OBJECT) { - XContentBuilder payloadBuilder = - XContentFactory.contentBuilder(parser.contentType()).copyCurrentStructure(parser); - payload = payloadBuilder.bytes().toBytesRef(); - payloadBuilder.close(); - } else if (token.isValue()) { - payload = parser.utf8BytesOrNull(); - } else { - throw new MapperException("payload doesn't support type " + token); - } - } else if (token == XContentParser.Token.VALUE_STRING) { - if (Fields.CONTENT_FIELD_NAME_OUTPUT.equals(currentFieldName)) { - surfaceForm = parser.text(); - } - if (Fields.CONTENT_FIELD_NAME_INPUT.equals(currentFieldName)) { - inputs.add(parser.text()); - } - if (Fields.CONTENT_FIELD_NAME_WEIGHT.equals(currentFieldName)) { - Number weightValue; - try { - weightValue = Long.parseLong(parser.text()); - } catch (NumberFormatException e) { - throw new IllegalArgumentException( - "Weight must be a string representing a numeric value, but was [" + parser.text() + "]"); - } - weight = weightValue.longValue(); // always parse a long to make sure we don't get overflow - checkWeight(weight); - } - } else if (token == XContentParser.Token.VALUE_NUMBER) { - if (Fields.CONTENT_FIELD_NAME_WEIGHT.equals(currentFieldName)) { - NumberType numberType = parser.numberType(); - if (NumberType.LONG != numberType && NumberType.INT != numberType) { - throw new IllegalArgumentException( - "Weight must be an integer, but was [" + parser.numberValue() + "]"); - } - weight = parser.longValue(); // always parse a long to make sure we don't get overflow - checkWeight(weight); - } - } else if (token == XContentParser.Token.START_ARRAY) { - if (Fields.CONTENT_FIELD_NAME_INPUT.equals(currentFieldName)) { - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - inputs.add(parser.text()); - } - } - } - } - } - - if (contextConfig == null) { - contextConfig = new TreeMap<>(); - for (ContextMapping mapping : fieldType().getContextMapping().values()) { - contextConfig.put(mapping.name(), mapping.defaultConfig()); - } - } - - final ContextMapping.Context ctx = new ContextMapping.Context(contextConfig, context.doc()); - - payload = payload == null ? 
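
The weight handling above is deliberately strict: a string weight must parse as an integral value, a numeric weight must be a JSON integer or long (not a float), and everything is first read as a `long` so a value past `Integer.MAX_VALUE` is detected rather than silently wrapped. A compact sketch of that rule:

```java
/** Standalone sketch of the suggestion-weight validation applied during parsing above. */
final class SuggestionWeight {

    static long parseStringWeight(String text) {
        final long weight;
        try {
            weight = Long.parseLong(text); // parse as long so overflow is still detectable
        } catch (NumberFormatException e) {
            throw new IllegalArgumentException(
                "Weight must be a string representing a numeric value, but was [" + text + "]");
        }
        checkWeight(weight);
        return weight;
    }

    static void checkWeight(long weight) {
        if (weight < 0 || weight > Integer.MAX_VALUE) {
            throw new IllegalArgumentException(
                "Weight must be in the interval [0..2147483647], but was [" + weight + "]");
        }
    }
}
```
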
EMPTY : payload; - if (surfaceForm == null) { // no surface form use the input - for (String input : inputs) { - if (input.length() == 0) { - continue; - } - BytesRef suggestPayload = fieldType().analyzingSuggestLookupProvider.buildPayload(new BytesRef( - input), weight, payload); - context.doc().add(getCompletionField(ctx, input, suggestPayload)); - } - } else { - BytesRef suggestPayload = fieldType().analyzingSuggestLookupProvider.buildPayload(new BytesRef( - surfaceForm), weight, payload); - for (String input : inputs) { - if (input.length() == 0) { - continue; - } - context.doc().add(getCompletionField(ctx, input, suggestPayload)); - } - } - return null; - } - - private void checkWeight(long weight) { - if (weight < 0 || weight > Integer.MAX_VALUE) { - throw new IllegalArgumentException("Weight must be in the interval [0..2147483647], but was [" + weight + "]"); - } - } - - public Field getCompletionField(ContextMapping.Context ctx, String input, BytesRef payload) { - final String originalInput = input; - if (input.length() > maxInputLength) { - final int len = correctSubStringLen(input, Math.min(maxInputLength, input.length())); - input = input.substring(0, len); - } - for (int i = 0; i < input.length(); i++) { - if (isReservedChar(input.charAt(i))) { - throw new IllegalArgumentException("Illegal input [" + originalInput + "] UTF-16 codepoint [0x" - + Integer.toHexString(input.charAt(i)).toUpperCase(Locale.ROOT) - + "] at position " + i + " is a reserved character"); - } - } - return new SuggestField( - fieldType().name(), ctx, input, fieldType(), payload, fieldType().analyzingSuggestLookupProvider); - } - - public static int correctSubStringLen(String input, int len) { - if (Character.isHighSurrogate(input.charAt(len - 1))) { - assert input.length() >= len + 1 && Character.isLowSurrogate(input.charAt(len)); - return len + 1; - } - return len; - } - - public BytesRef buildPayload(BytesRef surfaceForm, long weight, BytesRef payload) throws IOException { - return fieldType().analyzingSuggestLookupProvider.buildPayload(surfaceForm, weight, payload); - } - - private static final class SuggestField extends Field { - private final BytesRef payload; - private final CompletionTokenStream.ToFiniteStrings toFiniteStrings; - private final ContextMapping.Context ctx; - - public SuggestField(String name, ContextMapping.Context ctx, - String value, MappedFieldType type, BytesRef payload, - CompletionTokenStream.ToFiniteStrings toFiniteStrings) { - super(name, value, type); - this.payload = payload; - this.toFiniteStrings = toFiniteStrings; - this.ctx = ctx; - } - - @Override - public TokenStream tokenStream(Analyzer analyzer, TokenStream previous) { - TokenStream ts = ctx.wrapTokenStream(super.tokenStream(analyzer, previous)); - return new CompletionTokenStream(ts, payload, toFiniteStrings); - } - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(simpleName()) - .field(Fields.TYPE, CONTENT_TYPE); - - builder.field(Fields.ANALYZER, fieldType().indexAnalyzer().name()); - if (fieldType().indexAnalyzer().name().equals(fieldType().searchAnalyzer().name()) == false) { - builder.field(Fields.SEARCH_ANALYZER.getPreferredName(), fieldType().searchAnalyzer().name()); - } - builder.field(Fields.PAYLOADS, fieldType().analyzingSuggestLookupProvider.hasPayloads()); - builder.field(Fields.PRESERVE_SEPARATORS.getPreferredName(), - fieldType().analyzingSuggestLookupProvider.getPreserveSep()); - 
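
Two details of `getCompletionField` above are easy to miss: inputs longer than `max_input_length` are cut at a surrogate-safe position (the cut is extended by one char if it would otherwise split a UTF-16 surrogate pair), and any input containing one of the suggester's internal separator characters is rejected. A small runnable sketch of the truncation part; the emoji input is just an example:

```java
/** Demonstrates the surrogate-safe truncation used when an input exceeds max_input_length. */
public class SurrogateSafeTruncation {

    static int correctSubStringLen(String input, int len) {
        if (Character.isHighSurrogate(input.charAt(len - 1))) {
            // keep the pair together: the matching low surrogate is the next char
            return len + 1;
        }
        return len;
    }

    public static void main(String[] args) {
        String input = "ab\uD83D\uDE00cd";          // "ab😀cd" – the emoji occupies two UTF-16 chars
        int maxInputLength = 3;                      // a naive cut would end inside the surrogate pair
        int len = correctSubStringLen(input, Math.min(maxInputLength, input.length()));
        System.out.println(input.substring(0, len)); // prints "ab😀" instead of a broken character
    }
}
```
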
builder.field(Fields.PRESERVE_POSITION_INCREMENTS.getPreferredName(), - fieldType().analyzingSuggestLookupProvider.getPreservePositionsIncrements()); - builder.field(Fields.MAX_INPUT_LENGTH.getPreferredName(), this.maxInputLength); - multiFields.toXContent(builder, params); - - if (fieldType().requiresContext()) { - builder.startObject(Fields.CONTEXT); - for (ContextMapping mapping : fieldType().getContextMapping().values()) { - builder.value(mapping); - } - builder.endObject(); - } - - return builder.endObject(); - } - - @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { - } - - @Override - protected String contentType() { - return CONTENT_TYPE; - } - - public boolean isStoringPayloads() { - return fieldType().analyzingSuggestLookupProvider.hasPayloads(); - } - - @Override - protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { - super.doMerge(mergeWith, updateAllTypes); - CompletionFieldMapper2x fieldMergeWith = (CompletionFieldMapper2x) mergeWith; - this.maxInputLength = fieldMergeWith.maxInputLength; - } - - // this should be package private but our tests don't allow it. - public static boolean isReservedChar(char character) { - /* we use 0x001F as a SEP_LABEL in the suggester but we can use the UTF-16 representation since they - * are equivalent. We also don't need to convert the input character to UTF-8 here to check for - * the 0x00 end label since all multi-byte UTF-8 chars start with 0x10 binary so if the UTF-16 CP is == 0x00 - * it's the single byte UTF-8 CP */ - assert XAnalyzingSuggester.PAYLOAD_SEP == XAnalyzingSuggester.SEP_LABEL; // ensure they are the same! - switch (character) { - case XAnalyzingSuggester.END_BYTE: - case XAnalyzingSuggester.SEP_LABEL: - case XAnalyzingSuggester.HOLE_CHARACTER: - case ContextMapping.SEPARATOR: - return true; - default: - return false; - } - } -} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java index 9c76d852883..3681e4a55bc 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java @@ -19,17 +19,16 @@ package org.elasticsearch.index.mapper; -import org.apache.lucene.document.Field; import org.apache.lucene.document.StoredField; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.LongPoint; import org.apache.lucene.index.PointValues; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.Version; import org.elasticsearch.action.fieldstats.FieldStats; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Nullable; @@ -42,7 +41,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType; import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; -import org.elasticsearch.index.mapper.LegacyNumberFieldMapper.Defaults; import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.DocValueFormat; @@ -63,6 +61,10 @@ public class DateFieldMapper extends 
FieldMapper { public static final FormatDateTimeFormatter DEFAULT_DATE_TIME_FORMATTER = Joda.forPattern( "strict_date_optional_time||epoch_millis", Locale.ROOT); + public static class Defaults { + public static final Explicit IGNORE_MALFORMED = new Explicit<>(false, false); + } + public static class Builder extends FieldMapper.Builder { private Boolean ignoreMalformed; @@ -128,9 +130,6 @@ public class DateFieldMapper extends FieldMapper { @Override public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - if (parserContext.indexVersionCreated().before(Version.V_5_0_0_alpha2)) { - return new LegacyDateFieldMapper.TypeParser().parse(name, node, parserContext); - } Builder builder = new Builder(name); TypeParsers.parseField(builder, name, node, parserContext); for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { @@ -314,8 +313,7 @@ public class DateFieldMapper extends FieldMapper { @Override public Relation isFieldWithinQuery(IndexReader reader, - Object from, Object to, - boolean includeLower, boolean includeUpper, + Object from, Object to, boolean includeLower, boolean includeUpper, DateTimeZone timeZone, DateMathParser dateParser, QueryRewriteContext context) throws IOException { if (dateParser == null) { dateParser = this.dateMathParser; @@ -425,7 +423,7 @@ public class DateFieldMapper extends FieldMapper { } @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { + protected void parseCreateField(ParseContext context, List fields) throws IOException { String dateAsString; if (context.externalValueSet()) { Object dateAsObject = context.externalValue(); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index b23c189e5bd..c7ee704de4f 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -235,14 +235,6 @@ public class DocumentMapper implements ToXContent { return metadataMapper(ParentFieldMapper.class); } - public TimestampFieldMapper timestampFieldMapper() { - return metadataMapper(TimestampFieldMapper.class); - } - - public TTLFieldMapper TTLFieldMapper() { - return metadataMapper(TTLFieldMapper.class); - } - public IndexFieldMapper IndexFieldMapper() { return metadataMapper(IndexFieldMapper.class); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java index dd7335e1831..324b34fef9c 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java @@ -30,7 +30,6 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.DynamicTemplate.XContentFieldType; import org.elasticsearch.index.mapper.KeywordFieldMapper.KeywordFieldType; -import org.elasticsearch.index.mapper.StringFieldMapper.StringFieldType; import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType; import java.io.IOException; @@ -153,8 +152,6 @@ final class DocumentParser { context.sourceToParse().id(), context.sourceToParse().type(), source.routing(), - source.timestamp(), - source.ttl(), context.docs(), context.sourceToParse().source(), update @@ -636,35 +633,19 @@ final class DocumentParser { } private static 
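
The new `Defaults.IGNORE_MALFORMED` above uses `org.elasticsearch.common.Explicit`, which pairs a value with a flag saying whether the user set it; `new Explicit<>(false, false)` therefore means "false, and only by default", which matters when mappings are merged. A simplified stand-in showing the idea (not the real class):

```java
/** Simplified stand-in for Explicit<T>: a value plus whether it was set explicitly in the mapping. */
final class ExplicitSketch<T> {
    private final T value;
    private final boolean explicit;

    ExplicitSketch(T value, boolean explicit) {
        this.value = value;
        this.explicit = explicit;
    }

    T value()          { return value; }
    boolean explicit() { return explicit; }
}

final class DateDefaultsSketch {
    // "false, not explicit": a later mapping that sets ignore_malformed explicitly may still override it
    static final ExplicitSketch<Boolean> IGNORE_MALFORMED = new ExplicitSketch<>(false, false);
}
```
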
Mapper.Builder newLongBuilder(String name, Version indexCreated) { - if (indexCreated.onOrAfter(Version.V_5_0_0_alpha2)) { - return new NumberFieldMapper.Builder(name, NumberFieldMapper.NumberType.LONG); - } else { - return new LegacyLongFieldMapper.Builder(name); - } + return new NumberFieldMapper.Builder(name, NumberFieldMapper.NumberType.LONG); } private static Mapper.Builder newFloatBuilder(String name, Version indexCreated) { - if (indexCreated.onOrAfter(Version.V_5_0_0_alpha2)) { - return new NumberFieldMapper.Builder(name, NumberFieldMapper.NumberType.FLOAT); - } else { - return new LegacyFloatFieldMapper.Builder(name); - } + return new NumberFieldMapper.Builder(name, NumberFieldMapper.NumberType.FLOAT); } private static Mapper.Builder newDateBuilder(String name, FormatDateTimeFormatter dateTimeFormatter, Version indexCreated) { - if (indexCreated.onOrAfter(Version.V_5_0_0_alpha2)) { - DateFieldMapper.Builder builder = new DateFieldMapper.Builder(name); - if (dateTimeFormatter != null) { - builder.dateTimeFormatter(dateTimeFormatter); - } - return builder; - } else { - LegacyDateFieldMapper.Builder builder = new LegacyDateFieldMapper.Builder(name); - if (dateTimeFormatter != null) { - builder.dateTimeFormatter(dateTimeFormatter); - } - return builder; + DateFieldMapper.Builder builder = new DateFieldMapper.Builder(name); + if (dateTimeFormatter != null) { + builder.dateTimeFormatter(dateTimeFormatter); } + return builder; } private static Mapper.Builder createBuilderFromDynamicValue(final ParseContext context, XContentParser.Token token, String currentFieldName) throws IOException { @@ -859,7 +840,8 @@ final class DocumentParser { Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings(), context.path()); mapper = (ObjectMapper) builder.build(builderContext); if (mapper.nested() != ObjectMapper.Nested.NO) { - throw new MapperParsingException("It is forbidden to create dynamic nested objects ([" + context.path().pathAsText(paths[i]) + "]) through `copy_to`"); + throw new MapperParsingException("It is forbidden to create dynamic nested objects ([" + context.path().pathAsText(paths[i]) + + "]) through `copy_to` or dots in field names"); } context.addDynamicMapper(mapper); break; @@ -909,6 +891,11 @@ final class DocumentParser { return null; } objectMapper = (ObjectMapper)mapper; + if (objectMapper.nested().isNested()) { + throw new MapperParsingException("Cannot add a value for field [" + + fieldName + "] since one of the intermediate objects is mapped as a nested object: [" + + mapper.name() + "]"); + } } return objectMapper.getMapper(subfields[subfields.length - 1]); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index 9128002eb5a..b2152e41b75 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -24,6 +24,7 @@ import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.IndexableField; import org.elasticsearch.Version; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.lucene.Lucene; @@ -281,15 +282,15 @@ public abstract class FieldMapper extends Mapper implements Cloneable { * mappings were not modified. 
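
The two new `MapperParsingException`s above tighten how dots in field names interact with dynamic mapping: a dotted field name (or a `copy_to` target) may neither create nor traverse a `nested` object on the fly. A hedged sketch of the traversal check, using a minimal stand-in interface for the object mapper:

```java
/** Illustrative sketch of the new guard: dotted paths may not cross a nested object mapper. */
final class DottedPathGuard {

    interface ObjectMapperLike {
        boolean isNested();
        String name();
        ObjectMapperLike child(String name); // may return null if the sub-object is unmapped
    }

    static void checkPath(String fieldName, ObjectMapperLike root) {
        String[] parts = fieldName.split("\\.");
        ObjectMapperLike current = root;
        for (int i = 0; i < parts.length - 1 && current != null; i++) {
            current = current.child(parts[i]);
            if (current != null && current.isNested()) {
                throw new IllegalArgumentException("Cannot add a value for field [" + fieldName
                    + "] since one of the intermediate objects is mapped as a nested object: ["
                    + current.name() + "]");
            }
        }
    }
}
```
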
*/ public Mapper parse(ParseContext context) throws IOException { - final List fields = new ArrayList<>(2); + final List fields = new ArrayList<>(2); try { parseCreateField(context, fields); - for (Field field : fields) { + for (IndexableField field : fields) { if (!customBoost() // don't set boosts eg. on dv fields && field.fieldType().indexOptions() != IndexOptions.NONE && indexCreatedVersion.before(Version.V_5_0_0_alpha1)) { - field.setBoost(fieldType().boost()); + ((Field)(field)).setBoost(fieldType().boost()); } context.doc().add(field); } @@ -303,7 +304,7 @@ public abstract class FieldMapper extends Mapper implements Cloneable { /** * Parse the field value and populate fields. */ - protected abstract void parseCreateField(ParseContext context, List fields) throws IOException; + protected abstract void parseCreateField(ParseContext context, List fields) throws IOException; /** * Derived classes can override it to specify that boost value is set by derived classes. diff --git a/core/src/main/java/org/elasticsearch/index/mapper/FieldNamesFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/FieldNamesFieldMapper.java index 7343963f099..1b4a97a4660 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/FieldNamesFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/FieldNamesFieldMapper.java @@ -248,7 +248,7 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper { } @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { + protected void parseCreateField(ParseContext context, List fields) throws IOException { if (fieldType().isEnabled() == false) { return; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java deleted file mode 100644 index 655bf4aad02..00000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java +++ /dev/null @@ -1,135 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.document.FieldType; -import org.apache.lucene.index.DocValuesType; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.spatial.geopoint.document.GeoPointField; -import org.elasticsearch.Version; -import org.elasticsearch.common.Explicit; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.geo.GeoUtils; -import org.elasticsearch.common.settings.Settings; - -import java.io.IOException; -import java.util.Map; - -/** - * Parsing: We handle: - *
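
The recurring signature change in this diff swaps the concrete `Field` class for the `IndexableField` interface in `parseCreateField`, so mappers can hand back any Lucene field implementation; only the deprecated pre-5.0 index-time boost still needs the concrete class, hence the cast. A compile-time sketch of the new shape, where the field contents are illustrative rather than taken from any one mapper:

```java
import org.apache.lucene.document.Field;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;

import java.util.ArrayList;
import java.util.List;

/** Sketch of collecting IndexableField instead of Field, as the changed signatures above do. */
final class IndexableFieldCollection {

    static List<IndexableField> createFields(String name, long value, boolean stored) {
        List<IndexableField> fields = new ArrayList<>(2);
        fields.add(new SortedNumericDocValuesField(name, value)); // doc values only, not indexed
        if (stored) {
            fields.add(new StoredField(name, value));
        }
        return fields;
    }

    static void applyLegacyBoost(List<IndexableField> fields, float boost) {
        for (IndexableField field : fields) {
            // boosts only apply to indexed fields on pre-5.0 indices; doc-values-only fields are skipped
            if (field.fieldType().indexOptions() != IndexOptions.NONE && field instanceof Field) {
                ((Field) field).setBoost(boost);
            }
        }
    }
}
```
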

- * - "field" : "geo_hash" - * - "field" : "lat,lon" - * - "field" : { - * "lat" : 1.1, - * "lon" : 2.1 - * } - */ -public class GeoPointFieldMapper extends BaseGeoPointFieldMapper { - - public static final String CONTENT_TYPE = "geo_point"; - - public static class Defaults extends BaseGeoPointFieldMapper.Defaults { - - public static final GeoPointFieldType FIELD_TYPE = new LegacyGeoPointFieldType(); - - static { - FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); - FIELD_TYPE.setTokenized(false); - FIELD_TYPE.setOmitNorms(true); - FIELD_TYPE.setDocValuesType(DocValuesType.SORTED_NUMERIC); - FIELD_TYPE.setHasDocValues(true); - FIELD_TYPE.freeze(); - } - } - - /** - * Concrete builder for indexed GeoPointField type - */ - public static class Builder extends BaseGeoPointFieldMapper.Builder { - - public Builder(String name) { - super(name, Defaults.FIELD_TYPE); - this.builder = this; - } - - @Override - public GeoPointFieldMapper build(BuilderContext context, String simpleName, MappedFieldType fieldType, - MappedFieldType defaultFieldType, Settings indexSettings, FieldMapper latMapper, - FieldMapper lonMapper, FieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, - CopyTo copyTo) { - fieldType.setTokenized(false); - if (context.indexCreatedVersion().before(Version.V_2_3_0)) { - fieldType.setNumericPrecisionStep(GeoPointField.PRECISION_STEP); - fieldType.setNumericType(FieldType.LegacyNumericType.LONG); - } - setupFieldType(context); - return new GeoPointFieldMapper(simpleName, fieldType, defaultFieldType, indexSettings, latMapper, lonMapper, - geoHashMapper, multiFields, ignoreMalformed, copyTo); - } - - @Override - public GeoPointFieldMapper build(BuilderContext context) { - if (context.indexCreatedVersion().before(Version.V_2_3_0)) { - fieldType.setNumericPrecisionStep(GeoPointField.PRECISION_STEP); - fieldType.setNumericType(FieldType.LegacyNumericType.LONG); - } - return super.build(context); - } - } - - public static class TypeParser extends BaseGeoPointFieldMapper.TypeParser { - @Override - public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - return super.parse(name, node, parserContext); - } - } - - public GeoPointFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings, - FieldMapper latMapper, FieldMapper lonMapper, - FieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, CopyTo copyTo) { - super(simpleName, fieldType, defaultFieldType, indexSettings, latMapper, lonMapper, geoHashMapper, multiFields, - ignoreMalformed, copyTo); - } - - @Override - protected void parse(ParseContext context, GeoPoint point, String geoHash) throws IOException { - if (ignoreMalformed.value() == false) { - if (point.lat() > 90.0 || point.lat() < -90.0) { - throw new IllegalArgumentException("illegal latitude value [" + point.lat() + "] for " + name()); - } - if (point.lon() > 180.0 || point.lon() < -180) { - throw new IllegalArgumentException("illegal longitude value [" + point.lon() + "] for " + name()); - } - } else { - // LUCENE WATCH: This will be folded back into Lucene's GeoPointField - GeoUtils.normalizePoint(point); - } - if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { - context.doc().add(new GeoPointField(fieldType().name(), point.lat(), point.lon(), fieldType())); - } - super.parse(context, point, geoHash); - } - - @Override - public LegacyGeoPointFieldType fieldType() { - return (LegacyGeoPointFieldType) 
super.fieldType(); - } -} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java index 9c90dd44dbc..f584c216709 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.index.mapper; +import org.apache.lucene.index.IndexableField; import org.locationtech.spatial4j.shape.Point; import org.locationtech.spatial4j.shape.Shape; import org.locationtech.spatial4j.shape.jts.JtsGeometry; @@ -462,7 +463,7 @@ public class GeoShapeFieldMapper extends FieldMapper { } @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { + protected void parseCreateField(ParseContext context, List fields) throws IOException { } @Override diff --git a/core/src/main/java/org/elasticsearch/index/mapper/IdFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/IdFieldMapper.java index d70a50eede9..1b208421a8e 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/IdFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/IdFieldMapper.java @@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.Term; import org.apache.lucene.queries.TermsQuery; import org.apache.lucene.search.BooleanClause; @@ -135,7 +136,7 @@ public class IdFieldMapper extends MetadataFieldMapper { public void postParse(ParseContext context) throws IOException {} @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException {} + protected void parseCreateField(ParseContext context, List fields) throws IOException {} @Override protected String contentType() { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java index e4a1f5ec5eb..72a7244976d 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/IndexFieldMapper.java @@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; @@ -175,7 +176,7 @@ public class IndexFieldMapper extends MetadataFieldMapper { public void postParse(ParseContext context) throws IOException {} @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException {} + protected void parseCreateField(ParseContext context, List fields) throws IOException {} @Override protected String contentType() { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java index 4be8de2056f..9ccca4db479 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java @@ -25,6 +25,7 @@ import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.document.StoredField; import org.apache.lucene.index.IndexOptions; import 
org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.PointValues; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; @@ -38,7 +39,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; -import org.elasticsearch.index.mapper.LegacyNumberFieldMapper.Defaults; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.DocValueFormat; import org.joda.time.DateTimeZone; @@ -54,6 +54,10 @@ public class IpFieldMapper extends FieldMapper { public static final String CONTENT_TYPE = "ip"; + public static class Defaults { + public static final Explicit IGNORE_MALFORMED = new Explicit<>(false, false); + } + public static class Builder extends FieldMapper.Builder { private Boolean ignoreMalformed; @@ -93,9 +97,6 @@ public class IpFieldMapper extends FieldMapper { @Override public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - if (parserContext.indexVersionCreated().before(Version.V_5_0_0_alpha2)) { - return new LegacyIpFieldMapper.TypeParser().parse(name, node, parserContext); - } Builder builder = new Builder(name); TypeParsers.parseField(builder, name, node, parserContext); for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { @@ -285,7 +286,7 @@ public class IpFieldMapper extends FieldMapper { } @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { + protected void parseCreateField(ParseContext context, List fields) throws IOException { Object addressAsObject; if (context.externalValueSet()) { addressAsObject = context.externalValue(); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index 1ba913790a4..68807215027 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -22,9 +22,9 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.document.Field; import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.Version; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -33,14 +33,10 @@ import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; import java.io.IOException; -import java.util.Arrays; -import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.Set; -import static java.util.Collections.unmodifiableList; import static org.elasticsearch.index.mapper.TypeParsers.parseField; /** @@ -50,12 +46,6 @@ public final class KeywordFieldMapper extends FieldMapper { public static final String CONTENT_TYPE = "keyword"; - private static final List SUPPORTED_PARAMETERS_FOR_AUTO_DOWNGRADE_TO_STRING = unmodifiableList(Arrays.asList( - "type", - // common keyword parameters, for which the upgrade is 
straightforward - "index", "store", "doc_values", "omit_norms", "norms", "boost", "fields", "copy_to", - "include_in_all", "ignore_above", "index_options", "similarity")); - public static class Defaults { public static final MappedFieldType FIELD_TYPE = new KeywordFieldType(); @@ -114,29 +104,6 @@ public final class KeywordFieldMapper extends FieldMapper { public static class TypeParser implements Mapper.TypeParser { @Override public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - if (parserContext.indexVersionCreated().before(Version.V_5_0_0_alpha1)) { - // Downgrade "keyword" to "string" in indexes created in 2.x so you can use modern syntax against old indexes - Set unsupportedParameters = new HashSet<>(node.keySet()); - unsupportedParameters.removeAll(SUPPORTED_PARAMETERS_FOR_AUTO_DOWNGRADE_TO_STRING); - if (false == SUPPORTED_PARAMETERS_FOR_AUTO_DOWNGRADE_TO_STRING.containsAll(node.keySet())) { - throw new IllegalArgumentException("Automatic downgrade from [keyword] to [string] failed because parameters " - + unsupportedParameters + " are not supported for automatic downgrades."); - } - { // Downgrade "index" - Object index = node.get("index"); - if (index == null || Boolean.TRUE.equals(index)) { - index = "not_analyzed"; - } else if (Boolean.FALSE.equals(index)) { - index = "no"; - } else { - throw new IllegalArgumentException( - "Can't parse [index] value [" + index + "] for field [" + name + "], expected [true] or [false]"); - } - node.put("index", index); - } - - return new StringFieldMapper.TypeParser().parse(name, node, parserContext); - } KeywordFieldMapper.Builder builder = new KeywordFieldMapper.Builder(name); parseField(builder, name, node, parserContext); for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { @@ -235,7 +202,7 @@ public final class KeywordFieldMapper extends FieldMapper { } @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { + protected void parseCreateField(ParseContext context, List fields) throws IOException { final String value; if (context.externalValueSet()) { value = context.externalValue().toString(); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/LatLonPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/LatLonPointFieldMapper.java index 647dd315a20..f5579751e5b 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/LatLonPointFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/LatLonPointFieldMapper.java @@ -23,7 +23,6 @@ import org.apache.lucene.document.LatLonPoint; import org.apache.lucene.document.StoredField; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.search.Query; -import org.elasticsearch.Version; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; @@ -43,7 +42,6 @@ import java.util.Map; */ public class LatLonPointFieldMapper extends BaseGeoPointFieldMapper { public static final String CONTENT_TYPE = "geo_point"; - public static final Version LAT_LON_FIELD_VERSION = Version.V_5_0_0_beta1; public static class Defaults extends BaseGeoPointFieldMapper.Defaults { public static final LatLonPointFieldType FIELD_TYPE = new LatLonPointFieldType(); @@ -59,6 +57,7 @@ public class LatLonPointFieldMapper extends BaseGeoPointFieldMapper { public static class Builder extends BaseGeoPointFieldMapper.Builder { public Builder(String name) { super(name, 
Defaults.FIELD_TYPE); + builder = this; } @Override diff --git a/core/src/main/java/org/elasticsearch/index/mapper/LegacyByteFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/LegacyByteFieldMapper.java deleted file mode 100644 index 96bde70ae82..00000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/LegacyByteFieldMapper.java +++ /dev/null @@ -1,327 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.index.mapper; - -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.document.Field; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.Terms; -import org.apache.lucene.search.LegacyNumericRangeQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.LegacyNumericUtils; -import org.elasticsearch.Version; -import org.elasticsearch.action.fieldstats.FieldStats; -import org.elasticsearch.common.Explicit; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType; -import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; -import org.elasticsearch.index.query.QueryShardContext; - -import java.io.IOException; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeByteValue; -import static org.elasticsearch.index.mapper.TypeParsers.parseNumberField; - -public class LegacyByteFieldMapper extends LegacyNumberFieldMapper { - - public static final String CONTENT_TYPE = "byte"; - - public static class Defaults extends LegacyNumberFieldMapper.Defaults { - public static final MappedFieldType FIELD_TYPE = new ByteFieldType(); - - static { - FIELD_TYPE.freeze(); - } - } - - public static class Builder extends LegacyNumberFieldMapper.Builder { - - public Builder(String name) { - super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_8_BIT); - builder = this; - } - - @Override - public LegacyByteFieldMapper build(BuilderContext context) { - if (context.indexCreatedVersion().onOrAfter(Version.V_5_0_0_alpha2)) { - throw new IllegalStateException("Cannot use legacy numeric types after 5.0"); - } - setupFieldType(context); - return new LegacyByteFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), - coerce(context), includeInAll, context.indexSettings(), multiFieldsBuilder.build(this, context), 
copyTo); - } - - @Override - protected int maxPrecisionStep() { - return 32; - } - } - - public static class TypeParser implements Mapper.TypeParser { - @Override - public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - LegacyByteFieldMapper.Builder builder = new LegacyByteFieldMapper.Builder(name); - parseNumberField(builder, name, node, parserContext); - for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { - Map.Entry entry = iterator.next(); - String propName = entry.getKey(); - Object propNode = entry.getValue(); - if (propName.equals("null_value")) { - if (propNode == null) { - throw new MapperParsingException("Property [null_value] cannot be null."); - } - builder.nullValue(nodeByteValue(propNode)); - iterator.remove(); - } - } - return builder; - } - } - - static final class ByteFieldType extends NumberFieldType { - public ByteFieldType() { - super(LegacyNumericType.INT); - } - - protected ByteFieldType(ByteFieldType ref) { - super(ref); - } - - @Override - public NumberFieldType clone() { - return new ByteFieldType(this); - } - - @Override - public String typeName() { - return CONTENT_TYPE; - } - - @Override - public Byte nullValue() { - return (Byte)super.nullValue(); - } - - @Override - public Byte valueForDisplay(Object value) { - if (value == null) { - return null; - } - return ((Number) value).byteValue(); - } - - @Override - public BytesRef indexedValueForSearch(Object value) { - BytesRefBuilder bytesRef = new BytesRefBuilder(); - LegacyNumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match - return bytesRef.get(); - } - - @Override - public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) { - return LegacyNumericRangeQuery.newIntRange(name(), numericPrecisionStep(), - lowerTerm == null ? null : (int)parseValue(lowerTerm), - upperTerm == null ? 
null : (int)parseValue(upperTerm), - includeLower, includeUpper); - } - - @Override - public FieldStats.Long stats(IndexReader reader) throws IOException { - int maxDoc = reader.maxDoc(); - Terms terms = org.apache.lucene.index.MultiFields.getTerms(reader, name()); - if (terms == null) { - return null; - } - long minValue = LegacyNumericUtils.getMinInt(terms); - long maxValue = LegacyNumericUtils.getMaxInt(terms); - return new FieldStats.Long(maxDoc, terms.getDocCount(), - terms.getSumDocFreq(), terms.getSumTotalTermFreq(), isSearchable(), isAggregatable(), - minValue, maxValue); - } - - @Override - public IndexFieldData.Builder fielddataBuilder() { - failIfNoDocValues(); - return new DocValuesIndexFieldData.Builder().numericType(NumericType.BYTE); - } - } - - protected LegacyByteFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, - Explicit ignoreMalformed, Explicit coerce, Boolean includeInAll, - Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, includeInAll, indexSettings, multiFields, copyTo); - } - - @Override - public ByteFieldType fieldType() { - return (ByteFieldType) super.fieldType(); - } - - private static byte parseValue(Object value) { - if (value instanceof Number) { - return ((Number) value).byteValue(); - } - if (value instanceof BytesRef) { - return Byte.parseByte(((BytesRef) value).utf8ToString()); - } - return Byte.parseByte(value.toString()); - } - - @Override - protected boolean customBoost() { - return true; - } - - @Override - protected void innerParseCreateField(ParseContext context, List fields) throws IOException { - byte value; - float boost = fieldType().boost(); - if (context.externalValueSet()) { - Object externalValue = context.externalValue(); - if (externalValue == null) { - if (fieldType().nullValue() == null) { - return; - } - value = fieldType().nullValue(); - } else if (externalValue instanceof String) { - String sExternalValue = (String) externalValue; - if (sExternalValue.length() == 0) { - if (fieldType().nullValue() == null) { - return; - } - value = fieldType().nullValue(); - } else { - value = Byte.parseByte(sExternalValue); - } - } else { - value = ((Number) externalValue).byteValue(); - } - if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().name(), Byte.toString(value), boost); - } - } else { - XContentParser parser = context.parser(); - if (parser.currentToken() == XContentParser.Token.VALUE_NULL || - (parser.currentToken() == XContentParser.Token.VALUE_STRING && parser.textLength() == 0)) { - if (fieldType().nullValue() == null) { - return; - } - value = fieldType().nullValue(); - if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) { - context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost); - } - } else if (parser.currentToken() == XContentParser.Token.START_OBJECT - && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - XContentParser.Token token; - String currentFieldName = null; - Byte objValue = fieldType().nullValue(); - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else { - if ("value".equals(currentFieldName) || "_value".equals(currentFieldName)) { - if (parser.currentToken() != XContentParser.Token.VALUE_NULL) { - objValue = (byte) 
parser.shortValue(coerce.value()); - } - } else if ("boost".equals(currentFieldName) || "_boost".equals(currentFieldName)) { - boost = parser.floatValue(); - } else { - throw new IllegalArgumentException("unknown property [" + currentFieldName + "]"); - } - } - } - if (objValue == null) { - // no value - return; - } - value = objValue; - } else { - value = (byte) parser.shortValue(coerce.value()); - if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().name(), parser.text(), boost); - } - } - } - if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { - CustomByteNumericField field = new CustomByteNumericField(value, fieldType()); - if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - field.setBoost(boost); - } - fields.add(field); - } - if (fieldType().hasDocValues()) { - addDocValue(context, fields, value); - } - } - - @Override - protected String contentType() { - return CONTENT_TYPE; - } - - @Override - protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { - super.doXContentBody(builder, includeDefaults, params); - - if (includeDefaults || fieldType().numericPrecisionStep() != Defaults.PRECISION_STEP_8_BIT) { - builder.field("precision_step", fieldType().numericPrecisionStep()); - } - if (includeDefaults || fieldType().nullValue() != null) { - builder.field("null_value", fieldType().nullValue()); - } - if (includeInAll != null) { - builder.field("include_in_all", includeInAll); - } else if (includeDefaults) { - builder.field("include_in_all", false); - } - } - - public static class CustomByteNumericField extends CustomNumericField { - - private final byte number; - - public CustomByteNumericField(byte number, MappedFieldType fieldType) { - super(number, fieldType); - this.number = number; - } - - @Override - public TokenStream tokenStream(Analyzer analyzer, TokenStream previous) { - if (fieldType().indexOptions() != IndexOptions.NONE) { - return getCachedStream().setIntValue(number); - } - return null; - } - - @Override - public String numericAsString() { - return Byte.toString(number); - } - } -} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/LegacyDateFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/LegacyDateFieldMapper.java deleted file mode 100644 index 328d16e1e95..00000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/LegacyDateFieldMapper.java +++ /dev/null @@ -1,528 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.document.Field; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.Terms; -import org.apache.lucene.search.LegacyNumericRangeQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.LegacyNumericUtils; -import org.elasticsearch.Version; -import org.elasticsearch.action.fieldstats.FieldStats; -import org.elasticsearch.common.Explicit; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.joda.DateMathParser; -import org.elasticsearch.common.joda.FormatDateTimeFormatter; -import org.elasticsearch.common.joda.Joda; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.LocaleUtils; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType; -import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; -import org.elasticsearch.index.mapper.LegacyLongFieldMapper.CustomLongNumericField; -import org.elasticsearch.index.query.QueryRewriteContext; -import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.search.DocValueFormat; -import org.joda.time.DateTimeZone; - -import java.io.IOException; -import java.util.Iterator; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.Objects; -import java.util.concurrent.TimeUnit; - -import static org.elasticsearch.index.mapper.TypeParsers.parseDateTimeFormatter; -import static org.elasticsearch.index.mapper.TypeParsers.parseNumberField; - -public class LegacyDateFieldMapper extends LegacyNumberFieldMapper { - - public static final String CONTENT_TYPE = "date"; - - public static class Defaults extends LegacyNumberFieldMapper.Defaults { - public static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern("strict_date_optional_time||epoch_millis", Locale.ROOT); - public static final TimeUnit TIME_UNIT = TimeUnit.MILLISECONDS; - public static final DateFieldType FIELD_TYPE = new DateFieldType(); - - static { - FIELD_TYPE.freeze(); - } - - public static final String NULL_VALUE = null; - } - - public static class Builder extends LegacyNumberFieldMapper.Builder { - - protected String nullValue = Defaults.NULL_VALUE; - - private Locale locale; - - public Builder(String name) { - super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_64_BIT); - builder = this; - // do *NOT* rely on the default locale - locale = Locale.ROOT; - } - - @Override - public DateFieldType fieldType() { - return (DateFieldType)fieldType; - } - - public Builder timeUnit(TimeUnit timeUnit) { - fieldType().setTimeUnit(timeUnit); - return this; - } - - public Builder nullValue(String nullValue) { - this.nullValue = nullValue; - return this; - } - - public Builder dateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) { - fieldType().setDateTimeFormatter(dateTimeFormatter); - return this; - } - - @Override - public LegacyDateFieldMapper build(BuilderContext context) { - if (context.indexCreatedVersion().onOrAfter(Version.V_5_0_0_alpha2)) { - throw new IllegalStateException("Cannot use legacy numeric types after 5.0"); - } - setupFieldType(context); - fieldType.setNullValue(nullValue); - return new 
LegacyDateFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), - coerce(context), includeInAll, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); - } - - @Override - protected void setupFieldType(BuilderContext context) { - FormatDateTimeFormatter dateTimeFormatter = fieldType().dateTimeFormatter; - if (!locale.equals(dateTimeFormatter.locale())) { - fieldType().setDateTimeFormatter(new FormatDateTimeFormatter(dateTimeFormatter.format(), dateTimeFormatter.parser(), dateTimeFormatter.printer(), locale)); - } - super.setupFieldType(context); - } - - public Builder locale(Locale locale) { - this.locale = locale; - return this; - } - - @Override - protected int maxPrecisionStep() { - return 64; - } - } - - public static class TypeParser implements Mapper.TypeParser { - @Override - public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - LegacyDateFieldMapper.Builder builder = new LegacyDateFieldMapper.Builder(name); - parseNumberField(builder, name, node, parserContext); - boolean configuredFormat = false; - for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { - Map.Entry entry = iterator.next(); - String propName = entry.getKey(); - Object propNode = entry.getValue(); - if (propName.equals("null_value")) { - if (propNode == null) { - throw new MapperParsingException("Property [null_value] cannot be null."); - } - builder.nullValue(propNode.toString()); - iterator.remove(); - } else if (propName.equals("format")) { - builder.dateTimeFormatter(parseDateTimeFormatter(propNode)); - configuredFormat = true; - iterator.remove(); - } else if (propName.equals("numeric_resolution")) { - builder.timeUnit(TimeUnit.valueOf(propNode.toString().toUpperCase(Locale.ROOT))); - iterator.remove(); - } else if (propName.equals("locale")) { - builder.locale(LocaleUtils.parse(propNode.toString())); - iterator.remove(); - } - } - if (!configuredFormat) { - builder.dateTimeFormatter(Defaults.DATE_TIME_FORMATTER); - } - return builder; - } - } - - public static class DateFieldType extends NumberFieldType { - - protected FormatDateTimeFormatter dateTimeFormatter = Defaults.DATE_TIME_FORMATTER; - protected TimeUnit timeUnit = Defaults.TIME_UNIT; - protected DateMathParser dateMathParser = new DateMathParser(dateTimeFormatter); - - public DateFieldType() { - super(LegacyNumericType.LONG); - } - - protected DateFieldType(DateFieldType ref) { - super(ref); - this.dateTimeFormatter = ref.dateTimeFormatter; - this.timeUnit = ref.timeUnit; - this.dateMathParser = ref.dateMathParser; - } - - @Override - public DateFieldType clone() { - return new DateFieldType(this); - } - - @Override - public boolean equals(Object o) { - if (!super.equals(o)) return false; - DateFieldType that = (DateFieldType) o; - return Objects.equals(dateTimeFormatter.format(), that.dateTimeFormatter.format()) && - Objects.equals(dateTimeFormatter.locale(), that.dateTimeFormatter.locale()) && - Objects.equals(timeUnit, that.timeUnit); - } - - @Override - public int hashCode() { - return Objects.hash(super.hashCode(), dateTimeFormatter.format(), timeUnit); - } - - @Override - public String typeName() { - return CONTENT_TYPE; - } - - @Override - public void checkCompatibility(MappedFieldType fieldType, List conflicts, boolean strict) { - super.checkCompatibility(fieldType, conflicts, strict); - if (strict) { - DateFieldType other = (DateFieldType)fieldType; - if (Objects.equals(dateTimeFormatter().format(), 
other.dateTimeFormatter().format()) == false) { - conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [format] across all types."); - } - if (Objects.equals(dateTimeFormatter().locale(), other.dateTimeFormatter().locale()) == false) { - conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [locale] across all types."); - } - if (Objects.equals(timeUnit(), other.timeUnit()) == false) { - conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [numeric_resolution] across all types."); - } - } - } - - public FormatDateTimeFormatter dateTimeFormatter() { - return dateTimeFormatter; - } - - public void setDateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) { - checkIfFrozen(); - this.dateTimeFormatter = dateTimeFormatter; - this.dateMathParser = new DateMathParser(dateTimeFormatter); - } - - public TimeUnit timeUnit() { - return timeUnit; - } - - public void setTimeUnit(TimeUnit timeUnit) { - checkIfFrozen(); - this.timeUnit = timeUnit; - this.dateMathParser = new DateMathParser(dateTimeFormatter); - } - - protected DateMathParser dateMathParser() { - return dateMathParser; - } - - private long parseValue(Object value) { - if (value instanceof Number) { - return ((Number) value).longValue(); - } - if (value instanceof BytesRef) { - return dateTimeFormatter().parser().parseMillis(((BytesRef) value).utf8ToString()); - } - return dateTimeFormatter().parser().parseMillis(value.toString()); - } - - protected long parseStringValue(String value) { - return dateTimeFormatter().parser().parseMillis(value); - } - - @Override - public BytesRef indexedValueForSearch(Object value) { - BytesRefBuilder bytesRef = new BytesRefBuilder(); - LegacyNumericUtils.longToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match - return bytesRef.get(); - } - - @Override - public Object valueForDisplay(Object value) { - Long val = (Long) value; - if (val == null) { - return null; - } - return dateTimeFormatter().printer().print(val); - } - - @Override - public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) { - return rangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, null, null, context); - } - - @Override - public FieldStats.Date stats(IndexReader reader) throws IOException { - int maxDoc = reader.maxDoc(); - Terms terms = org.apache.lucene.index.MultiFields.getTerms(reader, name()); - if (terms == null) { - return null; - } - long minValue = LegacyNumericUtils.getMinLong(terms); - long maxValue = LegacyNumericUtils.getMaxLong(terms); - return new FieldStats.Date(maxDoc, terms.getDocCount(), - terms.getSumDocFreq(), terms.getSumTotalTermFreq(), isSearchable(), isAggregatable(), - dateTimeFormatter(), minValue, maxValue); - } - - public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, - @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser, QueryShardContext context) { - return innerRangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser, context); - } - - private Query innerRangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, - @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser, QueryRewriteContext context) { - return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(), - lowerTerm 
== null ? null - : parseToMilliseconds(lowerTerm, !includeLower, timeZone, - forcedDateParser == null ? dateMathParser : forcedDateParser, context), - upperTerm == null ? null - : parseToMilliseconds(upperTerm, includeUpper, timeZone, - forcedDateParser == null ? dateMathParser : forcedDateParser, context), - includeLower, includeUpper); - } - - @Override - public Relation isFieldWithinQuery(IndexReader reader, - Object from, Object to, - boolean includeLower, boolean includeUpper, - DateTimeZone timeZone, DateMathParser dateParser, QueryRewriteContext context) throws IOException { - if (dateParser == null) { - dateParser = this.dateMathParser; - } - - Terms terms = org.apache.lucene.index.MultiFields.getTerms(reader, name()); - if (terms == null) { - // no terms, so nothing matches - return Relation.DISJOINT; - } - - long minValue = LegacyNumericUtils.getMinLong(terms); - long maxValue = LegacyNumericUtils.getMaxLong(terms); - - long fromInclusive = Long.MIN_VALUE; - if (from != null) { - fromInclusive = parseToMilliseconds(from, !includeLower, timeZone, dateParser, context); - if (includeLower == false) { - if (fromInclusive == Long.MAX_VALUE) { - return Relation.DISJOINT; - } - ++fromInclusive; - } - } - - long toInclusive = Long.MAX_VALUE; - if (to != null) { - toInclusive = parseToMilliseconds(to, includeUpper, timeZone, dateParser, context); - if (includeUpper == false) { - if (toInclusive == Long.MIN_VALUE) { - return Relation.DISJOINT; - } - --toInclusive; - } - } - - if (minValue >= fromInclusive && maxValue <= toInclusive) { - return Relation.WITHIN; - } else if (maxValue < fromInclusive || minValue > toInclusive) { - return Relation.DISJOINT; - } else { - return Relation.INTERSECTS; - } - } - - public long parseToMilliseconds(Object value, boolean inclusive, @Nullable DateTimeZone zone, - @Nullable DateMathParser forcedDateParser, QueryRewriteContext context) { - if (value instanceof Long) { - return ((Long) value).longValue(); - } - - DateMathParser dateParser = dateMathParser(); - if (forcedDateParser != null) { - dateParser = forcedDateParser; - } - - String strValue; - if (value instanceof BytesRef) { - strValue = ((BytesRef) value).utf8ToString(); - } else { - strValue = value.toString(); - } - return dateParser.parse(strValue, context::nowInMillis, inclusive, zone); - } - - @Override - public IndexFieldData.Builder fielddataBuilder() { - failIfNoDocValues(); - return new DocValuesIndexFieldData.Builder().numericType(NumericType.LONG); - } - - @Override - public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZone) { - FormatDateTimeFormatter dateTimeFormatter = this.dateTimeFormatter; - if (format != null) { - dateTimeFormatter = Joda.forPattern(format); - } - if (timeZone == null) { - timeZone = DateTimeZone.UTC; - } - return new DocValueFormat.DateTime(dateTimeFormatter, timeZone); - } - } - - protected LegacyDateFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Explicit ignoreMalformed,Explicit coerce, - Boolean includeInAll, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, includeInAll, indexSettings, multiFields, copyTo); - } - - @Override - public DateFieldType fieldType() { - return (DateFieldType) super.fieldType(); - } - - @Override - protected boolean customBoost() { - return true; - } - - @Override - protected void innerParseCreateField(ParseContext context, List fields) throws IOException { - String 
dateAsString = null; - float boost = fieldType().boost(); - if (context.externalValueSet()) { - Object externalValue = context.externalValue(); - dateAsString = (String) externalValue; - if (dateAsString == null) { - dateAsString = fieldType().nullValueAsString(); - } - } else { - XContentParser parser = context.parser(); - XContentParser.Token token = parser.currentToken(); - if (token == XContentParser.Token.VALUE_NULL) { - dateAsString = fieldType().nullValueAsString(); - } else if (token == XContentParser.Token.VALUE_NUMBER) { - dateAsString = parser.text(); - } else if (token == XContentParser.Token.START_OBJECT - && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - String currentFieldName = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else { - if ("value".equals(currentFieldName) || "_value".equals(currentFieldName)) { - if (token == XContentParser.Token.VALUE_NULL) { - dateAsString = fieldType().nullValueAsString(); - } else { - dateAsString = parser.text(); - } - } else if ("boost".equals(currentFieldName) || "_boost".equals(currentFieldName)) { - boost = parser.floatValue(); - } else { - throw new IllegalArgumentException("unknown property [" + currentFieldName + "]"); - } - } - } - } else { - dateAsString = parser.text(); - } - } - - Long value = null; - if (dateAsString != null) { - if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().name(), dateAsString, boost); - } - value = fieldType().parseStringValue(dateAsString); - } - - if (value != null) { - if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { - CustomLongNumericField field = new CustomLongNumericField(value, fieldType()); - if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - field.setBoost(boost); - } - fields.add(field); - } - if (fieldType().hasDocValues()) { - addDocValue(context, fields, value); - } - } - } - - @Override - protected String contentType() { - return CONTENT_TYPE; - } - - @Override - protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { - super.doXContentBody(builder, includeDefaults, params); - - if (includeDefaults || fieldType().numericPrecisionStep() != Defaults.PRECISION_STEP_64_BIT) { - builder.field("precision_step", fieldType().numericPrecisionStep()); - } - builder.field("format", fieldType().dateTimeFormatter().format()); - if (includeDefaults || fieldType().nullValueAsString() != null) { - builder.field("null_value", fieldType().nullValueAsString()); - } - if (includeInAll != null) { - builder.field("include_in_all", includeInAll); - } else if (includeDefaults) { - builder.field("include_in_all", false); - } - - if (includeDefaults || fieldType().timeUnit() != Defaults.TIME_UNIT) { - builder.field("numeric_resolution", fieldType().timeUnit().name().toLowerCase(Locale.ROOT)); - } - // only serialize locale if needed, ROOT is the default, so no need to serialize that case as well... 
- if (fieldType().dateTimeFormatter().locale() != null && fieldType().dateTimeFormatter().locale() != Locale.ROOT) { - builder.field("locale", fieldType().dateTimeFormatter().locale()); - } else if (includeDefaults) { - if (fieldType().dateTimeFormatter().locale() == null) { - builder.field("locale", Locale.ROOT); - } else { - builder.field("locale", fieldType().dateTimeFormatter().locale()); - } - } - } -} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/LegacyDoubleFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/LegacyDoubleFieldMapper.java deleted file mode 100644 index c0a6dd8bacf..00000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/LegacyDoubleFieldMapper.java +++ /dev/null @@ -1,330 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.document.Field; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.Terms; -import org.apache.lucene.search.LegacyNumericRangeQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.LegacyNumericUtils; -import org.apache.lucene.util.NumericUtils; -import org.elasticsearch.Version; -import org.elasticsearch.action.fieldstats.FieldStats; -import org.elasticsearch.common.Explicit; -import org.elasticsearch.common.Numbers; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType; -import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; -import org.elasticsearch.index.query.QueryShardContext; - -import java.io.IOException; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeDoubleValue; -import static org.elasticsearch.index.mapper.TypeParsers.parseNumberField; - -public class LegacyDoubleFieldMapper extends LegacyNumberFieldMapper { - - public static final String CONTENT_TYPE = "double"; - - public static class Defaults extends LegacyNumberFieldMapper.Defaults { - public static final MappedFieldType FIELD_TYPE = new DoubleFieldType(); - - static { - FIELD_TYPE.freeze(); - } - } - - public static class Builder extends LegacyNumberFieldMapper.Builder { - - public Builder(String name) { - super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_64_BIT); - builder = 
this; - } - - @Override - public LegacyDoubleFieldMapper build(BuilderContext context) { - if (context.indexCreatedVersion().onOrAfter(Version.V_5_0_0_alpha2)) { - throw new IllegalStateException("Cannot use legacy numeric types after 5.0"); - } - setupFieldType(context); - return new LegacyDoubleFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context), - includeInAll, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); - } - - @Override - protected int maxPrecisionStep() { - return 64; - } - } - - public static class TypeParser implements Mapper.TypeParser { - @Override - public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - LegacyDoubleFieldMapper.Builder builder = new LegacyDoubleFieldMapper.Builder(name); - parseNumberField(builder, name, node, parserContext); - for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { - Map.Entry entry = iterator.next(); - String propName = entry.getKey(); - Object propNode = entry.getValue(); - if (propName.equals("nullValue") || propName.equals("null_value")) { - if (propNode == null) { - throw new MapperParsingException("Property [null_value] cannot be null."); - } - builder.nullValue(nodeDoubleValue(propNode)); - iterator.remove(); - } - } - return builder; - } - } - - public static final class DoubleFieldType extends NumberFieldType { - - public DoubleFieldType() { - super(LegacyNumericType.DOUBLE); - } - - protected DoubleFieldType(DoubleFieldType ref) { - super(ref); - } - - @Override - public NumberFieldType clone() { - return new DoubleFieldType(this); - } - - @Override - public String typeName() { - return CONTENT_TYPE; - } - - @Override - public java.lang.Double nullValue() { - return (java.lang.Double)super.nullValue(); - } - - @Override - public java.lang.Double valueForDisplay(Object value) { - if (value == null) { - return null; - } - if (value instanceof Number) { - return ((Number) value).doubleValue(); - } - if (value instanceof BytesRef) { - return Numbers.bytesToDouble((BytesRef) value); - } - return java.lang.Double.parseDouble(value.toString()); - } - - @Override - public BytesRef indexedValueForSearch(Object value) { - long longValue = NumericUtils.doubleToSortableLong(parseDoubleValue(value)); - BytesRefBuilder bytesRef = new BytesRefBuilder(); - LegacyNumericUtils.longToPrefixCoded(longValue, 0, bytesRef); // 0 because of exact match - return bytesRef.get(); - } - - @Override - public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) { - return LegacyNumericRangeQuery.newDoubleRange(name(), numericPrecisionStep(), - lowerTerm == null ? null : parseDoubleValue(lowerTerm), - upperTerm == null ? 
null : parseDoubleValue(upperTerm), - includeLower, includeUpper); - } - - @Override - public FieldStats.Double stats(IndexReader reader) throws IOException { - int maxDoc = reader.maxDoc(); - Terms terms = org.apache.lucene.index.MultiFields.getTerms(reader, name()); - if (terms == null) { - return null; - } - double minValue = NumericUtils.sortableLongToDouble(LegacyNumericUtils.getMinLong(terms)); - double maxValue = NumericUtils.sortableLongToDouble(LegacyNumericUtils.getMaxLong(terms)); - return new FieldStats.Double(maxDoc, terms.getDocCount(), - terms.getSumDocFreq(), terms.getSumTotalTermFreq(), isSearchable(), isAggregatable(), - minValue, maxValue); - } - - @Override - public IndexFieldData.Builder fielddataBuilder() { - failIfNoDocValues(); - return new DocValuesIndexFieldData.Builder().numericType(NumericType.DOUBLE); - } - } - - protected LegacyDoubleFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Explicit ignoreMalformed, - Explicit coerce, Boolean includeInAll, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, includeInAll, indexSettings, multiFields, copyTo); - } - - @Override - public DoubleFieldType fieldType() { - return (DoubleFieldType) super.fieldType(); - } - - @Override - protected boolean customBoost() { - return true; - } - - @Override - protected void innerParseCreateField(ParseContext context, List fields) throws IOException { - double value; - float boost = fieldType().boost(); - if (context.externalValueSet()) { - Object externalValue = context.externalValue(); - if (externalValue == null) { - if (fieldType().nullValue() == null) { - return; - } - value = fieldType().nullValue(); - } else if (externalValue instanceof String) { - String sExternalValue = (String) externalValue; - if (sExternalValue.length() == 0) { - if (fieldType().nullValue() == null) { - return; - } - value = fieldType().nullValue(); - } else { - value = java.lang.Double.parseDouble(sExternalValue); - } - } else { - value = ((Number) externalValue).doubleValue(); - } - if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().name(), java.lang.Double.toString(value), boost); - } - } else { - XContentParser parser = context.parser(); - if (parser.currentToken() == XContentParser.Token.VALUE_NULL || - (parser.currentToken() == XContentParser.Token.VALUE_STRING && parser.textLength() == 0)) { - if (fieldType().nullValue() == null) { - return; - } - value = fieldType().nullValue(); - if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) { - context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost); - } - } else if (parser.currentToken() == XContentParser.Token.START_OBJECT - && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - XContentParser.Token token; - String currentFieldName = null; - java.lang.Double objValue = fieldType().nullValue(); - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else { - if ("value".equals(currentFieldName) || "_value".equals(currentFieldName)) { - if (parser.currentToken() != XContentParser.Token.VALUE_NULL) { - objValue = parser.doubleValue(coerce.value()); - } - } else if ("boost".equals(currentFieldName) || "_boost".equals(currentFieldName)) { - boost = parser.floatValue(); - } 
else { - throw new IllegalArgumentException("unknown property [" + currentFieldName + "]"); - } - } - } - if (objValue == null) { - // no value - return; - } - value = objValue; - } else { - value = parser.doubleValue(coerce.value()); - if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().name(), parser.text(), boost); - } - } - } - - if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { - CustomDoubleNumericField field = new CustomDoubleNumericField(value, fieldType()); - if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - field.setBoost(boost); - } - fields.add(field); - } - if (fieldType().hasDocValues()) { - addDocValue(context, fields, NumericUtils.doubleToSortableLong(value)); - } - } - - @Override - protected String contentType() { - return CONTENT_TYPE; - } - - @Override - protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { - super.doXContentBody(builder, includeDefaults, params); - - if (includeDefaults || fieldType().numericPrecisionStep() != Defaults.PRECISION_STEP_64_BIT) { - builder.field("precision_step", fieldType().numericPrecisionStep()); - } - if (includeDefaults || fieldType().nullValue() != null) { - builder.field("null_value", fieldType().nullValue()); - } - if (includeInAll != null) { - builder.field("include_in_all", includeInAll); - } else if (includeDefaults) { - builder.field("include_in_all", false); - } - - } - - public static class CustomDoubleNumericField extends CustomNumericField { - - private final double number; - - public CustomDoubleNumericField(double number, NumberFieldType fieldType) { - super(number, fieldType); - this.number = number; - } - - @Override - public TokenStream tokenStream(Analyzer analyzer, TokenStream previous) { - if (fieldType().indexOptions() != IndexOptions.NONE) { - return getCachedStream().setDoubleValue(number); - } - return null; - } - - @Override - public String numericAsString() { - return java.lang.Double.toString(number); - } - } - -} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/LegacyFloatFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/LegacyFloatFieldMapper.java deleted file mode 100644 index 43307373cb3..00000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/LegacyFloatFieldMapper.java +++ /dev/null @@ -1,324 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.document.Field; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.Terms; -import org.apache.lucene.search.LegacyNumericRangeQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.LegacyNumericUtils; -import org.apache.lucene.util.NumericUtils; -import org.elasticsearch.Version; -import org.elasticsearch.action.fieldstats.FieldStats; -import org.elasticsearch.common.Explicit; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType; -import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; -import org.elasticsearch.index.query.QueryShardContext; - -import java.io.IOException; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeFloatValue; -import static org.elasticsearch.index.mapper.TypeParsers.parseNumberField; - -public class LegacyFloatFieldMapper extends LegacyNumberFieldMapper { - - public static final String CONTENT_TYPE = "float"; - - public static class Defaults extends LegacyNumberFieldMapper.Defaults { - public static final MappedFieldType FIELD_TYPE = new FloatFieldType(); - - static { - FIELD_TYPE.freeze(); - } - } - - public static class Builder extends LegacyNumberFieldMapper.Builder { - - public Builder(String name) { - super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_32_BIT); - builder = this; - } - - @Override - public LegacyFloatFieldMapper build(BuilderContext context) { - if (context.indexCreatedVersion().onOrAfter(Version.V_5_0_0_alpha2)) { - throw new IllegalStateException("Cannot use legacy numeric types after 5.0"); - } - setupFieldType(context); - return new LegacyFloatFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), coerce(context), - includeInAll, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); - } - - @Override - protected int maxPrecisionStep() { - return 32; - } - } - - public static class TypeParser implements Mapper.TypeParser { - @Override - public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - LegacyFloatFieldMapper.Builder builder = new LegacyFloatFieldMapper.Builder(name); - parseNumberField(builder, name, node, parserContext); - for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { - Map.Entry entry = iterator.next(); - String propName = entry.getKey(); - Object propNode = entry.getValue(); - if (propName.equals("null_value")) { - if (propNode == null) { - throw new MapperParsingException("Property [null_value] cannot be null."); - } - builder.nullValue(nodeFloatValue(propNode)); - iterator.remove(); - } - } - return builder; - } - } - - static final class FloatFieldType extends NumberFieldType { - - public FloatFieldType() { - super(LegacyNumericType.FLOAT); - } - - protected FloatFieldType(FloatFieldType ref) { - super(ref); - } - - @Override - public NumberFieldType clone() { - return new 
FloatFieldType(this); - } - - @Override - public String typeName() { - return CONTENT_TYPE; - } - - @Override - public Float nullValue() { - return (Float)super.nullValue(); - } - - @Override - public BytesRef indexedValueForSearch(Object value) { - int intValue = NumericUtils.floatToSortableInt(parseValue(value)); - BytesRefBuilder bytesRef = new BytesRefBuilder(); - LegacyNumericUtils.intToPrefixCoded(intValue, 0, bytesRef); // 0 because of exact match - return bytesRef.get(); - } - - @Override - public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) { - return LegacyNumericRangeQuery.newFloatRange(name(), numericPrecisionStep(), - lowerTerm == null ? null : parseValue(lowerTerm), - upperTerm == null ? null : parseValue(upperTerm), - includeLower, includeUpper); - } - - @Override - public FieldStats.Double stats(IndexReader reader) throws IOException { - int maxDoc = reader.maxDoc(); - Terms terms = org.apache.lucene.index.MultiFields.getTerms(reader, name()); - if (terms == null) { - return null; - } - float minValue = NumericUtils.sortableIntToFloat(LegacyNumericUtils.getMinInt(terms)); - float maxValue = NumericUtils.sortableIntToFloat(LegacyNumericUtils.getMaxInt(terms)); - return new FieldStats.Double(maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), - isSearchable(), isAggregatable(), minValue, maxValue); - } - - @Override - public IndexFieldData.Builder fielddataBuilder() { - failIfNoDocValues(); - return new DocValuesIndexFieldData.Builder().numericType(NumericType.FLOAT); - } - } - - protected LegacyFloatFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, - Explicit ignoreMalformed, Explicit coerce, Boolean includeInAll, - Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, includeInAll, indexSettings, multiFields, copyTo); - } - - @Override - public FloatFieldType fieldType() { - return (FloatFieldType) super.fieldType(); - } - - private static float parseValue(Object value) { - if (value instanceof Number) { - return ((Number) value).floatValue(); - } - if (value instanceof BytesRef) { - return Float.parseFloat(((BytesRef) value).utf8ToString()); - } - return Float.parseFloat(value.toString()); - } - - @Override - protected boolean customBoost() { - return true; - } - - @Override - protected void innerParseCreateField(ParseContext context, List fields) throws IOException { - float value; - float boost = fieldType().boost(); - if (context.externalValueSet()) { - Object externalValue = context.externalValue(); - if (externalValue == null) { - if (fieldType().nullValue() == null) { - return; - } - value = fieldType().nullValue(); - } else if (externalValue instanceof String) { - String sExternalValue = (String) externalValue; - if (sExternalValue.length() == 0) { - if (fieldType().nullValue() == null) { - return; - } - value = fieldType().nullValue(); - } else { - value = Float.parseFloat(sExternalValue); - } - } else { - value = ((Number) externalValue).floatValue(); - } - if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().name(), Float.toString(value), boost); - } - } else { - XContentParser parser = context.parser(); - if (parser.currentToken() == XContentParser.Token.VALUE_NULL || - (parser.currentToken() == XContentParser.Token.VALUE_STRING && parser.textLength() == 0)) { - if (fieldType().nullValue() == 
null) { - return; - } - value = fieldType().nullValue(); - if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) { - context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost); - } - } else if (parser.currentToken() == XContentParser.Token.START_OBJECT - && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - XContentParser.Token token; - String currentFieldName = null; - Float objValue = fieldType().nullValue(); - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else { - if ("value".equals(currentFieldName) || "_value".equals(currentFieldName)) { - if (parser.currentToken() != XContentParser.Token.VALUE_NULL) { - objValue = parser.floatValue(coerce.value()); - } - } else if ("boost".equals(currentFieldName) || "_boost".equals(currentFieldName)) { - boost = parser.floatValue(); - } else { - throw new IllegalArgumentException("unknown property [" + currentFieldName + "]"); - } - } - } - if (objValue == null) { - // no value - return; - } - value = objValue; - } else { - value = parser.floatValue(coerce.value()); - if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().name(), parser.text(), boost); - } - } - } - - if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { - CustomFloatNumericField field = new CustomFloatNumericField(value, fieldType()); - if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - field.setBoost(boost); - } - fields.add(field); - } - if (fieldType().hasDocValues()) { - addDocValue(context, fields, NumericUtils.floatToSortableInt(value)); - } - } - - @Override - protected String contentType() { - return CONTENT_TYPE; - } - - @Override - protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { - super.doXContentBody(builder, includeDefaults, params); - - if (includeDefaults || fieldType().numericPrecisionStep() != Defaults.PRECISION_STEP_32_BIT) { - builder.field("precision_step", fieldType().numericPrecisionStep()); - } - if (includeDefaults || fieldType().nullValue() != null) { - builder.field("null_value", fieldType().nullValue()); - } - if (includeInAll != null) { - builder.field("include_in_all", includeInAll); - } else if (includeDefaults) { - builder.field("include_in_all", false); - } - - } - - public static class CustomFloatNumericField extends CustomNumericField { - - private final float number; - - public CustomFloatNumericField(float number, NumberFieldType fieldType) { - super(number, fieldType); - this.number = number; - } - - @Override - public TokenStream tokenStream(Analyzer analyzer, TokenStream previous) { - if (fieldType().indexOptions() != IndexOptions.NONE) { - return getCachedStream().setFloatValue(number); - } - return null; - } - - @Override - public String numericAsString() { - return Float.toString(number); - } - } -} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/LegacyGeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/LegacyGeoPointFieldMapper.java deleted file mode 100644 index fc46a08ce1a..00000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/LegacyGeoPointFieldMapper.java +++ /dev/null @@ -1,367 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.mapper; - -import com.carrotsearch.hppc.ObjectHashSet; -import com.carrotsearch.hppc.cursors.ObjectCursor; -import org.apache.lucene.document.Field; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.Explicit; -import org.elasticsearch.common.geo.GeoDistance; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.geo.GeoUtils; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.DistanceUnit; -import org.elasticsearch.common.util.ByteUtils; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.support.XContentMapValues; - -import java.io.IOException; -import java.util.Iterator; -import java.util.Map; - - -/** - * Parsing: We handle: - *
 - * <p>
- * - "field" : "geo_hash" - * - "field" : "lat,lon" - * - "field" : { - * "lat" : 1.1, - * "lon" : 2.1 - * } - */ -public class LegacyGeoPointFieldMapper extends BaseGeoPointFieldMapper implements ArrayValueMapperParser { - - public static final String CONTENT_TYPE = "geo_point"; - - public static class Names extends BaseGeoPointFieldMapper.Names { - public static final String COERCE = "coerce"; - } - - public static class Defaults extends BaseGeoPointFieldMapper.Defaults{ - public static final Explicit COERCE = new Explicit<>(false, false); - - public static final GeoPointFieldType FIELD_TYPE = new LegacyGeoPointFieldType(); - - static { - FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); - FIELD_TYPE.setTokenized(false); - FIELD_TYPE.setOmitNorms(true); - FIELD_TYPE.freeze(); - } - } - - /** - * Concrete builder for legacy GeoPointField - */ - public static class Builder extends BaseGeoPointFieldMapper.Builder { - - private Boolean coerce; - - public Builder(String name) { - super(name, Defaults.FIELD_TYPE); - this.builder = this; - } - - public Builder coerce(boolean coerce) { - this.coerce = coerce; - return builder; - } - - protected Explicit coerce(BuilderContext context) { - if (coerce != null) { - return new Explicit<>(coerce, true); - } - if (context.indexSettings() != null) { - return new Explicit<>(COERCE_SETTING.get(context.indexSettings()), false); - } - return Defaults.COERCE; - } - - @Override - public LegacyGeoPointFieldMapper build(BuilderContext context, String simpleName, MappedFieldType fieldType, - MappedFieldType defaultFieldType, Settings indexSettings, FieldMapper latMapper, FieldMapper lonMapper, - FieldMapper geoHashMapper, MultiFields multiFields, Explicit ignoreMalformed, - CopyTo copyTo) { - fieldType.setTokenized(false); - setupFieldType(context); - fieldType.setHasDocValues(false); - defaultFieldType.setHasDocValues(false); - return new LegacyGeoPointFieldMapper(simpleName, fieldType, defaultFieldType, indexSettings, latMapper, - lonMapper, geoHashMapper, multiFields, ignoreMalformed, coerce(context), copyTo); - } - - @Override - public LegacyGeoPointFieldMapper build(BuilderContext context) { - return super.build(context); - } - } - - public static Builder parse(Builder builder, Map node, Mapper.TypeParser.ParserContext parserContext) - throws MapperParsingException { - for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { - Map.Entry entry = iterator.next(); - String propName = entry.getKey(); - Object propNode = entry.getValue(); - if (propName.equals(Names.COERCE)) { - builder.coerce = XContentMapValues.lenientNodeBooleanValue(propNode); - iterator.remove(); - } - } - return builder; - } - - /** - * A byte-aligned fixed-length encoding for latitudes and longitudes. - */ - public static final class Encoding { - - // With 14 bytes we already have better precision than a double since a double has 11 bits of exponent - private static final int MAX_NUM_BYTES = 14; - - private static final Encoding[] INSTANCES; - static { - INSTANCES = new Encoding[MAX_NUM_BYTES + 1]; - for (int numBytes = 2; numBytes <= MAX_NUM_BYTES; numBytes += 2) { - INSTANCES[numBytes] = new Encoding(numBytes); - } - } - - /** Get an instance based on the number of bytes that has been used to encode values. 
*/ - public static Encoding of(int numBytesPerValue) { - final Encoding instance = INSTANCES[numBytesPerValue]; - if (instance == null) { - throw new IllegalStateException("No encoding for " + numBytesPerValue + " bytes per value"); - } - return instance; - } - - /** Get an instance based on the expected precision. Here are examples of the number of required bytes per value - * depending on the - * expected precision:
 - * <ul>
 - * <li>1km: 4 bytes</li>
 - * <li>3m: 6 bytes</li>
 - * <li>1m: 8 bytes</li>
 - * <li>1cm: 8 bytes</li>
 - * <li>1mm: 10 bytes</li></ul>
*/ - public static Encoding of(DistanceUnit.Distance precision) { - for (Encoding encoding : INSTANCES) { - if (encoding != null && encoding.precision().compareTo(precision) <= 0) { - return encoding; - } - } - return INSTANCES[MAX_NUM_BYTES]; - } - - private final DistanceUnit.Distance precision; - private final int numBytes; - private final int numBytesPerCoordinate; - private final double factor; - - private Encoding(int numBytes) { - assert numBytes >= 1 && numBytes <= MAX_NUM_BYTES; - assert (numBytes & 1) == 0; // we don't support odd numBytes for the moment - this.numBytes = numBytes; - this.numBytesPerCoordinate = numBytes / 2; - this.factor = Math.pow(2, - numBytesPerCoordinate * 8 + 9); - assert (1L << (numBytesPerCoordinate * 8 - 1)) * factor > 180 && (1L << (numBytesPerCoordinate * 8 - 2)) - * factor < 180 : numBytesPerCoordinate + " " + factor; - if (numBytes == MAX_NUM_BYTES) { - // no precision loss compared to a double - precision = new DistanceUnit.Distance(0, DistanceUnit.DEFAULT); - } else { - // factor/2 because we use Math.round instead of a cast to convert the double to a long - precision = new DistanceUnit.Distance( - GeoDistance.PLANE.calculate(0, 0, factor / 2, factor / 2, DistanceUnit.DEFAULT), - DistanceUnit.DEFAULT); - } - } - - public DistanceUnit.Distance precision() { - return precision; - } - - /** The number of bytes required to encode a single geo point. */ - public int numBytes() { - return numBytes; - } - - /** The number of bits required to encode a single coordinate of a geo point. */ - public int numBitsPerCoordinate() { - return numBytesPerCoordinate << 3; - } - - /** Return the bits that encode a latitude/longitude. */ - public long encodeCoordinate(double lat) { - return Math.round((lat + 180) / factor); - } - - /** Decode a sequence of bits into the original coordinate. */ - public double decodeCoordinate(long bits) { - return bits * factor - 180; - } - - private void encodeBits(long bits, byte[] out, int offset) { - for (int i = 0; i < numBytesPerCoordinate; ++i) { - out[offset++] = (byte) bits; - bits >>>= 8; - } - assert bits == 0; - } - - private long decodeBits(byte [] in, int offset) { - long r = in[offset++] & 0xFFL; - for (int i = 1; i < numBytesPerCoordinate; ++i) { - r = (in[offset++] & 0xFFL) << (i * 8); - } - return r; - } - - /** Encode a geo point into a byte-array, over {@link #numBytes()} bytes. */ - public void encode(double lat, double lon, byte[] out, int offset) { - encodeBits(encodeCoordinate(lat), out, offset); - encodeBits(encodeCoordinate(lon), out, offset + numBytesPerCoordinate); - } - - /** Decode a geo point from a byte-array, reading {@link #numBytes()} bytes. */ - public GeoPoint decode(byte[] in, int offset, GeoPoint out) { - final long latBits = decodeBits(in, offset); - final long lonBits = decodeBits(in, offset + numBytesPerCoordinate); - return decode(latBits, lonBits, out); - } - - /** Decode a geo point from the bits of the encoded latitude and longitudes. 
*/ - public GeoPoint decode(long latBits, long lonBits, GeoPoint out) { - final double lat = decodeCoordinate(latBits); - final double lon = decodeCoordinate(lonBits); - return out.reset(lat, lon); - } - - } - - protected Explicit coerce; - - public LegacyGeoPointFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, - Settings indexSettings, FieldMapper latMapper, FieldMapper lonMapper, FieldMapper geoHashMapper, - MultiFields multiFields, Explicit ignoreMalformed, Explicit coerce, CopyTo copyTo) { - super(simpleName, fieldType, defaultFieldType, indexSettings, latMapper, lonMapper, geoHashMapper, multiFields, - ignoreMalformed, copyTo); - this.coerce = coerce; - } - - @Override - protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { - super.doMerge(mergeWith, updateAllTypes); - - LegacyGeoPointFieldMapper gpfmMergeWith = (LegacyGeoPointFieldMapper) mergeWith; - if (gpfmMergeWith.coerce.explicit()) { - if (coerce.explicit() && coerce.value() != gpfmMergeWith.coerce.value()) { - throw new IllegalArgumentException("mapper [" + fieldType().name() + "] has different [coerce]"); - } - } - - if (gpfmMergeWith.coerce.explicit()) { - this.coerce = gpfmMergeWith.coerce; - } - } - - @Override - protected void parse(ParseContext context, GeoPoint point, String geoHash) throws IOException { - boolean validPoint = false; - if (coerce.value() == false && ignoreMalformed.value() == false) { - if (point.lat() > 90.0 || point.lat() < -90.0) { - throw new IllegalArgumentException("illegal latitude value [" + point.lat() + "] for " + name()); - } - if (point.lon() > 180.0 || point.lon() < -180) { - throw new IllegalArgumentException("illegal longitude value [" + point.lon() + "] for " + name()); - } - validPoint = true; - } - - if (coerce.value() && validPoint == false) { - // by setting coerce to false we are assuming all geopoints are already in a valid coordinate system - // thus this extra step can be skipped - GeoUtils.normalizePoint(point, true, true); - } - - if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { - Field field = new Field(fieldType().name(), Double.toString(point.lat()) + ',' - + Double.toString(point.lon()), fieldType()); - context.doc().add(field); - } - - super.parse(context, point, geoHash); - - if (fieldType().hasDocValues()) { - CustomGeoPointDocValuesField field = (CustomGeoPointDocValuesField) context.doc() - .getByKey(fieldType().name()); - if (field == null) { - field = new CustomGeoPointDocValuesField(fieldType().name(), point.lat(), point.lon()); - context.doc().addWithKey(fieldType().name(), field); - } else { - field.add(point.lat(), point.lon()); - } - } - } - - @Override - protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { - super.doXContentBody(builder, includeDefaults, params); - if (includeDefaults || coerce.explicit()) { - builder.field(Names.COERCE, coerce.value()); - } - } - - @Override - public LegacyGeoPointFieldType fieldType() { - return (LegacyGeoPointFieldType) super.fieldType(); - } - - public static class CustomGeoPointDocValuesField extends CustomDocValuesField { - - private final ObjectHashSet points; - - public CustomGeoPointDocValuesField(String name, double lat, double lon) { - super(name); - points = new ObjectHashSet<>(2); - points.add(new GeoPoint(lat, lon)); - } - - public void add(double lat, double lon) { - points.add(new GeoPoint(lat, lon)); - } - - @Override - public BytesRef binaryValue() { - final 
byte[] bytes = new byte[points.size() * 16]; - int off = 0; - for (Iterator> it = points.iterator(); it.hasNext(); ) { - final GeoPoint point = it.next().value; - ByteUtils.writeDoubleLE(point.getLat(), bytes, off); - ByteUtils.writeDoubleLE(point.getLon(), bytes, off + 8); - off += 16; - } - return new BytesRef(bytes); - } - } - -} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/LegacyIntegerFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/LegacyIntegerFieldMapper.java deleted file mode 100644 index b2016c75552..00000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/LegacyIntegerFieldMapper.java +++ /dev/null @@ -1,331 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.document.Field; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.Terms; -import org.apache.lucene.search.LegacyNumericRangeQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.LegacyNumericUtils; -import org.elasticsearch.Version; -import org.elasticsearch.action.fieldstats.FieldStats; -import org.elasticsearch.common.Explicit; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType; -import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; -import org.elasticsearch.index.query.QueryShardContext; - -import java.io.IOException; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeIntegerValue; -import static org.elasticsearch.index.mapper.TypeParsers.parseNumberField; - -public class LegacyIntegerFieldMapper extends LegacyNumberFieldMapper { - - public static final String CONTENT_TYPE = "integer"; - - public static class Defaults extends LegacyNumberFieldMapper.Defaults { - public static final MappedFieldType FIELD_TYPE = new IntegerFieldType(); - - static { - FIELD_TYPE.freeze(); - } - } - - public static class Builder extends LegacyNumberFieldMapper.Builder { - - public Builder(String name) { - super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_32_BIT); - builder = this; - } - - public Builder nullValue(int nullValue) { - this.fieldType.setNullValue(nullValue); - return this; - } - - @Override - public 
LegacyIntegerFieldMapper build(BuilderContext context) { - if (context.indexCreatedVersion().onOrAfter(Version.V_5_0_0_alpha2)) { - throw new IllegalStateException("Cannot use legacy numeric types after 5.0"); - } - setupFieldType(context); - return new LegacyIntegerFieldMapper(name, fieldType, defaultFieldType, - ignoreMalformed(context), coerce(context), includeInAll, - context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); - } - @Override - protected int maxPrecisionStep() { - return 32; - } - } - - public static class TypeParser implements Mapper.TypeParser { - @Override - public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - LegacyIntegerFieldMapper.Builder builder = new LegacyIntegerFieldMapper.Builder(name); - parseNumberField(builder, name, node, parserContext); - for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { - Map.Entry entry = iterator.next(); - String propName = entry.getKey(); - Object propNode = entry.getValue(); - if (propName.equals("null_value")) { - if (propNode == null) { - throw new MapperParsingException("Property [null_value] cannot be null."); - } - builder.nullValue(nodeIntegerValue(propNode)); - iterator.remove(); - } - } - return builder; - } - } - - public static final class IntegerFieldType extends NumberFieldType { - - public IntegerFieldType() { - super(LegacyNumericType.INT); - } - - protected IntegerFieldType(IntegerFieldType ref) { - super(ref); - } - - @Override - public NumberFieldType clone() { - return new IntegerFieldType(this); - } - - @Override - public String typeName() { - return "integer"; - } - - @Override - public Integer nullValue() { - return (Integer)super.nullValue(); - } - - @Override - public BytesRef indexedValueForSearch(Object value) { - BytesRefBuilder bytesRef = new BytesRefBuilder(); - LegacyNumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match - return bytesRef.get(); - } - - @Override - public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) { - return LegacyNumericRangeQuery.newIntRange(name(), numericPrecisionStep(), - lowerTerm == null ? null : parseValue(lowerTerm), - upperTerm == null ? 
null : parseValue(upperTerm), - includeLower, includeUpper); - } - - @Override - public FieldStats.Long stats(IndexReader reader) throws IOException { - int maxDoc = reader.maxDoc(); - Terms terms = org.apache.lucene.index.MultiFields.getTerms(reader, name()); - if (terms == null) { - return null; - } - long minValue = LegacyNumericUtils.getMinInt(terms); - long maxValue = LegacyNumericUtils.getMaxInt(terms); - return new FieldStats.Long( - maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), - isSearchable(), isAggregatable(), minValue, maxValue); - } - - @Override - public IndexFieldData.Builder fielddataBuilder() { - failIfNoDocValues(); - return new DocValuesIndexFieldData.Builder().numericType(NumericType.INT); - } - } - - protected LegacyIntegerFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, - Explicit ignoreMalformed, Explicit coerce, Boolean includeInAll, - Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, includeInAll, indexSettings, multiFields, copyTo); - } - - @Override - public IntegerFieldType fieldType() { - return (IntegerFieldType) super.fieldType(); - } - - private static int parseValue(Object value) { - if (value instanceof Number) { - return ((Number) value).intValue(); - } - if (value instanceof BytesRef) { - return Integer.parseInt(((BytesRef) value).utf8ToString()); - } - return Integer.parseInt(value.toString()); - } - - @Override - protected boolean customBoost() { - return true; - } - - @Override - protected void innerParseCreateField(ParseContext context, List fields) throws IOException { - int value; - float boost = fieldType().boost(); - if (context.externalValueSet()) { - Object externalValue = context.externalValue(); - if (externalValue == null) { - if (fieldType().nullValue() == null) { - return; - } - value = fieldType().nullValue(); - } else if (externalValue instanceof String) { - String sExternalValue = (String) externalValue; - if (sExternalValue.length() == 0) { - if (fieldType().nullValue() == null) { - return; - } - value = fieldType().nullValue(); - } else { - value = Integer.parseInt(sExternalValue); - } - } else { - value = ((Number) externalValue).intValue(); - } - if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().name(), Integer.toString(value), boost); - } - } else { - XContentParser parser = context.parser(); - if (parser.currentToken() == XContentParser.Token.VALUE_NULL || - (parser.currentToken() == XContentParser.Token.VALUE_STRING && parser.textLength() == 0)) { - if (fieldType().nullValue() == null) { - return; - } - value = fieldType().nullValue(); - if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) { - context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost); - } - } else if (parser.currentToken() == XContentParser.Token.START_OBJECT - && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - XContentParser.Token token; - String currentFieldName = null; - Integer objValue = fieldType().nullValue(); - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else { - if ("value".equals(currentFieldName) || "_value".equals(currentFieldName)) { - if (parser.currentToken() != XContentParser.Token.VALUE_NULL) { - objValue = 
parser.intValue(coerce.value()); - } - } else if ("boost".equals(currentFieldName) || "_boost".equals(currentFieldName)) { - boost = parser.floatValue(); - } else { - throw new IllegalArgumentException("unknown property [" + currentFieldName + "]"); - } - } - } - if (objValue == null) { - // no value - return; - } - value = objValue; - } else { - value = parser.intValue(coerce.value()); - if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().name(), parser.text(), boost); - } - } - } - addIntegerFields(context, fields, value, boost); - } - - protected void addIntegerFields(ParseContext context, List fields, int value, float boost) { - if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { - CustomIntegerNumericField field = new CustomIntegerNumericField(value, fieldType()); - if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - field.setBoost(boost); - } - fields.add(field); - } - if (fieldType().hasDocValues()) { - addDocValue(context, fields, value); - } - } - - @Override - protected String contentType() { - return CONTENT_TYPE; - } - - @Override - protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { - super.doXContentBody(builder, includeDefaults, params); - - if (includeDefaults || fieldType().numericPrecisionStep() != Defaults.PRECISION_STEP_32_BIT) { - builder.field("precision_step", fieldType().numericPrecisionStep()); - } - if (includeDefaults || fieldType().nullValue() != null) { - builder.field("null_value", fieldType().nullValue()); - } - if (includeInAll != null) { - builder.field("include_in_all", includeInAll); - } else if (includeDefaults) { - builder.field("include_in_all", false); - } - - } - - public static class CustomIntegerNumericField extends CustomNumericField { - - private final int number; - - public CustomIntegerNumericField(int number, MappedFieldType fieldType) { - super(number, fieldType); - this.number = number; - } - - @Override - public TokenStream tokenStream(Analyzer analyzer, TokenStream previous) { - if (fieldType().indexOptions() != IndexOptions.NONE) { - return getCachedStream().setIntValue(number); - } - return null; - } - - @Override - public String numericAsString() { - return Integer.toString(number); - } - } -} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/LegacyIpFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/LegacyIpFieldMapper.java deleted file mode 100644 index 37c37cc1b80..00000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/LegacyIpFieldMapper.java +++ /dev/null @@ -1,340 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.document.Field; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.Terms; -import org.apache.lucene.search.LegacyNumericRangeQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.LegacyNumericUtils; -import org.elasticsearch.Version; -import org.elasticsearch.action.fieldstats.FieldStats; -import org.elasticsearch.common.Explicit; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.network.Cidrs; -import org.elasticsearch.common.network.InetAddresses; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.IndexFieldDataCache; -import org.elasticsearch.index.mapper.LegacyLongFieldMapper.CustomLongNumericField; -import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.indices.breaker.CircuitBreakerService; -import org.elasticsearch.search.DocValueFormat; -import org.joda.time.DateTimeZone; - -import java.io.IOException; -import java.net.InetAddress; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.regex.Pattern; - -import static org.elasticsearch.index.mapper.TypeParsers.parseNumberField; - -public class LegacyIpFieldMapper extends LegacyNumberFieldMapper { - - public static final String CONTENT_TYPE = "ip"; - public static final long MAX_IP = 4294967296L; - - public static String longToIp(long longIp) { - int octet3 = (int) ((longIp >> 24) % 256); - int octet2 = (int) ((longIp >> 16) % 256); - int octet1 = (int) ((longIp >> 8) % 256); - int octet0 = (int) ((longIp) % 256); - return octet3 + "." + octet2 + "." + octet1 + "." 
+ octet0; - } - - private static final Pattern pattern = Pattern.compile("\\."); - - public static long ipToLong(String ip) { - try { - if (!InetAddresses.isInetAddress(ip)) { - throw new IllegalArgumentException("failed to parse ip [" + ip + "], not a valid ip address"); - } - String[] octets = pattern.split(ip); - if (octets.length != 4) { - throw new IllegalArgumentException("failed to parse ip [" + ip + "], not a valid ipv4 address (4 dots)"); - } - return (Long.parseLong(octets[0]) << 24) + (Integer.parseInt(octets[1]) << 16) + - (Integer.parseInt(octets[2]) << 8) + Integer.parseInt(octets[3]); - } catch (Exception e) { - if (e instanceof IllegalArgumentException) { - throw (IllegalArgumentException) e; - } - throw new IllegalArgumentException("failed to parse ip [" + ip + "]", e); - } - } - - public static class Defaults extends LegacyNumberFieldMapper.Defaults { - public static final String NULL_VALUE = null; - - public static final MappedFieldType FIELD_TYPE = new IpFieldType(); - - static { - FIELD_TYPE.freeze(); - } - } - - public static class Builder extends LegacyNumberFieldMapper.Builder { - - protected String nullValue = Defaults.NULL_VALUE; - - public Builder(String name) { - super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_64_BIT); - builder = this; - } - - @Override - public LegacyIpFieldMapper build(BuilderContext context) { - if (context.indexCreatedVersion().onOrAfter(Version.V_5_0_0_alpha2)) { - throw new IllegalStateException("Cannot use legacy numeric types after 5.0"); - } - setupFieldType(context); - return new LegacyIpFieldMapper(name, fieldType, defaultFieldType, ignoreMalformed(context), - coerce(context), includeInAll, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); - } - - @Override - protected int maxPrecisionStep() { - return 64; - } - } - - public static class TypeParser implements Mapper.TypeParser { - @Override - public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - LegacyIpFieldMapper.Builder builder = new Builder(name); - parseNumberField(builder, name, node, parserContext); - for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { - Map.Entry entry = iterator.next(); - String propName = entry.getKey(); - Object propNode = entry.getValue(); - if (propName.equals("null_value")) { - if (propNode == null) { - throw new MapperParsingException("Property [null_value] cannot be null."); - } - builder.nullValue(propNode.toString()); - iterator.remove(); - } - } - return builder; - } - } - - public static final class IpFieldType extends LegacyLongFieldMapper.LongFieldType { - - public IpFieldType() { - } - - protected IpFieldType(IpFieldType ref) { - super(ref); - } - - @Override - public NumberFieldType clone() { - return new IpFieldType(this); - } - - @Override - public String typeName() { - return CONTENT_TYPE; - } - - /** - * IPs should return as a string. 
- */ - @Override - public Object valueForDisplay(Object value) { - Long val = (Long) value; - if (val == null) { - return null; - } - return longToIp(val); - } - - @Override - public BytesRef indexedValueForSearch(Object value) { - BytesRefBuilder bytesRef = new BytesRefBuilder(); - LegacyNumericUtils.longToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match - return bytesRef.get(); - } - - @Override - public Query termQuery(Object value, @Nullable QueryShardContext context) { - if (value != null) { - String term; - if (value instanceof BytesRef) { - term = ((BytesRef) value).utf8ToString(); - } else { - term = value.toString(); - } - long[] fromTo; - // assume that the term is either a CIDR range or the - // term is a single IPv4 address; if either of these - // assumptions is wrong, the CIDR parsing will fail - // anyway, and that is okay - if (term.contains("/")) { - // treat the term as if it is in CIDR notation - fromTo = Cidrs.cidrMaskToMinMax(term); - } else { - // treat the term as if it is a single IPv4, and - // apply a CIDR mask equivalent to the host route - fromTo = Cidrs.cidrMaskToMinMax(term + "/32"); - } - if (fromTo != null) { - return rangeQuery(fromTo[0] == 0 ? null : fromTo[0], - fromTo[1] == MAX_IP ? null : fromTo[1], true, false, context); - } - } - return super.termQuery(value, context); - } - - @Override - public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) { - return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(), - lowerTerm == null ? null : parseValue(lowerTerm), - upperTerm == null ? null : parseValue(upperTerm), - includeLower, includeUpper); - } - - @Override - public FieldStats stats(IndexReader reader) throws IOException { - int maxDoc = reader.maxDoc(); - Terms terms = org.apache.lucene.index.MultiFields.getTerms(reader, name()); - if (terms == null) { - return null; - } - long minValue = LegacyNumericUtils.getMinLong(terms); - long maxValue = LegacyNumericUtils.getMaxLong(terms); - return new FieldStats.Ip(maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), - isSearchable(), isAggregatable(), - InetAddress.getByName(longToIp(minValue)), - InetAddress.getByName(longToIp(maxValue))); - } - - @Override - public IndexFieldData.Builder fielddataBuilder() { - failIfNoDocValues(); - return new IndexFieldData.Builder() { - @Override - public IndexFieldData build(IndexSettings indexSettings, - MappedFieldType fieldType, IndexFieldDataCache cache, - CircuitBreakerService breakerService, MapperService mapperService) { - return new LegacyIpIndexFieldData(indexSettings.getIndex(), name()); - } - }; - } - - @Override - public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZone) { - if (format != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom formats"); - } - if (timeZone != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() - + "] does not support custom time zones"); - } - return DocValueFormat.IP; - } - } - - protected LegacyIpFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, - Explicit ignoreMalformed, Explicit coerce, Boolean includeInAll, - Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, includeInAll, indexSettings, multiFields, copyTo); - } - - 
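As an aside, the removed LegacyIpFieldMapper above encodes IPv4 addresses as unsigned 32-bit values carried in a Java long, via the ipToLong and longToIp helpers shown earlier in this file. The following is a minimal, self-contained sketch of that packing arithmetic; the class name Ipv4LongRoundTrip and the simplified validation are illustrative assumptions, not code from this patch.

// Illustrative sketch only; mirrors the arithmetic of the removed ipToLong/longToIp helpers.
public final class Ipv4LongRoundTrip {

    // Pack a dotted-quad IPv4 string into an unsigned 32-bit value held in a long.
    static long ipToLong(String ip) {
        String[] octets = ip.split("\\.");
        if (octets.length != 4) {
            throw new IllegalArgumentException("not a dotted-quad IPv4 address: " + ip);
        }
        long value = 0;
        for (String octet : octets) {
            int b = Integer.parseInt(octet);
            if (b < 0 || b > 255) {
                throw new IllegalArgumentException("octet out of range in: " + ip);
            }
            value = (value << 8) | b;
        }
        return value;
    }

    // Unpack the long back into dotted-quad form.
    static String longToIp(long ip) {
        return ((ip >> 24) & 0xFF) + "." + ((ip >> 16) & 0xFF) + "."
                + ((ip >> 8) & 0xFF) + "." + (ip & 0xFF);
    }

    public static void main(String[] args) {
        long packed = ipToLong("192.168.1.1");
        System.out.println(packed);           // 3232235777
        System.out.println(longToIp(packed)); // 192.168.1.1
    }
}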
private static long parseValue(Object value) { - if (value instanceof Number) { - return ((Number) value).longValue(); - } - if (value instanceof BytesRef) { - return ipToLong(((BytesRef) value).utf8ToString()); - } - return ipToLong(value.toString()); - } - - @Override - protected void innerParseCreateField(ParseContext context, List fields) throws IOException { - String ipAsString; - if (context.externalValueSet()) { - ipAsString = (String) context.externalValue(); - if (ipAsString == null) { - ipAsString = fieldType().nullValueAsString(); - } - } else { - if (context.parser().currentToken() == XContentParser.Token.VALUE_NULL) { - ipAsString = fieldType().nullValueAsString(); - } else { - ipAsString = context.parser().text(); - } - } - - if (ipAsString == null) { - return; - } - if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().name(), ipAsString, fieldType().boost()); - } - - final long value = ipToLong(ipAsString); - if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { - CustomLongNumericField field = new CustomLongNumericField(value, fieldType()); - if (fieldType.boost() != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - field.setBoost(fieldType().boost()); - } - fields.add(field); - } - if (fieldType().hasDocValues()) { - addDocValue(context, fields, value); - } - } - - @Override - protected String contentType() { - return CONTENT_TYPE; - } - - @Override - protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { - super.doXContentBody(builder, includeDefaults, params); - - if (includeDefaults || fieldType().numericPrecisionStep() != Defaults.PRECISION_STEP_64_BIT) { - builder.field("precision_step", fieldType().numericPrecisionStep()); - } - if (includeDefaults || fieldType().nullValueAsString() != null) { - builder.field("null_value", fieldType().nullValueAsString()); - } - if (includeInAll != null) { - builder.field("include_in_all", includeInAll); - } else if (includeDefaults) { - builder.field("include_in_all", false); - } - - } - -} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/LegacyIpIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/mapper/LegacyIpIndexFieldData.java deleted file mode 100644 index feb3328227d..00000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/LegacyIpIndexFieldData.java +++ /dev/null @@ -1,145 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.index.mapper; - -import org.apache.logging.log4j.Logger; -import org.apache.lucene.document.InetAddressPoint; -import org.apache.lucene.index.DocValues; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.SortedNumericDocValues; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.index.Index; -import org.elasticsearch.index.fielddata.AtomicFieldData; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; -import org.elasticsearch.index.fielddata.ScriptDocValues; -import org.elasticsearch.index.fielddata.SortedBinaryDocValues; -import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource; -import org.elasticsearch.search.MultiValueMode; - -import java.io.IOException; -import java.net.InetAddress; -import java.net.UnknownHostException; -import java.nio.ByteBuffer; - -final class LegacyIpIndexFieldData implements IndexFieldData { - - protected final Index index; - protected final String fieldName; - protected final Logger logger; - - public LegacyIpIndexFieldData(Index index, String fieldName) { - this.index = index; - this.fieldName = fieldName; - this.logger = Loggers.getLogger(getClass()); - } - - public String getFieldName() { - return fieldName; - } - - public void clear() { - // nothing to do - } - - public void clear(IndexReader reader) { - // nothing to do - } - - public Index index() { - return index; - } - - @Override - public AtomicFieldData load(LeafReaderContext context) { - return new AtomicFieldData() { - - @Override - public void close() { - // no-op - } - - @Override - public long ramBytesUsed() { - return 0; - } - - @Override - public ScriptDocValues getScriptValues() { - throw new UnsupportedOperationException("Cannot run scripts on ip fields"); - } - - @Override - public SortedBinaryDocValues getBytesValues() { - SortedNumericDocValues values; - try { - values = DocValues.getSortedNumeric(context.reader(), fieldName); - } catch (IOException e) { - throw new IllegalStateException("Cannot load doc values", e); - } - return new SortedBinaryDocValues() { - - final ByteBuffer scratch = ByteBuffer.allocate(4); - - @Override - public BytesRef valueAt(int index) { - // we do not need to reorder ip addresses since both the numeric - // encoding of LegacyIpFieldMapper and the binary encoding of - // IpFieldMapper match the sort order of ip addresses - long ip = values.valueAt(index); - scratch.putInt(0, (int) ip); - InetAddress inet; - try { - inet = InetAddress.getByAddress(scratch.array()); - } catch (UnknownHostException e) { - throw new IllegalStateException("Cannot happen", e); - } - byte[] encoded = InetAddressPoint.encode(inet); - return new BytesRef(encoded); - } - - @Override - public void setDocument(int docId) { - values.setDocument(docId); - } - - @Override - public int count() { - return values.count(); - } - }; - } - }; - } - - @Override - public AtomicFieldData loadDirect(LeafReaderContext context) - throws Exception { - return load(context); - } - - @Override - public IndexFieldData.XFieldComparatorSource comparatorSource( - Object missingValue, MultiValueMode sortMode, Nested nested) { - return new BytesRefFieldComparatorSource(this, missingValue, sortMode, nested); - } - -} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/LegacyLongFieldMapper.java 
b/core/src/main/java/org/elasticsearch/index/mapper/LegacyLongFieldMapper.java deleted file mode 100644 index 110259421c9..00000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/LegacyLongFieldMapper.java +++ /dev/null @@ -1,317 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.document.Field; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.Terms; -import org.apache.lucene.search.LegacyNumericRangeQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.LegacyNumericUtils; -import org.elasticsearch.Version; -import org.elasticsearch.action.fieldstats.FieldStats; -import org.elasticsearch.common.Explicit; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType; -import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; -import org.elasticsearch.index.query.QueryShardContext; - -import java.io.IOException; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeLongValue; -import static org.elasticsearch.index.mapper.TypeParsers.parseNumberField; - -public class LegacyLongFieldMapper extends LegacyNumberFieldMapper { - - public static final String CONTENT_TYPE = "long"; - - public static class Defaults extends LegacyNumberFieldMapper.Defaults { - public static final MappedFieldType FIELD_TYPE = new LongFieldType(); - - static { - FIELD_TYPE.freeze(); - } - } - - public static class Builder extends LegacyNumberFieldMapper.Builder { - - public Builder(String name) { - super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_64_BIT); - builder = this; - } - - public Builder nullValue(long nullValue) { - this.fieldType.setNullValue(nullValue); - return this; - } - - @Override - public LegacyLongFieldMapper build(BuilderContext context) { - if (context.indexCreatedVersion().onOrAfter(Version.V_5_0_0_alpha2)) { - throw new IllegalStateException("Cannot use legacy numeric types after 5.0"); - } - setupFieldType(context); - return new LegacyLongFieldMapper(name, fieldType, defaultFieldType, - ignoreMalformed(context), coerce(context), includeInAll, context.indexSettings(), - multiFieldsBuilder.build(this, context), copyTo); - } - - @Override - 
protected int maxPrecisionStep() { - return 64; - } - } - - public static class TypeParser implements Mapper.TypeParser { - @Override - public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - LegacyLongFieldMapper.Builder builder = new LegacyLongFieldMapper.Builder(name); - parseNumberField(builder, name, node, parserContext); - for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { - Map.Entry entry = iterator.next(); - String propName = entry.getKey(); - Object propNode = entry.getValue(); - if (propName.equals("null_value")) { - if (propNode == null) { - throw new MapperParsingException("Property [null_value] cannot be null."); - } - builder.nullValue(nodeLongValue(propNode)); - iterator.remove(); - } - } - return builder; - } - } - - public static class LongFieldType extends NumberFieldType { - - public LongFieldType() { - super(LegacyNumericType.LONG); - } - - protected LongFieldType(LongFieldType ref) { - super(ref); - } - - @Override - public NumberFieldType clone() { - return new LongFieldType(this); - } - - @Override - public String typeName() { - return CONTENT_TYPE; - } - - @Override - public Long nullValue() { - return (Long)super.nullValue(); - } - - @Override - public BytesRef indexedValueForSearch(Object value) { - BytesRefBuilder bytesRef = new BytesRefBuilder(); - LegacyNumericUtils.longToPrefixCoded(parseLongValue(value), 0, bytesRef); // 0 because of exact match - return bytesRef.get(); - } - - @Override - public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) { - return LegacyNumericRangeQuery.newLongRange(name(), numericPrecisionStep(), - lowerTerm == null ? null : parseLongValue(lowerTerm), - upperTerm == null ? 
null : parseLongValue(upperTerm), - includeLower, includeUpper); - } - - @Override - public FieldStats stats(IndexReader reader) throws IOException { - int maxDoc = reader.maxDoc(); - Terms terms = org.apache.lucene.index.MultiFields.getTerms(reader, name()); - if (terms == null) { - return null; - } - long minValue = LegacyNumericUtils.getMinLong(terms); - long maxValue = LegacyNumericUtils.getMaxLong(terms); - return new FieldStats.Long( - maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), - isSearchable(), isAggregatable(), minValue, maxValue); - } - - @Override - public IndexFieldData.Builder fielddataBuilder() { - failIfNoDocValues(); - return new DocValuesIndexFieldData.Builder().numericType(NumericType.LONG); - } - } - - protected LegacyLongFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, - Explicit ignoreMalformed, Explicit coerce, Boolean includeInAll, - Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, includeInAll, indexSettings, multiFields, copyTo); - } - - @Override - public LongFieldType fieldType() { - return (LongFieldType) super.fieldType(); - } - - @Override - protected boolean customBoost() { - return true; - } - - @Override - protected void innerParseCreateField(ParseContext context, List fields) throws IOException { - long value; - float boost = fieldType().boost(); - if (context.externalValueSet()) { - Object externalValue = context.externalValue(); - if (externalValue == null) { - if (fieldType().nullValue() == null) { - return; - } - value = fieldType().nullValue(); - } else if (externalValue instanceof String) { - String sExternalValue = (String) externalValue; - if (sExternalValue.length() == 0) { - if (fieldType().nullValue() == null) { - return; - } - value = fieldType().nullValue(); - } else { - value = Long.parseLong(sExternalValue); - } - } else { - value = ((Number) externalValue).longValue(); - } - if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().name(), Long.toString(value), boost); - } - } else { - XContentParser parser = context.parser(); - if (parser.currentToken() == XContentParser.Token.VALUE_NULL || - (parser.currentToken() == XContentParser.Token.VALUE_STRING && parser.textLength() == 0)) { - if (fieldType().nullValue() == null) { - return; - } - value = fieldType().nullValue(); - if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) { - context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost); - } - } else if (parser.currentToken() == XContentParser.Token.START_OBJECT - && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - XContentParser.Token token; - String currentFieldName = null; - Long objValue = fieldType().nullValue(); - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else { - if ("value".equals(currentFieldName) || "_value".equals(currentFieldName)) { - if (parser.currentToken() != XContentParser.Token.VALUE_NULL) { - objValue = parser.longValue(coerce.value()); - } - } else if ("boost".equals(currentFieldName) || "_boost".equals(currentFieldName)) { - boost = parser.floatValue(); - } else { - throw new IllegalArgumentException("unknown property [" + currentFieldName + "]"); - } - } - } - if (objValue == null) { - // no 
value - return; - } - value = objValue; - } else { - value = parser.longValue(coerce.value()); - if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().name(), parser.text(), boost); - } - } - } - if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { - CustomLongNumericField field = new CustomLongNumericField(value, fieldType()); - if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - field.setBoost(boost); - } - fields.add(field); - } - if (fieldType().hasDocValues()) { - addDocValue(context, fields, value); - } - } - - @Override - protected String contentType() { - return CONTENT_TYPE; - } - - @Override - protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { - super.doXContentBody(builder, includeDefaults, params); - - if (includeDefaults || fieldType().numericPrecisionStep() != Defaults.PRECISION_STEP_64_BIT) { - builder.field("precision_step", fieldType().numericPrecisionStep()); - } - if (includeDefaults || fieldType().nullValue() != null) { - builder.field("null_value", fieldType().nullValue()); - } - if (includeInAll != null) { - builder.field("include_in_all", includeInAll); - } else if (includeDefaults) { - builder.field("include_in_all", false); - } - } - - public static class CustomLongNumericField extends CustomNumericField { - - private final long number; - - public CustomLongNumericField(long number, MappedFieldType fieldType) { - super(number, fieldType); - this.number = number; - } - - @Override - public TokenStream tokenStream(Analyzer analyzer, TokenStream previous) { - if (fieldType().indexOptions() != IndexOptions.NONE) { - return getCachedStream().setLongValue(number); - } - return null; - } - - @Override - public String numericAsString() { - return Long.toString(number); - } - } -} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/LegacyNumberFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/LegacyNumberFieldMapper.java deleted file mode 100644 index 20f248fdb5a..00000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/LegacyNumberFieldMapper.java +++ /dev/null @@ -1,320 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.mapper; - -import java.io.IOException; -import java.io.Reader; -import java.util.List; - -import org.apache.lucene.analysis.LegacyNumericTokenStream; -import org.apache.lucene.document.Field; -import org.apache.lucene.document.SortedNumericDocValuesField; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.Explicit; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.Property; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.search.DocValueFormat; -import org.joda.time.DateTimeZone; - -public abstract class LegacyNumberFieldMapper extends FieldMapper { - // this is private since it has a different default - private static final Setting COERCE_SETTING = - Setting.boolSetting("index.mapping.coerce", true, Property.IndexScope); - - public static class Defaults { - - public static final int PRECISION_STEP_8_BIT = Integer.MAX_VALUE; // 1tpv: 256 terms at most, not useful - public static final int PRECISION_STEP_16_BIT = 8; // 2tpv - public static final int PRECISION_STEP_32_BIT = 8; // 4tpv - public static final int PRECISION_STEP_64_BIT = 16; // 4tpv - - public static final Explicit IGNORE_MALFORMED = new Explicit<>(false, false); - public static final Explicit COERCE = new Explicit<>(true, false); - } - - public abstract static class Builder extends FieldMapper.Builder { - - private Boolean ignoreMalformed; - - private Boolean coerce; - - public Builder(String name, MappedFieldType fieldType, int defaultPrecisionStep) { - super(name, fieldType, fieldType); - this.fieldType.setNumericPrecisionStep(defaultPrecisionStep); - } - - public T precisionStep(int precisionStep) { - fieldType.setNumericPrecisionStep(precisionStep); - return builder; - } - - public T ignoreMalformed(boolean ignoreMalformed) { - this.ignoreMalformed = ignoreMalformed; - return builder; - } - - protected Explicit ignoreMalformed(BuilderContext context) { - if (ignoreMalformed != null) { - return new Explicit<>(ignoreMalformed, true); - } - if (context.indexSettings() != null) { - return new Explicit<>(IGNORE_MALFORMED_SETTING.get(context.indexSettings()), false); - } - return Defaults.IGNORE_MALFORMED; - } - - public T coerce(boolean coerce) { - this.coerce = coerce; - return builder; - } - - protected Explicit coerce(BuilderContext context) { - if (coerce != null) { - return new Explicit<>(coerce, true); - } - if (context.indexSettings() != null) { - return new Explicit<>(COERCE_SETTING.get(context.indexSettings()), false); - } - return Defaults.COERCE; - } - - protected void setupFieldType(BuilderContext context) { - super.setupFieldType(context); - int precisionStep = fieldType.numericPrecisionStep(); - if (precisionStep <= 0 || precisionStep >= maxPrecisionStep()) { - fieldType.setNumericPrecisionStep(Integer.MAX_VALUE); - } - } - - protected abstract int maxPrecisionStep(); - } - - public abstract static class NumberFieldType extends TermBasedFieldType { - - public NumberFieldType(LegacyNumericType numericType) { - setTokenized(false); - setOmitNorms(true); - setIndexOptions(IndexOptions.DOCS); - setStoreTermVectors(false); - setNumericType(numericType); - } - - protected NumberFieldType(NumberFieldType ref) { - super(ref); - } - - @Override - public void checkCompatibility(MappedFieldType other, - List conflicts, boolean strict) { - 
super.checkCompatibility(other, conflicts, strict); - if (numericPrecisionStep() != other.numericPrecisionStep()) { - conflicts.add("mapper [" + name() + "] has different [precision_step] values"); - } - } - - public abstract NumberFieldType clone(); - - @Override - public DocValueFormat docValueFormat(@Nullable String format, DateTimeZone timeZone) { - if (timeZone != null) { - throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support custom time zones"); - } - if (format == null) { - return DocValueFormat.RAW; - } else { - return new DocValueFormat.Decimal(format); - } - } - } - - protected Boolean includeInAll; - - protected Explicit ignoreMalformed; - - protected Explicit coerce; - - protected LegacyNumberFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, - Explicit ignoreMalformed, Explicit coerce, Boolean includeInAll, - Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo); - assert fieldType.tokenized() == false; - this.ignoreMalformed = ignoreMalformed; - this.coerce = coerce; - this.includeInAll = includeInAll; - } - - @Override - protected LegacyNumberFieldMapper clone() { - return (LegacyNumberFieldMapper) super.clone(); - } - - @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { - RuntimeException e = null; - try { - innerParseCreateField(context, fields); - } catch (IllegalArgumentException e1) { - e = e1; - } catch (MapperParsingException e2) { - e = e2; - } - - if (e != null && !ignoreMalformed.value()) { - throw e; - } - } - - protected abstract void innerParseCreateField(ParseContext context, List fields) throws IOException; - - protected final void addDocValue(ParseContext context, List fields, long value) { - fields.add(new SortedNumericDocValuesField(fieldType().name(), value)); - } - - /** - * Converts an object value into a double - */ - public static double parseDoubleValue(Object value) { - if (value instanceof Number) { - return ((Number) value).doubleValue(); - } - - if (value instanceof BytesRef) { - return Double.parseDouble(((BytesRef) value).utf8ToString()); - } - - return Double.parseDouble(value.toString()); - } - - /** - * Converts an object value into a long - */ - public static long parseLongValue(Object value) { - if (value instanceof Number) { - return ((Number) value).longValue(); - } - - if (value instanceof BytesRef) { - return Long.parseLong(((BytesRef) value).utf8ToString()); - } - - return Long.parseLong(value.toString()); - } - - @Override - protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { - super.doMerge(mergeWith, updateAllTypes); - LegacyNumberFieldMapper nfmMergeWith = (LegacyNumberFieldMapper) mergeWith; - - this.includeInAll = nfmMergeWith.includeInAll; - if (nfmMergeWith.ignoreMalformed.explicit()) { - this.ignoreMalformed = nfmMergeWith.ignoreMalformed; - } - if (nfmMergeWith.coerce.explicit()) { - this.coerce = nfmMergeWith.coerce; - } - } - - // used to we can use a numeric field in a document that is then parsed twice! 
- public abstract static class CustomNumericField extends Field { - - private ThreadLocal tokenStream = new ThreadLocal() { - @Override - protected LegacyNumericTokenStream initialValue() { - return new LegacyNumericTokenStream(fieldType().numericPrecisionStep()); - } - }; - - private static ThreadLocal tokenStream4 = new ThreadLocal() { - @Override - protected LegacyNumericTokenStream initialValue() { - return new LegacyNumericTokenStream(4); - } - }; - - private static ThreadLocal tokenStream8 = new ThreadLocal() { - @Override - protected LegacyNumericTokenStream initialValue() { - return new LegacyNumericTokenStream(8); - } - }; - - private static ThreadLocal tokenStream16 = new ThreadLocal() { - @Override - protected LegacyNumericTokenStream initialValue() { - return new LegacyNumericTokenStream(16); - } - }; - - private static ThreadLocal tokenStreamMax = new ThreadLocal() { - @Override - protected LegacyNumericTokenStream initialValue() { - return new LegacyNumericTokenStream(Integer.MAX_VALUE); - } - }; - - public CustomNumericField(Number value, MappedFieldType fieldType) { - super(fieldType.name(), fieldType); - if (value != null) { - this.fieldsData = value; - } - } - - protected LegacyNumericTokenStream getCachedStream() { - if (fieldType().numericPrecisionStep() == 4) { - return tokenStream4.get(); - } else if (fieldType().numericPrecisionStep() == 8) { - return tokenStream8.get(); - } else if (fieldType().numericPrecisionStep() == 16) { - return tokenStream16.get(); - } else if (fieldType().numericPrecisionStep() == Integer.MAX_VALUE) { - return tokenStreamMax.get(); - } - return tokenStream.get(); - } - - @Override - public String stringValue() { - return null; - } - - @Override - public Reader readerValue() { - return null; - } - - public abstract String numericAsString(); - } - - @Override - protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { - super.doXContentBody(builder, includeDefaults, params); - - if (includeDefaults || ignoreMalformed.explicit()) { - builder.field("ignore_malformed", ignoreMalformed.value()); - } - if (includeDefaults || coerce.explicit()) { - builder.field("coerce", coerce.value()); - } - } -} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/LegacyShortFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/LegacyShortFieldMapper.java deleted file mode 100644 index c15f149eb66..00000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/LegacyShortFieldMapper.java +++ /dev/null @@ -1,332 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.document.Field; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.Terms; -import org.apache.lucene.search.LegacyNumericRangeQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.LegacyNumericUtils; -import org.elasticsearch.Version; -import org.elasticsearch.action.fieldstats.FieldStats; -import org.elasticsearch.common.Explicit; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType; -import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; -import org.elasticsearch.index.query.QueryShardContext; - -import java.io.IOException; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeShortValue; -import static org.elasticsearch.index.mapper.TypeParsers.parseNumberField; - -public class LegacyShortFieldMapper extends LegacyNumberFieldMapper { - - public static final String CONTENT_TYPE = "short"; - public static final int DEFAULT_PRECISION_STEP = 8; - - public static class Defaults extends LegacyNumberFieldMapper.Defaults { - public static final MappedFieldType FIELD_TYPE = new ShortFieldType(); - - static { - FIELD_TYPE.freeze(); - } - } - - public static class Builder extends LegacyNumberFieldMapper.Builder { - - public Builder(String name) { - super(name, Defaults.FIELD_TYPE, DEFAULT_PRECISION_STEP); - builder = this; - } - - @Override - public LegacyShortFieldMapper build(BuilderContext context) { - if (context.indexCreatedVersion().onOrAfter(Version.V_5_0_0_alpha2)) { - throw new IllegalStateException("Cannot use legacy numeric types after 5.0"); - } - setupFieldType(context); - return new LegacyShortFieldMapper(name, fieldType, defaultFieldType, - ignoreMalformed(context), coerce(context), includeInAll, - context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); - } - - @Override - protected int maxPrecisionStep() { - return 32; - } - } - - public static class TypeParser implements Mapper.TypeParser { - @Override - public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - LegacyShortFieldMapper.Builder builder = new LegacyShortFieldMapper.Builder(name); - parseNumberField(builder, name, node, parserContext); - for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { - Map.Entry entry = iterator.next(); - String propName = entry.getKey(); - Object propNode = entry.getValue(); - if (propName.equals("null_value")) { - if (propNode == null) { - throw new MapperParsingException("Property [null_value] cannot be null."); - } - builder.nullValue(nodeShortValue(propNode)); - iterator.remove(); - } - } - return builder; - } - } - - static final class ShortFieldType extends NumberFieldType { - - public ShortFieldType() { - super(LegacyNumericType.INT); - } - - protected ShortFieldType(ShortFieldType ref) { - super(ref); - } - - @Override - public NumberFieldType clone() { - return new 
ShortFieldType(this); - } - - @Override - public String typeName() { - return CONTENT_TYPE; - } - - @Override - public Short nullValue() { - return (Short)super.nullValue(); - } - - @Override - public Short valueForDisplay(Object value) { - if (value == null) { - return null; - } - return ((Number) value).shortValue(); - } - - @Override - public BytesRef indexedValueForSearch(Object value) { - BytesRefBuilder bytesRef = new BytesRefBuilder(); - LegacyNumericUtils.intToPrefixCoded(parseValue(value), 0, bytesRef); // 0 because of exact match - return bytesRef.get(); - } - - @Override - public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) { - return LegacyNumericRangeQuery.newIntRange(name(), numericPrecisionStep(), - lowerTerm == null ? null : (int)parseValue(lowerTerm), - upperTerm == null ? null : (int)parseValue(upperTerm), - includeLower, includeUpper); - } - - @Override - public FieldStats.Long stats(IndexReader reader) throws IOException { - int maxDoc = reader.maxDoc(); - Terms terms = org.apache.lucene.index.MultiFields.getTerms(reader, name()); - if (terms == null) { - return null; - } - long minValue = LegacyNumericUtils.getMinInt(terms); - long maxValue = LegacyNumericUtils.getMaxInt(terms); - return new FieldStats.Long( - maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), - isSearchable(), isAggregatable(), minValue, maxValue); - } - - @Override - public IndexFieldData.Builder fielddataBuilder() { - failIfNoDocValues(); - return new DocValuesIndexFieldData.Builder().numericType(NumericType.SHORT); - } - } - - protected LegacyShortFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, - Explicit ignoreMalformed, Explicit coerce, Boolean includeInAll, - Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, includeInAll, indexSettings, multiFields, copyTo); - } - - @Override - public ShortFieldType fieldType() { - return (ShortFieldType) super.fieldType(); - } - - private static short parseValue(Object value) { - if (value instanceof Number) { - return ((Number) value).shortValue(); - } - if (value instanceof BytesRef) { - return Short.parseShort(((BytesRef) value).utf8ToString()); - } - return Short.parseShort(value.toString()); - } - - @Override - protected boolean customBoost() { - return true; - } - - @Override - protected void innerParseCreateField(ParseContext context, List fields) throws IOException { - short value; - float boost = fieldType().boost(); - if (context.externalValueSet()) { - Object externalValue = context.externalValue(); - if (externalValue == null) { - if (fieldType().nullValue() == null) { - return; - } - value = fieldType().nullValue(); - } else if (externalValue instanceof String) { - String sExternalValue = (String) externalValue; - if (sExternalValue.length() == 0) { - if (fieldType().nullValue() == null) { - return; - } - value = fieldType().nullValue(); - } else { - value = Short.parseShort(sExternalValue); - } - } else { - value = ((Number) externalValue).shortValue(); - } - if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().name(), Short.toString(value), boost); - } - } else { - XContentParser parser = context.parser(); - if (parser.currentToken() == XContentParser.Token.VALUE_NULL || - (parser.currentToken() == XContentParser.Token.VALUE_STRING && parser.textLength() == 0)) { - if 
(fieldType().nullValue() == null) { - return; - } - value = fieldType().nullValue(); - if (fieldType().nullValueAsString() != null && (context.includeInAll(includeInAll, this))) { - context.allEntries().addText(fieldType().name(), fieldType().nullValueAsString(), boost); - } - } else if (parser.currentToken() == XContentParser.Token.START_OBJECT - && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - XContentParser.Token token; - String currentFieldName = null; - Short objValue = fieldType().nullValue(); - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else { - if ("value".equals(currentFieldName) || "_value".equals(currentFieldName)) { - if (parser.currentToken() != XContentParser.Token.VALUE_NULL) { - objValue = parser.shortValue(coerce.value()); - } - } else if ("boost".equals(currentFieldName) || "_boost".equals(currentFieldName)) { - boost = parser.floatValue(); - } else { - throw new IllegalArgumentException("unknown property [" + currentFieldName + "]"); - } - } - } - if (objValue == null) { - // no value - return; - } - value = objValue; - } else { - value = parser.shortValue(coerce.value()); - if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().name(), parser.text(), boost); - } - } - } - if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { - CustomShortNumericField field = new CustomShortNumericField(value, fieldType()); - if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - field.setBoost(boost); - } - fields.add(field); - } - if (fieldType().hasDocValues()) { - addDocValue(context, fields, value); - } - } - - @Override - protected String contentType() { - return CONTENT_TYPE; - } - - @Override - protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { - super.doXContentBody(builder, includeDefaults, params); - - if (includeDefaults || fieldType().numericPrecisionStep() != DEFAULT_PRECISION_STEP) { - builder.field("precision_step", fieldType().numericPrecisionStep()); - } - if (includeDefaults || fieldType().nullValue() != null) { - builder.field("null_value", fieldType().nullValue()); - } - if (includeInAll != null) { - builder.field("include_in_all", includeInAll); - } else if (includeDefaults) { - builder.field("include_in_all", false); - } - - } - - public static class CustomShortNumericField extends CustomNumericField { - - private final short number; - - public CustomShortNumericField(short number, NumberFieldType fieldType) { - super(number, fieldType); - this.number = number; - } - - @Override - public TokenStream tokenStream(Analyzer analyzer, TokenStream previous) { - if (fieldType().indexOptions() != IndexOptions.NONE) { - return getCachedStream().setIntValue(number); - } - return null; - } - - @Override - public String numericAsString() { - return Short.toString(number); - } - } -} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/LegacyTokenCountFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/LegacyTokenCountFieldMapper.java deleted file mode 100644 index 7981b400214..00000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/LegacyTokenCountFieldMapper.java +++ /dev/null @@ -1,190 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; -import org.apache.lucene.document.Field; -import org.elasticsearch.Version; -import org.elasticsearch.common.Explicit; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.analysis.NamedAnalyzer; -import org.elasticsearch.index.mapper.StringFieldMapper.ValueAndBoost; - -import java.io.IOException; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -import static org.apache.lucene.index.IndexOptions.NONE; -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeIntegerValue; -import static org.elasticsearch.index.mapper.TypeParsers.parseNumberField; - -/** - * A {@link FieldMapper} that takes a string and writes a count of the tokens in that string - * to the index. In most ways the mapper acts just like an {@link LegacyIntegerFieldMapper}. 
- */ -public class LegacyTokenCountFieldMapper extends LegacyIntegerFieldMapper { - public static final String CONTENT_TYPE = "token_count"; - - public static class Defaults extends LegacyIntegerFieldMapper.Defaults { - - } - - public static class Builder extends LegacyNumberFieldMapper.Builder { - private NamedAnalyzer analyzer; - - public Builder(String name) { - super(name, Defaults.FIELD_TYPE, Defaults.PRECISION_STEP_32_BIT); - builder = this; - } - - public Builder analyzer(NamedAnalyzer analyzer) { - this.analyzer = analyzer; - return this; - } - - public NamedAnalyzer analyzer() { - return analyzer; - } - - @Override - public LegacyTokenCountFieldMapper build(BuilderContext context) { - if (context.indexCreatedVersion().onOrAfter(Version.V_5_0_0_alpha2)) { - throw new IllegalStateException("Cannot use legacy numeric types after 5.0"); - } - setupFieldType(context); - return new LegacyTokenCountFieldMapper(name, fieldType, defaultFieldType, - ignoreMalformed(context), coerce(context), includeInAll, context.indexSettings(), - analyzer, multiFieldsBuilder.build(this, context), copyTo); - } - - @Override - protected int maxPrecisionStep() { - return 32; - } - } - - public static class TypeParser implements Mapper.TypeParser { - @Override - @SuppressWarnings("unchecked") - public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - LegacyTokenCountFieldMapper.Builder builder = new LegacyTokenCountFieldMapper.Builder(name); - for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { - Map.Entry entry = iterator.next(); - String propName = entry.getKey(); - Object propNode = entry.getValue(); - if (propName.equals("null_value")) { - builder.nullValue(nodeIntegerValue(propNode)); - iterator.remove(); - } else if (propName.equals("analyzer")) { - NamedAnalyzer analyzer = parserContext.getIndexAnalyzers().get(propNode.toString()); - if (analyzer == null) { - throw new MapperParsingException("Analyzer [" + propNode.toString() + "] not found for field [" + name + "]"); - } - builder.analyzer(analyzer); - iterator.remove(); - } - } - parseNumberField(builder, name, node, parserContext); - if (builder.analyzer() == null) { - throw new MapperParsingException("Analyzer must be set for field [" + name + "] but wasn't."); - } - return builder; - } - } - - private NamedAnalyzer analyzer; - - protected LegacyTokenCountFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Explicit ignoreMalformed, - Explicit coerce, Boolean includeInAll, Settings indexSettings, NamedAnalyzer analyzer, MultiFields multiFields, CopyTo copyTo) { - super(simpleName, fieldType, defaultFieldType, ignoreMalformed, coerce, includeInAll, indexSettings, multiFields, copyTo); - this.analyzer = analyzer; - } - - @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { - ValueAndBoost valueAndBoost = StringFieldMapper.parseCreateFieldForString(context, null /* Out null value is an int so we convert*/, fieldType().boost()); - if (valueAndBoost.value() == null && fieldType().nullValue() == null) { - return; - } - - if (fieldType().indexOptions() != NONE || fieldType().stored() || fieldType().hasDocValues()) { - int count; - if (valueAndBoost.value() == null) { - count = fieldType().nullValue(); - } else { - count = countPositions(analyzer, simpleName(), valueAndBoost.value()); - } - addIntegerFields(context, fields, count, valueAndBoost.boost()); - } - } - - /** - * Count position 
increments in a token stream. Package private for testing. - * @param analyzer analyzer to create token stream - * @param fieldName field name to pass to analyzer - * @param fieldValue field value to pass to analyzer - * @return number of position increments in a token stream - * @throws IOException if tokenStream throws it - */ - static int countPositions(Analyzer analyzer, String fieldName, String fieldValue) throws IOException { - try (TokenStream tokenStream = analyzer.tokenStream(fieldName, fieldValue)) { - int count = 0; - PositionIncrementAttribute position = tokenStream.addAttribute(PositionIncrementAttribute.class); - tokenStream.reset(); - while (tokenStream.incrementToken()) { - count += position.getPositionIncrement(); - } - tokenStream.end(); - count += position.getPositionIncrement(); - return count; - } - } - - /** - * Name of analyzer. - * @return name of analyzer - */ - public String analyzer() { - return analyzer.name(); - } - - @Override - protected String contentType() { - return CONTENT_TYPE; - } - - @Override - protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { - super.doMerge(mergeWith, updateAllTypes); - this.analyzer = ((LegacyTokenCountFieldMapper) mergeWith).analyzer; - } - - @Override - protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { - super.doXContentBody(builder, includeDefaults, params); - - builder.field("analyzer", analyzer()); - } - -} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java b/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java index 5b80986b142..1c07420d262 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MappedFieldType.java @@ -35,6 +35,7 @@ import org.apache.lucene.search.BoostQuery; import org.elasticsearch.action.fieldstats.FieldStats; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.joda.DateMathParser; +import org.elasticsearch.common.lucene.all.AllTermQuery; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.fielddata.IndexFieldData; @@ -463,6 +464,12 @@ public abstract class MappedFieldType extends FieldType { while (termQuery instanceof BoostQuery) { termQuery = ((BoostQuery) termQuery).getQuery(); } + if (termQuery instanceof AllTermQuery) { + return ((AllTermQuery) termQuery).getTerm(); + } else if (termQuery instanceof TypeFieldMapper.TypesQuery) { + assert ((TypeFieldMapper.TypesQuery) termQuery).getTerms().length == 1; + return new Term(TypeFieldMapper.NAME, ((TypeFieldMapper.TypesQuery) termQuery).getTerms()[0]); + } if (termQuery instanceof TermQuery == false) { throw new IllegalArgumentException("Cannot extract a term from a query of type " + termQuery.getClass() + ": " + termQuery); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java index afdb6c83d50..d903ab109df 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java @@ -29,13 +29,13 @@ import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.StoredField; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexableField; 
import org.apache.lucene.index.PointValues; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.NumericUtils; -import org.elasticsearch.Version; import org.elasticsearch.action.fieldstats.FieldStats; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.settings.Setting; @@ -47,7 +47,6 @@ import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType; import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; -import org.elasticsearch.index.mapper.LegacyNumberFieldMapper.Defaults; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.DocValueFormat; import org.joda.time.DateTimeZone; @@ -66,6 +65,11 @@ public class NumberFieldMapper extends FieldMapper { static final Setting COERCE_SETTING = Setting.boolSetting("index.mapping.coerce", true, Property.IndexScope); + public static class Defaults { + public static final Explicit IGNORE_MALFORMED = new Explicit<>(false, false); + public static final Explicit COERCE = new Explicit<>(true, false); + } + public static class Builder extends FieldMapper.Builder { private Boolean ignoreMalformed; @@ -130,24 +134,6 @@ public class NumberFieldMapper extends FieldMapper { @Override public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - if (parserContext.indexVersionCreated().before(Version.V_5_0_0_alpha2)) { - switch (type) { - case BYTE: - return new LegacyByteFieldMapper.TypeParser().parse(name, node, parserContext); - case SHORT: - return new LegacyShortFieldMapper.TypeParser().parse(name, node, parserContext); - case INTEGER: - return new LegacyIntegerFieldMapper.TypeParser().parse(name, node, parserContext); - case LONG: - return new LegacyLongFieldMapper.TypeParser().parse(name, node, parserContext); - case FLOAT: - return new LegacyFloatFieldMapper.TypeParser().parse(name, node, parserContext); - case DOUBLE: - return new LegacyDoubleFieldMapper.TypeParser().parse(name, node, parserContext); - default: - throw new AssertionError(); - } - } Builder builder = new Builder(name, type); TypeParsers.parseField(builder, name, node, parserContext); for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { @@ -895,7 +881,7 @@ public class NumberFieldMapper extends FieldMapper { } @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { + protected void parseCreateField(ParseContext context, List fields) throws IOException { final boolean includeInAll = context.includeInAll(this.includeInAll, this); XContentParser parser = context.parser(); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/ParentFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/ParentFieldMapper.java index 9caef2c7740..677da0e5f4e 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/ParentFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/ParentFieldMapper.java @@ -22,6 +22,7 @@ import org.apache.lucene.document.Field; import org.apache.lucene.document.SortedDocValuesField; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.Term; import 
org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; @@ -227,7 +228,7 @@ public class ParentFieldMapper extends MetadataFieldMapper { } @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { + protected void parseCreateField(ParseContext context, List fields) throws IOException { boolean parent = context.docMapper().isParent(context.sourceToParse().type()); if (parent) { fields.add(new SortedDocValuesField(parentJoinField.fieldType().name(), new BytesRef(context.sourceToParse().id()))); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/ParsedDocument.java b/core/src/main/java/org/elasticsearch/index/mapper/ParsedDocument.java index dc0ba197b15..eb42c3b9649 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/ParsedDocument.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/ParsedDocument.java @@ -39,10 +39,6 @@ public class ParsedDocument { private final String routing; - private final long timestamp; - - private final long ttl; - private final List documents; private BytesReference source; @@ -57,8 +53,6 @@ public class ParsedDocument { String id, String type, String routing, - long timestamp, - long ttl, List documents, BytesReference source, Mapping dynamicMappingsUpdate) { @@ -68,8 +62,6 @@ public class ParsedDocument { this.type = type; this.uid = Uid.createUidAsBytes(type, id); this.routing = routing; - this.timestamp = timestamp; - this.ttl = ttl; this.documents = documents; this.source = source; this.dynamicMappingsUpdate = dynamicMappingsUpdate; @@ -99,14 +91,6 @@ public class ParsedDocument { return this.routing; } - public long timestamp() { - return this.timestamp; - } - - public long ttl() { - return this.ttl; - } - public Document rootDoc() { return documents.get(documents.size() - 1); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java new file mode 100644 index 00000000000..214cc52c3ee --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/mapper/RangeFieldMapper.java @@ -0,0 +1,772 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License.
+ */ +package org.elasticsearch.index.mapper; + +import org.apache.lucene.document.Field; +import org.apache.lucene.document.DoubleRangeField; +import org.apache.lucene.document.FloatRangeField; +import org.apache.lucene.document.IntRangeField; +import org.apache.lucene.document.LongRangeField; +import org.apache.lucene.document.StoredField; +import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.IndexableField; +import org.apache.lucene.search.BoostQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.NumericUtils; +import org.elasticsearch.common.Explicit; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.geo.ShapeRelation; +import org.elasticsearch.common.joda.DateMathParser; +import org.elasticsearch.common.joda.FormatDateTimeFormatter; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.LocaleUtils; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; +import org.elasticsearch.index.query.QueryShardContext; +import org.joda.time.DateTimeZone; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.index.mapper.TypeParsers.parseDateTimeFormatter; +import static org.elasticsearch.index.query.RangeQueryBuilder.GT_FIELD; +import static org.elasticsearch.index.query.RangeQueryBuilder.GTE_FIELD; +import static org.elasticsearch.index.query.RangeQueryBuilder.LT_FIELD; +import static org.elasticsearch.index.query.RangeQueryBuilder.LTE_FIELD; + +/** A {@link FieldMapper} for indexing numeric and date ranges, and creating queries */ +public class RangeFieldMapper extends FieldMapper { + public static final boolean DEFAULT_INCLUDE_UPPER = true; + public static final boolean DEFAULT_INCLUDE_LOWER = true; + + public static class Defaults { + public static final Explicit COERCE = new Explicit<>(true, false); + } + + // this is private since it has a different default + static final Setting COERCE_SETTING = + Setting.boolSetting("index.mapping.coerce", true, Setting.Property.IndexScope); + + public static class Builder extends FieldMapper.Builder { + private Boolean coerce; + private Locale locale; + + public Builder(String name, RangeType type) { + super(name, new RangeFieldType(type), new RangeFieldType(type)); + builder = this; + locale = Locale.ROOT; + } + + @Override + public RangeFieldType fieldType() { + return (RangeFieldType)fieldType; + } + + @Override + public Builder docValues(boolean docValues) { + if (docValues == true) { + throw new IllegalArgumentException("field [" + name + "] does not currently support " + TypeParsers.DOC_VALUES); + } + return super.docValues(docValues); + } + + public Builder coerce(boolean coerce) { + this.coerce = coerce; + return builder; + } + + protected Explicit coerce(BuilderContext context) { + if (coerce != null) { + return new Explicit<>(coerce, true); + } + if (context.indexSettings() != null) { + return new Explicit<>(COERCE_SETTING.get(context.indexSettings()), false); + } + return Defaults.COERCE; + } + + public Builder dateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) { + fieldType().setDateTimeFormatter(dateTimeFormatter); + return this; + } + + 
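The new range mapper delegates indexing and searching to the single-dimension Lucene range fields imported at the top of this file (IntRangeField, LongRangeField, FloatRangeField, DoubleRangeField): each value is indexed as a min/max pair and queried through newIntersectsQuery, newWithinQuery and newContainsQuery, with exclusive bounds first normalized to the nearest included value (+1/-1 for the integral types, Math.nextUp/nextDown for the floating-point types) as the RangeType enum further down does. A rough sketch of what a long_range value boils down to at the Lucene level; the RAMDirectory, field name and sample values are assumptions for illustration only, not code from this change.

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.LongRangeField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.store.RAMDirectory;

public class LongRangeSketch {
    public static void main(String[] args) throws Exception {
        RAMDirectory dir = new RAMDirectory();
        try (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
            Document doc = new Document();
            // one range value covering [10, 20], mirroring RangeType.LONG.getRangeField
            doc.add(new LongRangeField("duration", new long[] {10L}, new long[] {20L}));
            writer.addDocument(doc);
        }
        try (DirectoryReader reader = DirectoryReader.open(dir)) {
            // "gt": 5, "lte": 25 -> the exclusive lower bound is bumped to 6 before the query is built
            Query within = LongRangeField.newWithinQuery("duration", new long[] {6L}, new long[] {25L});
            System.out.println(new IndexSearcher(reader).count(within)); // 1: [10, 20] lies within [6, 25]
        }
    }
}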
@Override + public Builder nullValue(Object nullValue) { + throw new IllegalArgumentException("Field [" + name() + "] does not support null value."); + } + + public void locale(Locale locale) { + this.locale = locale; + } + + @Override + protected void setupFieldType(BuilderContext context) { + super.setupFieldType(context); + FormatDateTimeFormatter dateTimeFormatter = fieldType().dateTimeFormatter; + if (fieldType().rangeType == RangeType.DATE) { + if (!locale.equals(dateTimeFormatter.locale())) { + fieldType().setDateTimeFormatter(new FormatDateTimeFormatter(dateTimeFormatter.format(), + dateTimeFormatter.parser(), dateTimeFormatter.printer(), locale)); + } + } else if (dateTimeFormatter != null) { + throw new IllegalArgumentException("field [" + name() + "] of type [" + fieldType().rangeType + + "] should not define a dateTimeFormatter unless it is a " + RangeType.DATE + " type"); + } + } + + @Override + public RangeFieldMapper build(BuilderContext context) { + setupFieldType(context); + return new RangeFieldMapper(name, fieldType, defaultFieldType, coerce(context), includeInAll, + context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); + } + } + + public static class TypeParser implements Mapper.TypeParser { + final RangeType type; + + public TypeParser(RangeType type) { + this.type = type; + } + + @Override + public Mapper.Builder parse(String name, Map node, + ParserContext parserContext) throws MapperParsingException { + Builder builder = new Builder(name, type); + TypeParsers.parseField(builder, name, node, parserContext); + for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { + Map.Entry entry = iterator.next(); + String propName = entry.getKey(); + Object propNode = entry.getValue(); + if (propName.equals("null_value")) { + throw new MapperParsingException("Property [null_value] is not supported for [" + this.type.name + + "] field types."); + } else if (propName.equals("coerce")) { + builder.coerce(TypeParsers.nodeBooleanValue("coerce", propNode, parserContext)); + iterator.remove(); + } else if (propName.equals("locale")) { + builder.locale(LocaleUtils.parse(propNode.toString())); + iterator.remove(); + } else if (propName.equals("format")) { + builder.dateTimeFormatter(parseDateTimeFormatter(propNode)); + iterator.remove(); + } else if (TypeParsers.parseMultiField(builder, name, parserContext, propName, propNode)) { + iterator.remove(); + } + } + return builder; + } + } + + public static final class RangeFieldType extends MappedFieldType { + protected RangeType rangeType; + protected FormatDateTimeFormatter dateTimeFormatter; + protected DateMathParser dateMathParser; + + public RangeFieldType(RangeType type) { + super(); + this.rangeType = Objects.requireNonNull(type); + setTokenized(false); + setHasDocValues(false); + setOmitNorms(true); + if (rangeType == RangeType.DATE) { + setDateTimeFormatter(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER); + } + } + + public RangeFieldType(RangeFieldType other) { + super(other); + this.rangeType = other.rangeType; + if (other.dateTimeFormatter() != null) { + setDateTimeFormatter(other.dateTimeFormatter); + } + } + + @Override + public MappedFieldType clone() { + return new RangeFieldType(this); + } + + @Override + public boolean equals(Object o) { + if (!super.equals(o)) return false; + RangeFieldType that = (RangeFieldType) o; + return Objects.equals(rangeType, that.rangeType) && + (rangeType == RangeType.DATE) ? 
+ Objects.equals(dateTimeFormatter.format(), that.dateTimeFormatter.format()) + && Objects.equals(dateTimeFormatter.locale(), that.dateTimeFormatter.locale()) + : dateTimeFormatter == null && that.dateTimeFormatter == null; + } + + @Override + public int hashCode() { + return (dateTimeFormatter == null) ? Objects.hash(super.hashCode(), rangeType) + : Objects.hash(super.hashCode(), rangeType, dateTimeFormatter.format(), dateTimeFormatter.locale()); + } + + @Override + public String typeName() { + return rangeType.name; + } + + @Override + public void checkCompatibility(MappedFieldType fieldType, List conflicts, boolean strict) { + super.checkCompatibility(fieldType, conflicts, strict); + if (strict) { + RangeFieldType other = (RangeFieldType)fieldType; + if (this.rangeType != other.rangeType) { + conflicts.add("mapper [" + name() + + "] is attempting to update from type [" + rangeType.name + + "] to incompatible type [" + other.rangeType.name + "]."); + } + if (this.rangeType == RangeType.DATE) { + if (Objects.equals(dateTimeFormatter().format(), other.dateTimeFormatter().format()) == false) { + conflicts.add("mapper [" + name() + + "] is used by multiple types. Set update_all_types to true to update [format] across all types."); + } + if (Objects.equals(dateTimeFormatter().locale(), other.dateTimeFormatter().locale()) == false) { + conflicts.add("mapper [" + name() + + "] is used by multiple types. Set update_all_types to true to update [locale] across all types."); + } + } + } + } + + public FormatDateTimeFormatter dateTimeFormatter() { + return dateTimeFormatter; + } + + public void setDateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) { + checkIfFrozen(); + this.dateTimeFormatter = dateTimeFormatter; + this.dateMathParser = new DateMathParser(dateTimeFormatter); + } + + protected DateMathParser dateMathParser() { + return dateMathParser; + } + + @Override + public Query termQuery(Object value, QueryShardContext context) { + Query query = rangeQuery(value, value, true, true, context); + if (boost() != 1f) { + query = new BoostQuery(query, boost()); + } + return query; + } + + @Override + public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, + QueryShardContext context) { + return rangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, ShapeRelation.INTERSECTS, context); + } + + public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, + ShapeRelation relation, QueryShardContext context) { + failIfNotIndexed(); + return rangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, relation, null, dateMathParser, context); + } + + public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, + ShapeRelation relation, DateTimeZone timeZone, DateMathParser parser, QueryShardContext context) { + return rangeType.rangeQuery(name(), lowerTerm, upperTerm, includeLower, includeUpper, relation, timeZone, parser, context); + } + } + + private Boolean includeInAll; + private Explicit coerce; + + private RangeFieldMapper( + String simpleName, + MappedFieldType fieldType, + MappedFieldType defaultFieldType, + Explicit coerce, + Boolean includeInAll, + Settings indexSettings, + MultiFields multiFields, + CopyTo copyTo) { + super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo); + this.coerce = coerce; + this.includeInAll = includeInAll; + } + + @Override + public RangeFieldType fieldType() { + return (RangeFieldType) 
super.fieldType(); + } + + @Override + protected String contentType() { + return fieldType.typeName(); + } + + @Override + protected RangeFieldMapper clone() { + return (RangeFieldMapper) super.clone(); + } + + @Override + protected void parseCreateField(ParseContext context, List fields) throws IOException { + final boolean includeInAll = context.includeInAll(this.includeInAll, this); + Range range; + if (context.externalValueSet()) { + range = context.parseExternalValue(Range.class); + } else { + XContentParser parser = context.parser(); + if (parser.currentToken() == XContentParser.Token.START_OBJECT) { + RangeFieldType fieldType = fieldType(); + RangeType rangeType = fieldType.rangeType; + String fieldName = null; + Number from = rangeType.minValue(); + Number to = rangeType.maxValue(); + boolean includeFrom = DEFAULT_INCLUDE_LOWER; + boolean includeTo = DEFAULT_INCLUDE_UPPER; + XContentParser.Token token; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + fieldName = parser.currentName(); + } else { + if (fieldName.equals(GT_FIELD.getPreferredName())) { + includeFrom = false; + if (parser.currentToken() != XContentParser.Token.VALUE_NULL) { + from = rangeType.parseFrom(fieldType, parser, coerce.value(), includeFrom); + } + } else if (fieldName.equals(GTE_FIELD.getPreferredName())) { + includeFrom = true; + if (parser.currentToken() != XContentParser.Token.VALUE_NULL) { + from = rangeType.parseFrom(fieldType, parser, coerce.value(), includeFrom); + } + } else if (fieldName.equals(LT_FIELD.getPreferredName())) { + includeTo = false; + if (parser.currentToken() != XContentParser.Token.VALUE_NULL) { + to = rangeType.parseTo(fieldType, parser, coerce.value(), includeTo); + } + } else if (fieldName.equals(LTE_FIELD.getPreferredName())) { + includeTo = true; + if (parser.currentToken() != XContentParser.Token.VALUE_NULL) { + to = rangeType.parseTo(fieldType, parser, coerce.value(), includeTo); + } + } else { + throw new MapperParsingException("error parsing field [" + + name() + "], with unknown parameter [" + fieldName + "]"); + } + } + } + range = new Range(rangeType, from, to, includeFrom, includeTo); + } else { + throw new MapperParsingException("error parsing field [" + + name() + "], expected an object but got " + parser.currentName()); + } + } + if (includeInAll) { + context.allEntries().addText(fieldType.name(), range.toString(), fieldType.boost()); + } + boolean indexed = fieldType.indexOptions() != IndexOptions.NONE; + boolean docValued = fieldType.hasDocValues(); + boolean stored = fieldType.stored(); + fields.addAll(fieldType().rangeType.createFields(name(), range, indexed, docValued, stored)); + } + + @Override + protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { + super.doMerge(mergeWith, updateAllTypes); + RangeFieldMapper other = (RangeFieldMapper) mergeWith; + this.includeInAll = other.includeInAll; + if (other.coerce.explicit()) { + this.coerce = other.coerce; + } + } + + @Override + protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { + super.doXContentBody(builder, includeDefaults, params); + + if (includeDefaults || (fieldType().dateTimeFormatter() != null + && fieldType().dateTimeFormatter().format().equals(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.format()) == false)) { + builder.field("format", fieldType().dateTimeFormatter().format()); + } + if (includeDefaults || (fieldType().dateTimeFormatter() != null + && 
fieldType().dateTimeFormatter().locale() != Locale.ROOT)) { + builder.field("locale", fieldType().dateTimeFormatter().locale()); + } + if (includeDefaults || coerce.explicit()) { + builder.field("coerce", coerce.value()); + } + if (includeInAll != null) { + builder.field("include_in_all", includeInAll); + } else if (includeDefaults) { + builder.field("include_in_all", false); + } + } + + /** Enum defining the type of range */ + public enum RangeType { + DATE("date_range", NumberType.LONG) { + @Override + public Field getRangeField(String name, Range r) { + return new LongRangeField(name, new long[] {r.from.longValue()}, new long[] {r.to.longValue()}); + } + private Number parse(DateMathParser dateMathParser, String dateStr) { + return dateMathParser.parse(dateStr, () -> {throw new IllegalArgumentException("now is not used at indexing time");}); + } + @Override + public Number parseFrom(RangeFieldType fieldType, XContentParser parser, boolean coerce, boolean included) + throws IOException { + Number value = parse(fieldType.dateMathParser, parser.text()); + return included ? value : nextUp(value); + } + @Override + public Number parseTo(RangeFieldType fieldType, XContentParser parser, boolean coerce, boolean included) + throws IOException{ + Number value = parse(fieldType.dateMathParser, parser.text()); + return included ? value : nextDown(value); + } + @Override + public Long minValue() { + return Long.MIN_VALUE; + } + @Override + public Long maxValue() { + return Long.MAX_VALUE; + } + @Override + public Number nextUp(Number value) { + return LONG.nextUp(value); + } + @Override + public Number nextDown(Number value) { + return LONG.nextDown(value); + } + @Override + public byte[] getBytes(Range r) { + return LONG.getBytes(r); + } + @Override + public Query rangeQuery(String field, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, + ShapeRelation relation, @Nullable DateTimeZone timeZone, @Nullable DateMathParser parser, + QueryShardContext context) { + DateTimeZone zone = (timeZone == null) ? DateTimeZone.UTC : timeZone; + DateMathParser dateMathParser = (parser == null) ? + new DateMathParser(DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER) : parser; + Long low = lowerTerm == null ? Long.MIN_VALUE : + dateMathParser.parse(lowerTerm instanceof BytesRef ? ((BytesRef) lowerTerm).utf8ToString() : lowerTerm.toString(), + context::nowInMillis, false, zone); + Long high = upperTerm == null ? Long.MAX_VALUE : + dateMathParser.parse(upperTerm instanceof BytesRef ? 
((BytesRef) upperTerm).utf8ToString() : upperTerm.toString(), + context::nowInMillis, false, zone); + + return super.rangeQuery(field, low, high, includeLower, includeUpper, relation, zone, dateMathParser, context); + } + @Override + public Query withinQuery(String field, Number from, Number to, boolean includeLower, boolean includeUpper) { + return LONG.withinQuery(field, from, to, includeLower, includeUpper); + } + @Override + public Query containsQuery(String field, Number from, Number to, boolean includeLower, boolean includeUpper) { + return LONG.containsQuery(field, from, to, includeLower, includeUpper); + } + @Override + public Query intersectsQuery(String field, Number from, Number to, boolean includeLower, boolean includeUpper) { + return LONG.intersectsQuery(field, from, to, includeLower, includeUpper); + } + }, + // todo support half_float + FLOAT("float_range", NumberType.FLOAT) { + @Override + public Float minValue() { + return Float.NEGATIVE_INFINITY; + } + @Override + public Float maxValue() { + return Float.POSITIVE_INFINITY; + } + @Override + public Float nextUp(Number value) { + return Math.nextUp(value.floatValue()); + } + @Override + public Float nextDown(Number value) { + return Math.nextDown(value.floatValue()); + } + @Override + public Field getRangeField(String name, Range r) { + return new FloatRangeField(name, new float[] {r.from.floatValue()}, new float[] {r.to.floatValue()}); + } + @Override + public byte[] getBytes(Range r) { + byte[] b = new byte[Float.BYTES*2]; + NumericUtils.intToSortableBytes(NumericUtils.floatToSortableInt(r.from.floatValue()), b, 0); + NumericUtils.intToSortableBytes(NumericUtils.floatToSortableInt(r.to.floatValue()), b, Float.BYTES); + return b; + } + @Override + public Query withinQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) { + return FloatRangeField.newWithinQuery(field, + new float[] {includeFrom ? (Float)from : Math.nextUp((Float)from)}, + new float[] {includeTo ? (Float)to : Math.nextDown((Float)to)}); + } + @Override + public Query containsQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) { + return FloatRangeField.newContainsQuery(field, + new float[] {includeFrom ? (Float)from : Math.nextUp((Float)from)}, + new float[] {includeTo ? (Float)to : Math.nextDown((Float)to)}); + } + @Override + public Query intersectsQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) { + return FloatRangeField.newIntersectsQuery(field, + new float[] {includeFrom ? (Float)from : Math.nextUp((Float)from)}, + new float[] {includeTo ? 
(Float)to : Math.nextDown((Float)to)}); + } + }, + DOUBLE("double_range", NumberType.DOUBLE) { + @Override + public Double minValue() { + return Double.NEGATIVE_INFINITY; + } + @Override + public Double maxValue() { + return Double.POSITIVE_INFINITY; + } + @Override + public Double nextUp(Number value) { + return Math.nextUp(value.doubleValue()); + } + @Override + public Double nextDown(Number value) { + return Math.nextDown(value.doubleValue()); + } + @Override + public Field getRangeField(String name, Range r) { + return new DoubleRangeField(name, new double[] {r.from.doubleValue()}, new double[] {r.to.doubleValue()}); + } + @Override + public byte[] getBytes(Range r) { + byte[] b = new byte[Double.BYTES*2]; + NumericUtils.longToSortableBytes(NumericUtils.doubleToSortableLong(r.from.doubleValue()), b, 0); + NumericUtils.longToSortableBytes(NumericUtils.doubleToSortableLong(r.to.doubleValue()), b, Double.BYTES); + return b; + } + @Override + public Query withinQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) { + return DoubleRangeField.newWithinQuery(field, + new double[] {includeFrom ? (Double)from : Math.nextUp((Double)from)}, + new double[] {includeTo ? (Double)to : Math.nextDown((Double)to)}); + } + @Override + public Query containsQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) { + return DoubleRangeField.newContainsQuery(field, + new double[] {includeFrom ? (Double)from : Math.nextUp((Double)from)}, + new double[] {includeTo ? (Double)to : Math.nextDown((Double)to)}); + } + @Override + public Query intersectsQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) { + return DoubleRangeField.newIntersectsQuery(field, + new double[] {includeFrom ? (Double)from : Math.nextUp((Double)from)}, + new double[] {includeTo ? (Double)to : Math.nextDown((Double)to)}); + } + }, + // todo add BYTE support + // todo add SHORT support + INTEGER("integer_range", NumberType.INTEGER) { + @Override + public Integer minValue() { + return Integer.MIN_VALUE; + } + @Override + public Integer maxValue() { + return Integer.MAX_VALUE; + } + @Override + public Integer nextUp(Number value) { + return value.intValue() + 1; + } + @Override + public Integer nextDown(Number value) { + return value.intValue() - 1; + } + @Override + public Field getRangeField(String name, Range r) { + return new IntRangeField(name, new int[] {r.from.intValue()}, new int[] {r.to.intValue()}); + } + @Override + public byte[] getBytes(Range r) { + byte[] b = new byte[Integer.BYTES*2]; + NumericUtils.intToSortableBytes(r.from.intValue(), b, 0); + NumericUtils.intToSortableBytes(r.to.intValue(), b, Integer.BYTES); + return b; + } + @Override + public Query withinQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) { + return IntRangeField.newWithinQuery(field, new int[] {(Integer)from + (includeFrom ? 0 : 1)}, + new int[] {(Integer)to - (includeTo ? 0 : 1)}); + } + @Override + public Query containsQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) { + return IntRangeField.newContainsQuery(field, new int[] {(Integer)from + (includeFrom ? 0 : 1)}, + new int[] {(Integer)to - (includeTo ? 0 : 1)}); + } + @Override + public Query intersectsQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) { + return IntRangeField.newIntersectsQuery(field, new int[] {(Integer)from + (includeFrom ? 0 : 1)}, + new int[] {(Integer)to - (includeTo ? 
0 : 1)}); + } + }, + LONG("long_range", NumberType.LONG) { + @Override + public Long minValue() { + return Long.MIN_VALUE; + } + @Override + public Long maxValue() { + return Long.MAX_VALUE; + } + @Override + public Long nextUp(Number value) { + return value.longValue() + 1; + } + @Override + public Long nextDown(Number value) { + return value.longValue() - 1; + } + @Override + public Field getRangeField(String name, Range r) { + return new LongRangeField(name, new long[] {r.from.longValue()}, new long[] {r.to.longValue()}); + } + @Override + public byte[] getBytes(Range r) { + byte[] b = new byte[Long.BYTES*2]; + long from = r.from == null ? Long.MIN_VALUE : r.from.longValue(); + long to = r.to == null ? Long.MAX_VALUE : r.to.longValue(); + NumericUtils.longToSortableBytes(from, b, 0); + NumericUtils.longToSortableBytes(to, b, Long.BYTES); + return b; + } + @Override + public Query withinQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) { + return LongRangeField.newWithinQuery(field, new long[] {(Long)from + (includeFrom ? 0 : 1)}, + new long[] {(Long)to - (includeTo ? 0 : 1)}); + } + @Override + public Query containsQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) { + return LongRangeField.newContainsQuery(field, new long[] {(Long)from + (includeFrom ? 0 : 1)}, + new long[] {(Long)to - (includeTo ? 0 : 1)}); + } + @Override + public Query intersectsQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo) { + return LongRangeField.newIntersectsQuery(field, new long[] {(Long)from + (includeFrom ? 0 : 1)}, + new long[] {(Long)to - (includeTo ? 0 : 1)}); + } + }; + + RangeType(String name, NumberType type) { + this.name = name; + this.numberType = type; + } + + /** Get the associated type name. */ + public final String typeName() { + return name; + } + + protected abstract byte[] getBytes(Range range); + public abstract Field getRangeField(String name, Range range); + public List createFields(String name, Range range, boolean indexed, boolean docValued, boolean stored) { + assert range != null : "range cannot be null when creating fields"; + List fields = new ArrayList<>(); + if (indexed) { + fields.add(getRangeField(name, range)); + } + // todo add docValues ranges once aggregations are supported + if (stored) { + fields.add(new StoredField(name, range.toString())); + } + return fields; + } + /** parses from value. rounds according to included flag */ + public Number parseFrom(RangeFieldType fieldType, XContentParser parser, boolean coerce, boolean included) throws IOException { + Number value = numberType.parse(parser, coerce); + return included ? value : nextUp(value); + } + /** parses to value. rounds according to included flag */ + public Number parseTo(RangeFieldType fieldType, XContentParser parser, boolean coerce, boolean included) throws IOException { + Number value = numberType.parse(parser, coerce); + return included ? 
value : nextDown(value); + } + + public abstract Number minValue(); + public abstract Number maxValue(); + public abstract Number nextUp(Number value); + public abstract Number nextDown(Number value); + public abstract Query withinQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo); + public abstract Query containsQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo); + public abstract Query intersectsQuery(String field, Number from, Number to, boolean includeFrom, boolean includeTo); + + public Query rangeQuery(String field, Object from, Object to, boolean includeFrom, boolean includeTo, + ShapeRelation relation, @Nullable DateTimeZone timeZone, @Nullable DateMathParser dateMathParser, + QueryShardContext context) { + Number lower = from == null ? minValue() : numberType.parse(from); + Number upper = to == null ? maxValue() : numberType.parse(to); + if (relation == ShapeRelation.WITHIN) { + return withinQuery(field, lower, upper, includeFrom, includeTo); + } else if (relation == ShapeRelation.CONTAINS) { + return containsQuery(field, lower, upper, includeFrom, includeTo); + } + return intersectsQuery(field, lower, upper, includeFrom, includeTo); + } + + public final String name; + private final NumberType numberType; + } + + /** Class defining a range */ + public static class Range { + RangeType type; + private Number from; + private Number to; + private boolean includeFrom; + private boolean includeTo; + + public Range(RangeType type, Number from, Number to, boolean includeFrom, boolean includeTo) { + this.type = type; + this.from = from; + this.to = to; + this.includeFrom = includeFrom; + this.includeTo = includeTo; + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(includeFrom ? '[' : '('); + sb.append(includeFrom || from.equals(type.minValue()) ? from : type.nextDown(from)); + sb.append(':'); + sb.append(includeTo || to.equals(type.maxValue()) ? to : type.nextUp(to)); + sb.append(includeTo ? 
']' : ')'); + return sb.toString(); + } + } +} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/RoutingFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/RoutingFieldMapper.java index 14978bc6728..fe2575b9345 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/RoutingFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/RoutingFieldMapper.java @@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.IndexableField; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -154,7 +155,7 @@ public class RoutingFieldMapper extends MetadataFieldMapper { } @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { + protected void parseCreateField(ParseContext context, List fields) throws IOException { String routing = context.sourceToParse().routing(); if (routing != null) { if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java index 6db82f7ff72..2a76aa1addd 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java @@ -19,8 +19,8 @@ package org.elasticsearch.index.mapper; -import org.apache.lucene.document.Field; import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; @@ -49,7 +49,7 @@ import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.index.fielddata.fieldcomparator.DoubleValuesComparatorSource; import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; -import org.elasticsearch.index.mapper.LegacyNumberFieldMapper.Defaults; +import org.elasticsearch.index.mapper.NumberFieldMapper.Defaults; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.search.DocValueFormat; @@ -364,7 +364,7 @@ public class ScaledFloatFieldMapper extends FieldMapper { } @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { + protected void parseCreateField(ParseContext context, List fields) throws IOException { final boolean includeInAll = context.includeInAll(this.includeInAll, this); XContentParser parser = context.parser(); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java index defe0fa8cde..b52d1262796 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/SourceFieldMapper.java @@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.document.Field; import org.apache.lucene.document.StoredField; import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.Query; import 
org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; @@ -227,7 +228,7 @@ public class SourceFieldMapper extends MetadataFieldMapper { } @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { + protected void parseCreateField(ParseContext context, List fields) throws IOException { if (!enabled) { return; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/SourceToParse.java b/core/src/main/java/org/elasticsearch/index/mapper/SourceToParse.java index 14f6e9a8587..0cafc50bbe2 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/SourceToParse.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/SourceToParse.java @@ -23,7 +23,6 @@ import java.util.Objects; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.unit.TimeValue; public class SourceToParse { @@ -49,10 +48,6 @@ public class SourceToParse { private String parentId; - private long timestamp; - - private long ttl; - private SourceToParse(Origin origin, String index, String type, String id, BytesReference source) { this.origin = Objects.requireNonNull(origin); this.index = Objects.requireNonNull(index); @@ -101,38 +96,6 @@ public class SourceToParse { return this; } - public long timestamp() { - return this.timestamp; - } - - public SourceToParse timestamp(String timestamp) { - this.timestamp = Long.parseLong(timestamp); - return this; - } - - public SourceToParse timestamp(long timestamp) { - this.timestamp = timestamp; - return this; - } - - public long ttl() { - return this.ttl; - } - - public SourceToParse ttl(TimeValue ttl) { - if (ttl == null) { - this.ttl = -1; - return this; - } - this.ttl = ttl.millis(); - return this; - } - - public SourceToParse ttl(long ttl) { - this.ttl = ttl; - return this; - } - public enum Origin { PRIMARY, REPLICA diff --git a/core/src/main/java/org/elasticsearch/index/mapper/StringFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/StringFieldMapper.java deleted file mode 100644 index d290ef0fb51..00000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/StringFieldMapper.java +++ /dev/null @@ -1,694 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.mapper; - -import org.apache.logging.log4j.Logger; -import org.apache.lucene.document.Field; -import org.apache.lucene.document.SortedSetDocValuesField; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.search.Query; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.Version; -import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.support.XContentMapValues; -import org.elasticsearch.index.analysis.NamedAnalyzer; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; -import org.elasticsearch.index.fielddata.plain.PagedBytesIndexFieldData; - -import java.io.IOException; -import java.util.Arrays; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Set; - -import static org.apache.lucene.index.IndexOptions.NONE; -import static org.elasticsearch.index.mapper.TypeParsers.parseTextField; - -public class StringFieldMapper extends FieldMapper { - - public static final String CONTENT_TYPE = "string"; - private static final int POSITION_INCREMENT_GAP_USE_ANALYZER = -1; - - // If a string field is created on 5.x and all parameters are in this list then we - // will automatically upgrade to a text/keyword field. Otherwise we will just fail - // saying that string fields are not supported anymore. - private static final Set SUPPORTED_PARAMETERS_FOR_AUTO_UPGRADE_TO_KEYWORD = new HashSet<>(Arrays.asList( - "type", - // common keyword parameters, for which the upgrade is straightforward - "index", "store", "doc_values", "omit_norms", "norms", "fields", "copy_to", - "fielddata", "include_in_all", "ignore_above")); - private static final Set SUPPORTED_PARAMETERS_FOR_AUTO_UPGRADE_TO_TEXT = new HashSet<>(Arrays.asList( - "type", - // common text parameters, for which the upgrade is straightforward - "index", "store", "doc_values", "omit_norms", "norms", "fields", "copy_to", - "fielddata", "include_in_all", "analyzer", "search_analyzer", "search_quote_analyzer", - "index_options", "position_increment_gap")); - - public static class Defaults { - public static double FIELDDATA_MIN_FREQUENCY = 0; - public static double FIELDDATA_MAX_FREQUENCY = Integer.MAX_VALUE; - public static int FIELDDATA_MIN_SEGMENT_SIZE = 0; - - public static final MappedFieldType FIELD_TYPE = new StringFieldType(); - - static { - FIELD_TYPE.freeze(); - } - - // NOTE, when adding defaults here, make sure you add them in the builder - public static final String NULL_VALUE = null; - - public static final int IGNORE_ABOVE = -1; - } - - public static class Builder extends FieldMapper.Builder { - - protected String nullValue = Defaults.NULL_VALUE; - - /** - * The distance between tokens from different values in the same field. - * POSITION_INCREMENT_GAP_USE_ANALYZER means default to the analyzer's - * setting which in turn defaults to Defaults.POSITION_INCREMENT_GAP. 
- */ - protected int positionIncrementGap = POSITION_INCREMENT_GAP_USE_ANALYZER; - - protected int ignoreAbove = Defaults.IGNORE_ABOVE; - - public Builder(String name) { - super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE); - builder = this; - } - - @Override - public StringFieldType fieldType() { - return (StringFieldType) super.fieldType(); - } - - @Override - public Builder searchAnalyzer(NamedAnalyzer searchAnalyzer) { - super.searchAnalyzer(searchAnalyzer); - return this; - } - - public Builder positionIncrementGap(int positionIncrementGap) { - this.positionIncrementGap = positionIncrementGap; - return this; - } - - public Builder ignoreAbove(int ignoreAbove) { - this.ignoreAbove = ignoreAbove; - return this; - } - - public Builder fielddata(boolean fielddata) { - fieldType().setFielddata(fielddata); - return builder; - } - - public Builder eagerGlobalOrdinals(boolean eagerGlobalOrdinals) { - fieldType().setEagerGlobalOrdinals(eagerGlobalOrdinals); - return builder; - } - - public Builder fielddataFrequencyFilter(double minFreq, double maxFreq, int minSegmentSize) { - fieldType().setFielddataMinFrequency(minFreq); - fieldType().setFielddataMaxFrequency(maxFreq); - fieldType().setFielddataMinSegmentSize(minSegmentSize); - return builder; - } - - @Override - protected void setupFieldType(BuilderContext context) { - super.setupFieldType(context); - if (fieldType().hasDocValues() && ((StringFieldType) fieldType()).fielddata()) { - ((StringFieldType) fieldType()).setFielddata(false); - } - } - - @Override - public StringFieldMapper build(BuilderContext context) { - // if the field is not analyzed, then by default, we should omit norms and have docs only - // index options, as probably what the user really wants - // if they are set explicitly, we will use those values - // we also change the values on the default field type so that toXContent emits what - // differs from the defaults - if (fieldType.indexOptions() != IndexOptions.NONE && !fieldType.tokenized()) { - defaultFieldType.setOmitNorms(true); - defaultFieldType.setIndexOptions(IndexOptions.DOCS); - if (!omitNormsSet && fieldType.boost() == 1.0f) { - fieldType.setOmitNorms(true); - } - if (!indexOptionsSet) { - fieldType.setIndexOptions(IndexOptions.DOCS); - } - } - if (positionIncrementGap != POSITION_INCREMENT_GAP_USE_ANALYZER) { - if (fieldType.indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) < 0) { - throw new IllegalArgumentException("Cannot set position_increment_gap on field [" - + name + "] without positions enabled"); - } - fieldType.setIndexAnalyzer(new NamedAnalyzer(fieldType.indexAnalyzer(), positionIncrementGap)); - fieldType.setSearchAnalyzer(new NamedAnalyzer(fieldType.searchAnalyzer(), positionIncrementGap)); - fieldType.setSearchQuoteAnalyzer(new NamedAnalyzer(fieldType.searchQuoteAnalyzer(), positionIncrementGap)); - } - setupFieldType(context); - return new StringFieldMapper( - name, fieldType(), defaultFieldType, positionIncrementGap, ignoreAbove, includeInAll, - context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo); - } - } - - public static class TypeParser implements Mapper.TypeParser { - private final DeprecationLogger deprecationLogger; - - public TypeParser() { - Logger logger = Loggers.getLogger(getClass()); - this.deprecationLogger = new DeprecationLogger(logger); - } - - @Override - public Mapper.Builder parse(String fieldName, Map node, ParserContext parserContext) throws MapperParsingException { - if 
(parserContext.indexVersionCreated().onOrAfter(Version.V_5_0_0_alpha1)) { - final Object index = node.get("index"); - if (Arrays.asList(null, "no", "not_analyzed", "analyzed").contains(index) == false) { - throw new IllegalArgumentException("Can't parse [index] value [" + index + "] for field [" + fieldName + "], expected [no], [not_analyzed] or [analyzed]"); - } - final boolean keyword = index != null && "analyzed".equals(index) == false; - - // Automatically upgrade simple mappings for ease of upgrade, otherwise fail - Set autoUpgradeParameters = keyword - ? SUPPORTED_PARAMETERS_FOR_AUTO_UPGRADE_TO_KEYWORD - : SUPPORTED_PARAMETERS_FOR_AUTO_UPGRADE_TO_TEXT; - if (autoUpgradeParameters.containsAll(node.keySet())) { - deprecationLogger.deprecated("The [string] field is deprecated, please use [text] or [keyword] instead on [{}]", - fieldName); - { - // upgrade the index setting - node.put("index", "no".equals(index) == false); - } - { - // upgrade norms settings - Object norms = node.remove("norms"); - if (norms instanceof Map) { - norms = ((Map) norms).get("enabled"); - } - if (norms != null) { - node.put("norms", TypeParsers.nodeBooleanValue("norms", norms, parserContext)); - } - Object omitNorms = node.remove("omit_norms"); - if (omitNorms != null) { - node.put("norms", TypeParsers.nodeBooleanValue("omit_norms", omitNorms, parserContext) == false); - } - } - { - // upgrade fielddata settings - Object fielddataO = node.get("fielddata"); - if (fielddataO instanceof Map) { - Map fielddata = (Map) fielddataO; - if (keyword == false) { - node.put("fielddata", "disabled".equals(fielddata.get("format")) == false); - Map fielddataFilter = (Map) fielddata.get("filter"); - if (fielddataFilter != null) { - Map frequencyFilter = (Map) fielddataFilter.get("frequency"); - frequencyFilter.keySet().retainAll(Arrays.asList("min", "max", "min_segment_size")); - node.put("fielddata_frequency_filter", frequencyFilter); - } - } else { - node.remove("fielddata"); - } - final Object loading = fielddata.get("loading"); - if (loading != null) { - node.put("eager_global_ordinals", "eager_global_ordinals".equals(loading)); - } - } - } - if (keyword) { - return new KeywordFieldMapper.TypeParser().parse(fieldName, node, parserContext); - } else { - return new TextFieldMapper.TypeParser().parse(fieldName, node, parserContext); - } - - } - Set unsupportedParameters = new HashSet<>(node.keySet()); - unsupportedParameters.removeAll(autoUpgradeParameters); - throw new IllegalArgumentException("The [string] type is removed in 5.0 and automatic upgrade failed because parameters " - + unsupportedParameters + " are not supported for automatic upgrades. 
You should now use either a [text] " - + "or [keyword] field instead for field [" + fieldName + "]"); - } - - StringFieldMapper.Builder builder = new StringFieldMapper.Builder(fieldName); - // hack for the fact that string can't just accept true/false for - // the index property and still accepts no/not_analyzed/analyzed - final Object index = node.remove("index"); - if (index != null) { - final String normalizedIndex = index.toString(); - switch (normalizedIndex) { - case "analyzed": - builder.tokenized(true); - node.put("index", true); - break; - case "not_analyzed": - builder.tokenized(false); - node.put("index", true); - break; - case "no": - node.put("index", false); - break; - default: - throw new IllegalArgumentException("Can't parse [index] value [" + index + "] for field [" + fieldName + "], expected [no], [not_analyzed] or [analyzed]"); - } - } - final Object fielddataObject = node.get("fielddata"); - if (fielddataObject instanceof Map) { - Map fielddata = (Map) fielddataObject; - final Object loading = fielddata.get("loading"); - if (loading != null) { - node.put("eager_global_ordinals", "eager_global_ordinals".equals(loading)); - } - Map fielddataFilter = (Map) fielddata.get("filter"); - if (fielddataFilter != null) { - Map frequencyFilter = (Map) fielddataFilter.get("frequency"); - frequencyFilter.keySet().retainAll(Arrays.asList("min", "max", "min_segment_size")); - node.put("fielddata_frequency_filter", frequencyFilter); - } - node.put("fielddata", "disabled".equals(fielddata.get("format")) == false); - } - parseTextField(builder, fieldName, node, parserContext); - for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { - Map.Entry entry = iterator.next(); - String propName = entry.getKey(); - Object propNode = entry.getValue(); - if (propName.equals("null_value")) { - if (propNode == null) { - throw new MapperParsingException("Property [null_value] cannot be null."); - } - builder.nullValue(propNode.toString()); - iterator.remove(); - } else if (propName.equals("position_increment_gap")) { - int newPositionIncrementGap = XContentMapValues.nodeIntegerValue(propNode, -1); - if (newPositionIncrementGap < 0) { - throw new MapperParsingException("positions_increment_gap less than 0 aren't allowed."); - } - builder.positionIncrementGap(newPositionIncrementGap); - // we need to update to actual analyzers if they are not set in this case... - // so we can inject the position increment gap... 
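// A worked illustration of the auto-upgrade branch earlier in this parse(...) method (the sample
// mappings here are assumptions for the example, not taken from this change): for indices created
// on or after 5.0, a simple legacy mapping such as
//     {"type": "string", "index": "not_analyzed", "omit_norms": true, "ignore_above": 256}
// has its "index" value normalized to a boolean and "omit_norms" folded into "norms", and is then
// handed to KeywordFieldMapper.TypeParser, so it behaves like the equivalent of
//     {"type": "keyword", "index": true, "norms": false, "ignore_above": 256}
// while "index": "analyzed" (or no "index" at all) is routed to TextFieldMapper.TypeParser instead.
// Legacy mappings that use parameters outside the supported upgrade sets are rejected with the
// "The [string] type is removed in 5.0 ..." IllegalArgumentException rather than silently upgraded.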
- if (builder.fieldType().indexAnalyzer() == null) { - builder.fieldType().setIndexAnalyzer(parserContext.getIndexAnalyzers().getDefaultIndexAnalyzer()); - } - if (builder.fieldType().searchAnalyzer() == null) { - builder.fieldType().setSearchAnalyzer(parserContext.getIndexAnalyzers().getDefaultSearchAnalyzer()); - } - if (builder.fieldType().searchQuoteAnalyzer() == null) { - builder.fieldType().setSearchQuoteAnalyzer(parserContext.getIndexAnalyzers().getDefaultSearchQuoteAnalyzer()); - } - iterator.remove(); - } else if (propName.equals("ignore_above")) { - builder.ignoreAbove(XContentMapValues.nodeIntegerValue(propNode, -1)); - iterator.remove(); - } else if (propName.equals("fielddata")) { - builder.fielddata(XContentMapValues.nodeBooleanValue(propNode)); - iterator.remove(); - } else if (propName.equals("eager_global_ordinals")) { - builder.eagerGlobalOrdinals(XContentMapValues.nodeBooleanValue(propNode)); - iterator.remove(); - } else if (propName.equals("fielddata_frequency_filter")) { - Map frequencyFilter = (Map) propNode; - double minFrequency = XContentMapValues.nodeDoubleValue(frequencyFilter.remove("min"), 0); - double maxFrequency = XContentMapValues.nodeDoubleValue(frequencyFilter.remove("max"), Integer.MAX_VALUE); - int minSegmentSize = XContentMapValues.nodeIntegerValue(frequencyFilter.remove("min_segment_size"), 0); - builder.fielddataFrequencyFilter(minFrequency, maxFrequency, minSegmentSize); - DocumentMapperParser.checkNoRemainingFields(propName, frequencyFilter, parserContext.indexVersionCreated()); - iterator.remove(); - } - } - return builder; - } - } - - public static final class StringFieldType extends org.elasticsearch.index.mapper.StringFieldType { - - private boolean fielddata; - private double fielddataMinFrequency; - private double fielddataMaxFrequency; - private int fielddataMinSegmentSize; - - public StringFieldType() { - fielddata = true; - fielddataMinFrequency = Defaults.FIELDDATA_MIN_FREQUENCY; - fielddataMaxFrequency = Defaults.FIELDDATA_MAX_FREQUENCY; - fielddataMinSegmentSize = Defaults.FIELDDATA_MIN_SEGMENT_SIZE; - } - - protected StringFieldType(StringFieldType ref) { - super(ref); - this.fielddata = ref.fielddata; - this.fielddataMinFrequency = ref.fielddataMinFrequency; - this.fielddataMaxFrequency = ref.fielddataMaxFrequency; - this.fielddataMinSegmentSize = ref.fielddataMinSegmentSize; - } - - @Override - public boolean equals(Object o) { - if (super.equals(o) == false) { - return false; - } - StringFieldType that = (StringFieldType) o; - return fielddata == that.fielddata - && fielddataMinFrequency == that.fielddataMinFrequency - && fielddataMaxFrequency == that.fielddataMaxFrequency - && fielddataMinSegmentSize == that.fielddataMinSegmentSize; - } - - @Override - public int hashCode() { - return Objects.hash(super.hashCode(), fielddata, - fielddataMinFrequency, fielddataMaxFrequency, fielddataMinSegmentSize); - } - - public StringFieldType clone() { - return new StringFieldType(this); - } - - @Override - public String typeName() { - return CONTENT_TYPE; - } - - @Override - public void checkCompatibility(MappedFieldType other, - List conflicts, boolean strict) { - super.checkCompatibility(other, conflicts, strict); - StringFieldType otherType = (StringFieldType) other; - if (strict) { - if (fielddata() != otherType.fielddata()) { - conflicts.add("mapper [" + name() + "] is used by multiple types. 
Set update_all_types to true to update [fielddata] " - + "across all types."); - } - if (fielddataMinFrequency() != otherType.fielddataMinFrequency()) { - conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update " - + "[fielddata_frequency_filter.min] across all types."); - } - if (fielddataMaxFrequency() != otherType.fielddataMaxFrequency()) { - conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update " - + "[fielddata_frequency_filter.max] across all types."); - } - if (fielddataMinSegmentSize() != otherType.fielddataMinSegmentSize()) { - conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update " - + "[fielddata_frequency_filter.min_segment_size] across all types."); - } - } - } - - public boolean fielddata() { - return fielddata; - } - - public void setFielddata(boolean fielddata) { - checkIfFrozen(); - this.fielddata = fielddata; - } - - public double fielddataMinFrequency() { - return fielddataMinFrequency; - } - - public void setFielddataMinFrequency(double fielddataMinFrequency) { - checkIfFrozen(); - this.fielddataMinFrequency = fielddataMinFrequency; - } - - public double fielddataMaxFrequency() { - return fielddataMaxFrequency; - } - - public void setFielddataMaxFrequency(double fielddataMaxFrequency) { - checkIfFrozen(); - this.fielddataMaxFrequency = fielddataMaxFrequency; - } - - public int fielddataMinSegmentSize() { - return fielddataMinSegmentSize; - } - - public void setFielddataMinSegmentSize(int fielddataMinSegmentSize) { - checkIfFrozen(); - this.fielddataMinSegmentSize = fielddataMinSegmentSize; - } - - @Override - public Query nullValueQuery() { - if (nullValue() == null) { - return null; - } - return termQuery(nullValue(), null); - } - - @Override - public IndexFieldData.Builder fielddataBuilder() { - if (hasDocValues()) { - return new DocValuesIndexFieldData.Builder(); - } else if (fielddata) { - return new PagedBytesIndexFieldData.Builder(fielddataMinFrequency, fielddataMaxFrequency, fielddataMinSegmentSize); - } else { - throw new IllegalArgumentException("Fielddata is disabled on analyzed string fields by default. Set fielddata=true on [" - + name() + "] in order to load fielddata in memory by uninverting the inverted index. Note that this can however " - + "use significant memory."); - } - } - } - - private Boolean includeInAll; - private int positionIncrementGap; - private int ignoreAbove; - - protected StringFieldMapper(String simpleName, StringFieldType fieldType, MappedFieldType defaultFieldType, - int positionIncrementGap, int ignoreAbove, Boolean includeInAll, - Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { - super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo); - if (Version.indexCreated(indexSettings).onOrAfter(Version.V_5_0_0_alpha1)) { - throw new IllegalArgumentException("The [string] type is removed in 5.0. 
You should now use either a [text] " - + "or [keyword] field instead for field [" + fieldType.name() + "]"); - } - if (fieldType.tokenized() && fieldType.indexOptions() != NONE && fieldType().hasDocValues()) { - throw new MapperParsingException("Field [" + fieldType.name() + "] cannot be analyzed and have doc values"); - } - if (fieldType.hasDocValues() && ( - fieldType.fielddataMinFrequency() != Defaults.FIELDDATA_MIN_FREQUENCY - || fieldType.fielddataMaxFrequency() != Defaults.FIELDDATA_MAX_FREQUENCY - || fieldType.fielddataMinSegmentSize() != Defaults.FIELDDATA_MIN_SEGMENT_SIZE)) { - throw new MapperParsingException("Field [" + fieldType.name() + "] cannot have doc values and use fielddata filtering"); - } - this.positionIncrementGap = positionIncrementGap; - this.ignoreAbove = ignoreAbove; - this.includeInAll = includeInAll; - } - - @Override - protected StringFieldMapper clone() { - return (StringFieldMapper) super.clone(); - } - - @Override - protected boolean customBoost() { - return true; - } - - public int getPositionIncrementGap() { - return this.positionIncrementGap; - } - - public int getIgnoreAbove() { - return ignoreAbove; - } - - @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { - ValueAndBoost valueAndBoost = parseCreateFieldForString(context, fieldType().nullValueAsString(), fieldType().boost()); - if (valueAndBoost.value() == null) { - return; - } - if (ignoreAbove > 0 && valueAndBoost.value().length() > ignoreAbove) { - return; - } - if (context.includeInAll(includeInAll, this)) { - context.allEntries().addText(fieldType().name(), valueAndBoost.value(), valueAndBoost.boost()); - } - - if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { - Field field = new Field(fieldType().name(), valueAndBoost.value(), fieldType()); - if (valueAndBoost.boost() != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - field.setBoost(valueAndBoost.boost()); - } - fields.add(field); - } - if (fieldType().hasDocValues()) { - fields.add(new SortedSetDocValuesField(fieldType().name(), new BytesRef(valueAndBoost.value()))); - } - } - - /** - * Parse a field as though it were a string. 
- * @param context parse context used during parsing - * @param nullValue value to use for null - * @param defaultBoost default boost value returned unless overwritten in the field - * @return the parsed field and the boost either parsed or defaulted - * @throws IOException if thrown while parsing - */ - public static ValueAndBoost parseCreateFieldForString(ParseContext context, String nullValue, float defaultBoost) throws IOException { - if (context.externalValueSet()) { - return new ValueAndBoost(context.externalValue().toString(), defaultBoost); - } - XContentParser parser = context.parser(); - if (parser.currentToken() == XContentParser.Token.VALUE_NULL) { - return new ValueAndBoost(nullValue, defaultBoost); - } - if (parser.currentToken() == XContentParser.Token.START_OBJECT - && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - XContentParser.Token token; - String currentFieldName = null; - String value = nullValue; - float boost = defaultBoost; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else { - if ("value".equals(currentFieldName) || "_value".equals(currentFieldName)) { - value = parser.textOrNull(); - } else if ("boost".equals(currentFieldName) || "_boost".equals(currentFieldName)) { - boost = parser.floatValue(); - } else { - throw new IllegalArgumentException("unknown property [" + currentFieldName + "]"); - } - } - } - return new ValueAndBoost(value, boost); - } - return new ValueAndBoost(parser.textOrNull(), defaultBoost); - } - - @Override - protected String contentType() { - return CONTENT_TYPE; - } - - @Override - protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { - super.doMerge(mergeWith, updateAllTypes); - this.includeInAll = ((StringFieldMapper) mergeWith).includeInAll; - this.ignoreAbove = ((StringFieldMapper) mergeWith).ignoreAbove; - } - - @Override - protected String indexTokenizeOption(boolean indexed, boolean tokenized) { - if (!indexed) { - return "no"; - } else if (tokenized) { - return "analyzed"; - } else { - return "not_analyzed"; - } - } - - @Override - public StringFieldType fieldType() { - return (StringFieldType) super.fieldType(); - } - - @Override - protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { - super.doXContentBody(builder, includeDefaults, params); - doXContentAnalyzers(builder, includeDefaults); - - if (includeDefaults || fieldType().nullValue() != null) { - builder.field("null_value", fieldType().nullValue()); - } - if (includeInAll != null) { - builder.field("include_in_all", includeInAll); - } else if (includeDefaults) { - builder.field("include_in_all", false); - } - - if (includeDefaults || positionIncrementGap != POSITION_INCREMENT_GAP_USE_ANALYZER) { - builder.field("position_increment_gap", positionIncrementGap); - } - - if (includeDefaults || ignoreAbove != Defaults.IGNORE_ABOVE) { - builder.field("ignore_above", ignoreAbove); - } - if (includeDefaults || fieldType().fielddata() != ((StringFieldType) defaultFieldType).fielddata()) { - builder.field("fielddata", fieldType().fielddata()); - } - if (fieldType().fielddata()) { - if (includeDefaults - || fieldType().fielddataMinFrequency() != Defaults.FIELDDATA_MIN_FREQUENCY - || fieldType().fielddataMaxFrequency() != Defaults.FIELDDATA_MAX_FREQUENCY - || fieldType().fielddataMinSegmentSize() != Defaults.FIELDDATA_MIN_SEGMENT_SIZE) { - 
builder.startObject("fielddata_frequency_filter"); - if (includeDefaults || fieldType().fielddataMinFrequency() != Defaults.FIELDDATA_MIN_FREQUENCY) { - builder.field("min", fieldType().fielddataMinFrequency()); - } - if (includeDefaults || fieldType().fielddataMaxFrequency() != Defaults.FIELDDATA_MAX_FREQUENCY) { - builder.field("max", fieldType().fielddataMaxFrequency()); - } - if (includeDefaults || fieldType().fielddataMinSegmentSize() != Defaults.FIELDDATA_MIN_SEGMENT_SIZE) { - builder.field("min_segment_size", fieldType().fielddataMinSegmentSize()); - } - builder.endObject(); - } - } - } - - /** - * Parsed value and boost to be returned from {@link #parseCreateFieldForString}. - */ - public static class ValueAndBoost { - private final String value; - private final float boost; - - public ValueAndBoost(String value, float boost) { - this.value = value; - this.boost = boost; - } - - /** - * Value of string field. - * @return value of string field - */ - public String value() { - return value; - } - - /** - * Boost either parsed from the document or defaulted. - * @return boost either parsed from the document or defaulted - */ - public float boost() { - return boost; - } - } -} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/TTLFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/TTLFieldMapper.java deleted file mode 100644 index fcb2fac9268..00000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/TTLFieldMapper.java +++ /dev/null @@ -1,265 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.document.Field; -import org.apache.lucene.index.IndexOptions; -import org.elasticsearch.Version; -import org.elasticsearch.common.lucene.Lucene; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.AlreadyExpiredException; - -import java.io.IOException; -import java.util.Date; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue; -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeTimeValue; - -public class TTLFieldMapper extends MetadataFieldMapper { - - public static final String NAME = "_ttl"; - public static final String CONTENT_TYPE = "_ttl"; - - public static class Defaults extends LegacyLongFieldMapper.Defaults { - public static final String NAME = TTLFieldMapper.CONTENT_TYPE; - - public static final TTLFieldType TTL_FIELD_TYPE = new TTLFieldType(); - - static { - TTL_FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); - TTL_FIELD_TYPE.setStored(true); - TTL_FIELD_TYPE.setTokenized(false); - TTL_FIELD_TYPE.setNumericPrecisionStep(Defaults.PRECISION_STEP_64_BIT); - TTL_FIELD_TYPE.setName(NAME); - TTL_FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); - TTL_FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); - TTL_FIELD_TYPE.freeze(); - } - - public static final EnabledAttributeMapper ENABLED_STATE = EnabledAttributeMapper.UNSET_DISABLED; - public static final long DEFAULT = -1; - } - - public static class Builder extends MetadataFieldMapper.Builder { - - private EnabledAttributeMapper enabledState = EnabledAttributeMapper.UNSET_DISABLED; - private long defaultTTL = Defaults.DEFAULT; - - public Builder() { - super(Defaults.NAME, Defaults.TTL_FIELD_TYPE, Defaults.FIELD_TYPE); - } - - public Builder enabled(EnabledAttributeMapper enabled) { - this.enabledState = enabled; - return builder; - } - - public Builder defaultTTL(long defaultTTL) { - this.defaultTTL = defaultTTL; - return builder; - } - - @Override - public TTLFieldMapper build(BuilderContext context) { - setupFieldType(context); - fieldType.setHasDocValues(false); - return new TTLFieldMapper(fieldType, enabledState, defaultTTL, context.indexSettings()); - } - } - - public static class TypeParser implements MetadataFieldMapper.TypeParser { - @Override - public MetadataFieldMapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - if (parserContext.indexVersionCreated().onOrAfter(Version.V_5_0_0_alpha4)) { - throw new IllegalArgumentException("[_ttl] is removed in 5.0. As a replacement, you should use time based indexes or cron a delete-by-query with a range query on a timestamp field."); - } - Builder builder = new Builder(); - for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { - Map.Entry entry = iterator.next(); - String fieldName = entry.getKey(); - Object fieldNode = entry.getValue(); - if (fieldName.equals("enabled")) { - EnabledAttributeMapper enabledState = lenientNodeBooleanValue(fieldNode) ? 
EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED; - builder.enabled(enabledState); - iterator.remove(); - } else if (fieldName.equals("default")) { - TimeValue ttlTimeValue = nodeTimeValue(fieldNode, null); - if (ttlTimeValue != null) { - builder.defaultTTL(ttlTimeValue.millis()); - } - iterator.remove(); - } - } - return builder; - } - - @Override - public MetadataFieldMapper getDefault(Settings indexSettings, MappedFieldType fieldType, String typeName) { - return new TTLFieldMapper(indexSettings); - } - } - - public static final class TTLFieldType extends LegacyLongFieldMapper.LongFieldType { - - public TTLFieldType() { - } - - protected TTLFieldType(TTLFieldType ref) { - super(ref); - } - - @Override - public TTLFieldType clone() { - return new TTLFieldType(this); - } - - // Overrides valueForSearch to display live value of remaining ttl - @Override - public Object valueForDisplay(Object value) { - final long now = System.currentTimeMillis(); - Long val = (Long) super.valueForDisplay(value); - return val - now; - } - } - - private EnabledAttributeMapper enabledState; - private long defaultTTL; - - private TTLFieldMapper(Settings indexSettings) { - this(Defaults.TTL_FIELD_TYPE.clone(), Defaults.ENABLED_STATE, Defaults.DEFAULT, indexSettings); - } - - private TTLFieldMapper(MappedFieldType fieldType, EnabledAttributeMapper enabled, long defaultTTL, - Settings indexSettings) { - super(NAME, fieldType, Defaults.TTL_FIELD_TYPE, indexSettings); - if (enabled.enabled && Version.indexCreated(indexSettings).onOrAfter(Version.V_5_0_0_alpha4)) { - throw new IllegalArgumentException("[_ttl] is removed in 5.0. As a replacement, you should use time based indexes or cron a delete-by-query with a range query on a timestamp field."); - } - this.enabledState = enabled; - this.defaultTTL = defaultTTL; - } - - public boolean enabled() { - return this.enabledState.enabled; - } - - public long defaultTTL() { - return this.defaultTTL; - } - - @Override - public void preParse(ParseContext context) throws IOException { - } - - @Override - public void postParse(ParseContext context) throws IOException { - super.parse(context); - } - - @Override - public Mapper parse(ParseContext context) throws IOException, MapperParsingException { - if (context.sourceToParse().ttl() < 0) { // no ttl has been provided externally - long ttl; - if (context.parser().currentToken() == XContentParser.Token.VALUE_STRING) { - ttl = TimeValue.parseTimeValue(context.parser().text(), null, "ttl").millis(); - } else { - ttl = context.parser().longValue(true); - } - if (ttl <= 0) { - throw new MapperParsingException("TTL value must be > 0. 
Illegal value provided [" + ttl + "]"); - } - context.sourceToParse().ttl(ttl); - } - return null; - } - - @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException, AlreadyExpiredException { - if (enabledState.enabled) { - long ttl = context.sourceToParse().ttl(); - if (ttl <= 0 && defaultTTL > 0) { // no ttl provided so we use the default value - ttl = defaultTTL; - context.sourceToParse().ttl(ttl); - } - if (ttl > 0) { // a ttl has been provided either externally or in the _source - long timestamp = context.sourceToParse().timestamp(); - long expire = new Date(timestamp + ttl).getTime(); - long now = System.currentTimeMillis(); - // there is not point indexing already expired doc - if (context.sourceToParse().origin() == SourceToParse.Origin.PRIMARY && now >= expire) { - throw new AlreadyExpiredException(context.sourceToParse().index(), - context.sourceToParse().type(), context.sourceToParse().id(), timestamp, ttl, now); - } - // the expiration timestamp (timestamp + ttl) is set as field - fields.add(new LegacyLongFieldMapper.CustomLongNumericField(expire, fieldType())); - } - } - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - boolean includeDefaults = params.paramAsBoolean("include_defaults", false); - - // if all are defaults, no sense to write it at all - if (!includeDefaults && enabledState == Defaults.ENABLED_STATE && defaultTTL == Defaults.DEFAULT) { - return builder; - } - builder.startObject(CONTENT_TYPE); - if (includeDefaults || enabledState != Defaults.ENABLED_STATE) { - builder.field("enabled", enabledState.enabled); - } - if (includeDefaults || defaultTTL != Defaults.DEFAULT && enabledState.enabled) { - builder.field("default", defaultTTL); - } - builder.endObject(); - return builder; - } - - @Override - protected String contentType() { - return NAME; - } - - @Override - protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { - TTLFieldMapper ttlMergeWith = (TTLFieldMapper) mergeWith; - if (ttlMergeWith.enabledState != Defaults.ENABLED_STATE) {//only do something if actually something was set for the document mapper that we merge with - if (this.enabledState == EnabledAttributeMapper.ENABLED && ttlMergeWith.enabledState == EnabledAttributeMapper.DISABLED) { - throw new IllegalArgumentException("_ttl cannot be disabled once it was enabled."); - } else { - this.enabledState = ttlMergeWith.enabledState; - } - } - if (ttlMergeWith.defaultTTL != -1) { - // we never build the default when the field is disabled so we should also not set it - // (it does not make a difference though as everything that is not build in toXContent will also not be set in the cluster) - if (enabledState == EnabledAttributeMapper.ENABLED) { - this.defaultTTL = ttlMergeWith.defaultTTL; - } - } - } - -} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index 63febfcaf01..bb8c4d77a63 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -21,8 +21,8 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.Query; -import org.elasticsearch.Version; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.support.XContentMapValues; @@ -31,15 +31,11 @@ import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.plain.PagedBytesIndexFieldData; import java.io.IOException; -import java.util.Arrays; -import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.Set; -import static java.util.Collections.unmodifiableList; import static org.elasticsearch.index.mapper.TypeParsers.parseTextField; /** A {@link FieldMapper} for full-text fields. */ @@ -48,14 +44,6 @@ public class TextFieldMapper extends FieldMapper { public static final String CONTENT_TYPE = "text"; private static final int POSITION_INCREMENT_GAP_USE_ANALYZER = -1; - private static final List SUPPORTED_PARAMETERS_FOR_AUTO_DOWNGRADE_TO_STRING = unmodifiableList(Arrays.asList( - "type", - // common text parameters, for which the upgrade is straightforward - "index", "store", "doc_values", "omit_norms", "norms", "boost", "fields", "copy_to", - "fielddata", "eager_global_ordinals", "fielddata_frequency_filter", "include_in_all", - "analyzer", "search_analyzer", "search_quote_analyzer", - "index_options", "position_increment_gap", "similarity")); - public static class Defaults { public static double FIELDDATA_MIN_FREQUENCY = 0; public static double FIELDDATA_MAX_FREQUENCY = Integer.MAX_VALUE; @@ -142,41 +130,6 @@ public class TextFieldMapper extends FieldMapper { public static class TypeParser implements Mapper.TypeParser { @Override public Mapper.Builder parse(String fieldName, Map node, ParserContext parserContext) throws MapperParsingException { - if (parserContext.indexVersionCreated().before(Version.V_5_0_0_alpha1)) { - // Downgrade "text" to "string" in indexes created in 2.x so you can use modern syntax against old indexes - Set unsupportedParameters = new HashSet<>(node.keySet()); - unsupportedParameters.removeAll(SUPPORTED_PARAMETERS_FOR_AUTO_DOWNGRADE_TO_STRING); - if (false == SUPPORTED_PARAMETERS_FOR_AUTO_DOWNGRADE_TO_STRING.containsAll(node.keySet())) { - throw new IllegalArgumentException("Automatic downgrade from [text] to [string] failed because parameters " - + unsupportedParameters + " are not supported for automatic downgrades."); - } - { // Downgrade "index" - Object index = node.get("index"); - if (index == null || Boolean.TRUE.equals(index)) { - index = "analyzed"; - } else if (Boolean.FALSE.equals(index)) { - index = "no"; - } else { - throw new IllegalArgumentException( - "Can't parse [index] value [" + index + "] for field [" + fieldName + "], expected [true] or [false]"); - } - node.put("index", index); - } - { // Downgrade "fielddata" (default in string is true, default in text is false) - Object fielddata = node.get("fielddata"); - if (fielddata == null || Boolean.FALSE.equals(fielddata)) { - fielddata = false; - } else if (Boolean.TRUE.equals(fielddata)) { - fielddata = true; - } else { - throw new IllegalArgumentException("can't parse [fielddata] value for [" + fielddata + "] for field [" - + fieldName + "], expected [true] or [false]"); - } - node.put("fielddata", fielddata); - } - - return new StringFieldMapper.TypeParser().parse(fieldName, node, parserContext); - } TextFieldMapper.Builder builder = new TextFieldMapper.Builder(fieldName); builder.fieldType().setIndexAnalyzer(parserContext.getIndexAnalyzers().getDefaultIndexAnalyzer()); 
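(Aside, not part of the patch: with the automatic [text]-to-[string] downgrade removed above, a pre-5.0 "string" mapping has no runtime fallback; the modern equivalent is a "text" field, optionally with fielddata, or a "keyword" field, as the StringFieldMapper error message earlier in this diff says. A minimal sketch of building such a mapping with XContentBuilder; the field names "body" and "tag" are invented for illustration.)

import java.io.IOException;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

class TextMappingSketch {
    // Builds the mapping JSON that replaces a 2.x "string" field.
    static XContentBuilder textAndKeywordMapping() throws IOException {
        return XContentFactory.jsonBuilder()
            .startObject()
                .startObject("properties")
                    .startObject("body")            // hypothetical analyzed field
                        .field("type", "text")      // replaces string + index=analyzed
                        .field("fielddata", true)   // only if sorting/aggregating on it
                    .endObject()
                    .startObject("tag")             // hypothetical not_analyzed field
                        .field("type", "keyword")   // replaces string + index=not_analyzed
                    .endObject()
                .endObject()
            .endObject();
    }
}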
builder.fieldType().setSearchAnalyzer(parserContext.getIndexAnalyzers().getDefaultSearchAnalyzer()); @@ -371,7 +324,7 @@ public class TextFieldMapper extends FieldMapper { } @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { + protected void parseCreateField(ParseContext context, List fields) throws IOException { final String value; if (context.externalValueSet()) { value = context.externalValue().toString(); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/TimestampFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/TimestampFieldMapper.java deleted file mode 100644 index d933794efbc..00000000000 --- a/core/src/main/java/org/elasticsearch/index/mapper/TimestampFieldMapper.java +++ /dev/null @@ -1,309 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.document.Field; -import org.apache.lucene.document.NumericDocValuesField; -import org.apache.lucene.index.IndexOptions; -import org.elasticsearch.Version; -import org.elasticsearch.action.TimestampParsingException; -import org.elasticsearch.common.joda.FormatDateTimeFormatter; -import org.elasticsearch.common.joda.Joda; -import org.elasticsearch.common.lucene.Lucene; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue; -import static org.elasticsearch.index.mapper.TypeParsers.parseDateTimeFormatter; - -public class TimestampFieldMapper extends MetadataFieldMapper { - - public static final String NAME = "_timestamp"; - public static final String CONTENT_TYPE = "_timestamp"; - public static final String DEFAULT_DATE_TIME_FORMAT = "epoch_millis||strictDateOptionalTime"; - - public static class Defaults extends LegacyDateFieldMapper.Defaults { - public static final String NAME = "_timestamp"; - - // TODO: this should be removed - public static final TimestampFieldType FIELD_TYPE = new TimestampFieldType(); - public static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern(DEFAULT_DATE_TIME_FORMAT); - - static { - FIELD_TYPE.setStored(true); - FIELD_TYPE.setTokenized(false); - FIELD_TYPE.setNumericPrecisionStep(Defaults.PRECISION_STEP_64_BIT); - FIELD_TYPE.setName(NAME); - FIELD_TYPE.setDateTimeFormatter(DATE_TIME_FORMATTER); - FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); - FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); - FIELD_TYPE.setHasDocValues(true); - FIELD_TYPE.freeze(); - } - - public static final EnabledAttributeMapper 
ENABLED = EnabledAttributeMapper.UNSET_DISABLED; - public static final String DEFAULT_TIMESTAMP = "now"; - } - - public static class Builder extends MetadataFieldMapper.Builder { - - private EnabledAttributeMapper enabledState = EnabledAttributeMapper.UNSET_DISABLED; - private String defaultTimestamp = Defaults.DEFAULT_TIMESTAMP; - private Boolean ignoreMissing = null; - - public Builder(MappedFieldType existing) { - super(Defaults.NAME, existing == null ? Defaults.FIELD_TYPE : existing, Defaults.FIELD_TYPE); - } - - @Override - public LegacyDateFieldMapper.DateFieldType fieldType() { - return (LegacyDateFieldMapper.DateFieldType)fieldType; - } - - public Builder enabled(EnabledAttributeMapper enabledState) { - this.enabledState = enabledState; - return builder; - } - - public Builder dateTimeFormatter(FormatDateTimeFormatter dateTimeFormatter) { - fieldType().setDateTimeFormatter(dateTimeFormatter); - return this; - } - - public Builder defaultTimestamp(String defaultTimestamp) { - this.defaultTimestamp = defaultTimestamp; - return builder; - } - - public Builder ignoreMissing(boolean ignoreMissing) { - this.ignoreMissing = ignoreMissing; - return builder; - } - - @Override - public Builder store(boolean store) { - return super.store(store); - } - - @Override - public TimestampFieldMapper build(BuilderContext context) { - setupFieldType(context); - return new TimestampFieldMapper(fieldType, defaultFieldType, enabledState, defaultTimestamp, - ignoreMissing, context.indexSettings()); - } - } - - public static class TypeParser implements MetadataFieldMapper.TypeParser { - @Override - public MetadataFieldMapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - if (parserContext.indexVersionCreated().onOrAfter(Version.V_5_0_0_alpha4)) { - throw new IllegalArgumentException("[_timestamp] is removed in 5.0. As a replacement, you can use an ingest pipeline to add a field with the current timestamp to your documents."); - } - Builder builder = new Builder(parserContext.mapperService().fullName(NAME)); - boolean defaultSet = false; - Boolean ignoreMissing = null; - for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { - Map.Entry entry = iterator.next(); - String fieldName = entry.getKey(); - Object fieldNode = entry.getValue(); - if (fieldName.equals("enabled")) { - EnabledAttributeMapper enabledState = lenientNodeBooleanValue(fieldNode) ? 
EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED; - builder.enabled(enabledState); - iterator.remove(); - } else if (fieldName.equals("format")) { - builder.dateTimeFormatter(parseDateTimeFormatter(fieldNode.toString())); - iterator.remove(); - } else if (fieldName.equals("default")) { - if (fieldNode == null) { - throw new TimestampParsingException("default timestamp can not be set to null"); - } else { - builder.defaultTimestamp(fieldNode.toString()); - defaultSet = true; - } - iterator.remove(); - } else if (fieldName.equals("ignore_missing")) { - ignoreMissing = lenientNodeBooleanValue(fieldNode); - builder.ignoreMissing(ignoreMissing); - iterator.remove(); - } - } - - // We can not accept a default value and rejecting null values at the same time - if (defaultSet && (ignoreMissing != null && ignoreMissing == false)) { - throw new TimestampParsingException("default timestamp can not be set with ignore_missing set to false"); - } - - return builder; - } - - @Override - public MetadataFieldMapper getDefault(Settings indexSettings, MappedFieldType fieldType, String typeName) { - return new TimestampFieldMapper(indexSettings, fieldType); - } - } - - public static final class TimestampFieldType extends LegacyDateFieldMapper.DateFieldType { - - public TimestampFieldType() {} - - protected TimestampFieldType(TimestampFieldType ref) { - super(ref); - } - - @Override - public TimestampFieldType clone() { - return new TimestampFieldType(this); - } - - @Override - public Object valueForDisplay(Object value) { - return value; - } - } - - private EnabledAttributeMapper enabledState; - - private final String defaultTimestamp; - private final Boolean ignoreMissing; - - private TimestampFieldMapper(Settings indexSettings, MappedFieldType existing) { - this(existing != null ? existing : Defaults.FIELD_TYPE, Defaults.FIELD_TYPE, Defaults.ENABLED, Defaults.DEFAULT_TIMESTAMP, null, indexSettings); - } - - private TimestampFieldMapper(MappedFieldType fieldType, MappedFieldType defaultFieldType, EnabledAttributeMapper enabledState, - String defaultTimestamp, Boolean ignoreMissing, Settings indexSettings) { - super(NAME, fieldType, defaultFieldType, indexSettings); - if (enabledState.enabled && Version.indexCreated(indexSettings).onOrAfter(Version.V_5_0_0_alpha4)) { - throw new IllegalArgumentException("[_timestamp] is removed in 5.0. 
As a replacement, you can use an ingest pipeline to add a field with the current timestamp to your documents."); - } - this.enabledState = enabledState; - this.defaultTimestamp = defaultTimestamp; - this.ignoreMissing = ignoreMissing; - } - - @Override - public TimestampFieldType fieldType() { - return (TimestampFieldType)super.fieldType(); - } - - public boolean enabled() { - return this.enabledState.enabled; - } - - public String defaultTimestamp() { - return this.defaultTimestamp; - } - - public Boolean ignoreMissing() { - return this.ignoreMissing; - } - - @Override - public void preParse(ParseContext context) throws IOException { - super.parse(context); - } - - @Override - public void postParse(ParseContext context) throws IOException { - } - - @Override - public Mapper parse(ParseContext context) throws IOException { - // nothing to do here, we call the parent in preParse - return null; - } - - @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { - if (enabledState.enabled) { - long timestamp = context.sourceToParse().timestamp(); - if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { - fields.add(new LegacyLongFieldMapper.CustomLongNumericField(timestamp, fieldType())); - } - if (fieldType().hasDocValues()) { - fields.add(new NumericDocValuesField(fieldType().name(), timestamp)); - } - } - } - - @Override - protected String contentType() { - return CONTENT_TYPE; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - boolean includeDefaults = params.paramAsBoolean("include_defaults", false); - - // if all are defaults, no sense to write it at all - if (!includeDefaults && enabledState == Defaults.ENABLED - && fieldType().dateTimeFormatter().format().equals(Defaults.DATE_TIME_FORMATTER.format()) - && Defaults.DEFAULT_TIMESTAMP.equals(defaultTimestamp)) { - return builder; - } - builder.startObject(CONTENT_TYPE); - if (includeDefaults || enabledState != Defaults.ENABLED) { - builder.field("enabled", enabledState.enabled); - } - // different format handling depending on index version - String defaultDateFormat = Defaults.DATE_TIME_FORMATTER.format(); - if (includeDefaults || !fieldType().dateTimeFormatter().format().equals(defaultDateFormat)) { - builder.field("format", fieldType().dateTimeFormatter().format()); - } - if (includeDefaults || !Defaults.DEFAULT_TIMESTAMP.equals(defaultTimestamp)) { - builder.field("default", defaultTimestamp); - } - if (includeDefaults || ignoreMissing != null) { - builder.field("ignore_missing", ignoreMissing); - } - - builder.endObject(); - return builder; - } - - @Override - protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { - TimestampFieldMapper timestampFieldMapperMergeWith = (TimestampFieldMapper) mergeWith; - super.doMerge(mergeWith, updateAllTypes); - if (timestampFieldMapperMergeWith.enabledState != enabledState && !timestampFieldMapperMergeWith.enabledState.unset()) { - this.enabledState = timestampFieldMapperMergeWith.enabledState; - } - if (timestampFieldMapperMergeWith.defaultTimestamp() == null && defaultTimestamp == null) { - return; - } - List conflicts = new ArrayList<>(); - if (defaultTimestamp == null) { - conflicts.add("Cannot update default in _timestamp value. Value is null now encountering " + timestampFieldMapperMergeWith.defaultTimestamp()); - } else if (timestampFieldMapperMergeWith.defaultTimestamp() == null) { - conflicts.add("Cannot update default in _timestamp value. 
Value is \" + defaultTimestamp.toString() + \" now encountering null"); - } else if (!timestampFieldMapperMergeWith.defaultTimestamp().equals(defaultTimestamp)) { - conflicts.add("Cannot update default in _timestamp value. Value is " + defaultTimestamp.toString() + " now encountering " + timestampFieldMapperMergeWith.defaultTimestamp()); - } - if (conflicts.isEmpty() == false) { - throw new IllegalArgumentException("Conflicts: " + conflicts); - } - } -} diff --git a/core/src/main/java/org/elasticsearch/index/mapper/TokenCountFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/TokenCountFieldMapper.java index 3b6026d1b21..a2d40cd08bd 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/TokenCountFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/TokenCountFieldMapper.java @@ -24,6 +24,7 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.IndexableField; import org.elasticsearch.Version; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -39,7 +40,7 @@ import static org.elasticsearch.index.mapper.TypeParsers.parseField; /** * A {@link FieldMapper} that takes a string and writes a count of the tokens in that string - * to the index. In most ways the mapper acts just like an {@link LegacyIntegerFieldMapper}. + * to the index. In most ways the mapper acts just like an {@link NumberFieldMapper}. */ public class TokenCountFieldMapper extends FieldMapper { public static final String CONTENT_TYPE = "token_count"; @@ -77,9 +78,6 @@ public class TokenCountFieldMapper extends FieldMapper { @Override @SuppressWarnings("unchecked") public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - if (parserContext.indexVersionCreated().before(Version.V_5_0_0_alpha2)) { - return new LegacyTokenCountFieldMapper.TypeParser().parse(name, node, parserContext); - } TokenCountFieldMapper.Builder builder = new TokenCountFieldMapper.Builder(name); for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { Map.Entry entry = iterator.next(); @@ -114,7 +112,7 @@ public class TokenCountFieldMapper extends FieldMapper { } @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { + protected void parseCreateField(ParseContext context, List fields) throws IOException { final String value; if (context.externalValueSet()) { value = context.externalValue().toString(); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/TypeFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/TypeFieldMapper.java index 0e7df788a9f..551208c797e 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/TypeFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/TypeFieldMapper.java @@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.document.Field; import org.apache.lucene.document.SortedSetDocValuesField; +import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.Term; @@ -33,7 +34,6 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import 
org.apache.lucene.util.BytesRef; -import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; @@ -186,6 +186,10 @@ public class TypeFieldMapper extends MetadataFieldMapper { this.types = types; } + public BytesRef[] getTerms() { + return types; + } + @Override public Query rewrite(IndexReader reader) throws IOException { final int threshold = Math.min(BOOLEAN_REWRITE_TERM_COUNT_THRESHOLD, BooleanQuery.getMaxClauseCount()); @@ -256,13 +260,7 @@ public class TypeFieldMapper extends MetadataFieldMapper { private static MappedFieldType defaultFieldType(Settings indexSettings) { MappedFieldType defaultFieldType = Defaults.FIELD_TYPE.clone(); - Version indexCreated = Version.indexCreated(indexSettings); - if (indexCreated.before(Version.V_2_1_0)) { - // enables fielddata loading, doc values was disabled on _type between 2.0 and 2.1. - ((TypeFieldType) defaultFieldType).setFielddata(true); - } else { - defaultFieldType.setHasDocValues(true); - } + defaultFieldType.setHasDocValues(true); return defaultFieldType; } @@ -282,7 +280,7 @@ public class TypeFieldMapper extends MetadataFieldMapper { } @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { + protected void parseCreateField(ParseContext context, List fields) throws IOException { if (fieldType().indexOptions() == IndexOptions.NONE && !fieldType().stored()) { return; } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java b/core/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java index 97828e2bfab..475848989d4 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/TypeParsers.java @@ -71,32 +71,6 @@ public class TypeParsers { } } - @Deprecated // for legacy ints only - public static void parseNumberField(LegacyNumberFieldMapper.Builder builder, String name, Map numberNode, Mapper.TypeParser.ParserContext parserContext) { - parseField(builder, name, numberNode, parserContext); - for (Iterator> iterator = numberNode.entrySet().iterator(); iterator.hasNext();) { - Map.Entry entry = iterator.next(); - String propName = entry.getKey(); - Object propNode = entry.getValue(); - if (propName.equals("precision_step")) { - builder.precisionStep(nodeIntegerValue(propNode)); - iterator.remove(); - } else if (propName.equals("ignore_malformed")) { - builder.ignoreMalformed(nodeBooleanValue("ignore_malformed", propNode, parserContext)); - iterator.remove(); - } else if (propName.equals("coerce")) { - builder.coerce(nodeBooleanValue("coerce", propNode, parserContext)); - iterator.remove(); - } else if (propName.equals("similarity")) { - SimilarityProvider similarityProvider = resolveSimilarity(parserContext, name, propNode.toString()); - builder.similarity(similarityProvider); - iterator.remove(); - } else if (parseMultiField(builder, name, parserContext, propName, propNode)) { - iterator.remove(); - } - } - } - private static void parseAnalyzersAndTermVectors(FieldMapper.Builder builder, String name, Map fieldNode, Mapper.TypeParser.ParserContext parserContext) { NamedAnalyzer indexAnalyzer = null; NamedAnalyzer searchAnalyzer = null; @@ -270,23 +244,13 @@ public class TypeParsers { iterator.remove(); } else if (propName.equals("copy_to")) { if (parserContext.isWithinMultiField()) { - if (indexVersionCreated.after(Version.V_2_1_0) || - (indexVersionCreated.after(Version.V_2_0_1) && 
indexVersionCreated.before(Version.V_2_1_0))) { - throw new MapperParsingException("copy_to in multi fields is not allowed. Found the copy_to in field [" + name + "] which is within a multi field."); - } else { - ESLoggerFactory.getLogger("mapping [" + parserContext.type() + "]").warn("Found a copy_to in field [{}] which is within a multi field. This feature has been removed and the copy_to will be removed from the mapping.", name); - } + throw new MapperParsingException("copy_to in multi fields is not allowed. Found the copy_to in field [" + name + "] which is within a multi field."); } else { parseCopyFields(propNode, builder); } iterator.remove(); } } - if (indexVersionCreated.before(Version.V_2_2_0)) { - // analyzer, search_analyzer, term_vectors were accepted on all fields - // before 2.2, even though it made little sense - parseAnalyzersAndTermVectors(builder, name, fieldNode, parserContext); - } } public static boolean parseMultiField(FieldMapper.Builder builder, String name, Mapper.TypeParser.ParserContext parserContext, String propName, Object propNode) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/UidFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/UidFieldMapper.java index ee6d76c2480..c0515b18bcc 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/UidFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/UidFieldMapper.java @@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.document.BinaryDocValuesField; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.IndexableField; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; @@ -125,7 +126,7 @@ public class UidFieldMapper extends MetadataFieldMapper { } @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { + protected void parseCreateField(ParseContext context, List fields) throws IOException { Field uid = new Field(NAME, Uid.createUid(context.sourceToParse().type(), context.sourceToParse().id()), Defaults.FIELD_TYPE); fields.add(uid); if (fieldType().hasDocValues()) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/VersionFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/VersionFieldMapper.java index eb46f7a21d6..fb686d7781b 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/VersionFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/VersionFieldMapper.java @@ -23,6 +23,7 @@ import org.apache.lucene.document.Field; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.Query; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -101,7 +102,7 @@ public class VersionFieldMapper extends MetadataFieldMapper { } @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { + protected void parseCreateField(ParseContext context, List fields) throws IOException { // see InternalEngine.updateVersion to see where the real version value is set final Field version = new NumericDocValuesField(NAME, -1L); context.version(version); diff --git 
a/core/src/main/java/org/elasticsearch/index/mapper/internal/SeqNoFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/SeqNoFieldMapper.java index 4489230b059..d94e05ae358 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/SeqNoFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/SeqNoFieldMapper.java @@ -23,6 +23,7 @@ import org.apache.lucene.document.Field; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; @@ -155,7 +156,7 @@ public class SeqNoFieldMapper extends MetadataFieldMapper { } @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { + protected void parseCreateField(ParseContext context, List fields) throws IOException { // see InternalEngine.updateVersion to see where the real version value is set final Field seqNo = new NumericDocValuesField(NAME, SequenceNumbersService.UNASSIGNED_SEQ_NO); context.seqNo(seqNo); diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java index 1cfe2acb246..cec8c59c56a 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilder.java @@ -23,8 +23,6 @@ import org.apache.lucene.document.LatLonPoint; import org.apache.lucene.geo.Rectangle; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.spatial.geopoint.document.GeoPointField; -import org.apache.lucene.spatial.geopoint.search.GeoPointInBBoxQuery; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.Version; import org.elasticsearch.common.Numbers; @@ -37,13 +35,8 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.fielddata.IndexGeoPointFieldData; import org.elasticsearch.index.mapper.BaseGeoPointFieldMapper; -import org.elasticsearch.index.mapper.BaseGeoPointFieldMapper.LegacyGeoPointFieldType; -import org.elasticsearch.index.mapper.LatLonPointFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.search.geo.LegacyInMemoryGeoBoundingBoxQuery; -import org.elasticsearch.index.search.geo.LegacyIndexedGeoBoundingBoxQuery; import java.io.IOException; import java.util.Objects; @@ -58,7 +51,6 @@ import java.util.Optional; * */ public class GeoBoundingBoxQueryBuilder extends AbstractQueryBuilder { public static final String NAME = "geo_bounding_box"; - public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME, "geo_bbox"); /** Default type for executing this query (memory as of this writing). */ public static final GeoExecType DEFAULT_TYPE = GeoExecType.MEMORY; @@ -361,34 +353,8 @@ public class GeoBoundingBoxQueryBuilder extends AbstractQueryBuilder use prefix encoded postings format - final GeoPointField.TermEncoding encoding = (indexVersionCreated.before(Version.V_2_3_0)) ? 
- GeoPointField.TermEncoding.NUMERIC : GeoPointField.TermEncoding.PREFIX; - return new GeoPointInBBoxQuery(fieldType.name(), encoding, luceneBottomRight.lat(), luceneTopLeft.lat(), - luceneTopLeft.lon(), luceneBottomRight.lon()); - } - - Query query; - switch(type) { - case INDEXED: - LegacyGeoPointFieldType geoFieldType = ((LegacyGeoPointFieldType) fieldType); - query = LegacyIndexedGeoBoundingBoxQuery.create(luceneTopLeft, luceneBottomRight, geoFieldType, context); - break; - case MEMORY: - IndexGeoPointFieldData indexFieldData = context.getForField(fieldType); - query = new LegacyInMemoryGeoBoundingBoxQuery(luceneTopLeft, luceneBottomRight, indexFieldData); - break; - default: - // Someone extended the type enum w/o adjusting this switch statement. - throw new IllegalStateException("geo bounding box type [" + type + "] not supported."); - } - - return query; + return LatLonPoint.newBoxQuery(fieldType.name(), luceneBottomRight.getLat(), luceneTopLeft.getLat(), + luceneTopLeft.getLon(), luceneBottomRight.getLon()); } @Override @@ -472,12 +438,12 @@ public class GeoBoundingBoxQueryBuilder extends AbstractQueryBuilder use prefix encoded postings format - final GeoPointField.TermEncoding encoding = (indexVersionCreated.before(Version.V_2_3_0)) ? - GeoPointField.TermEncoding.NUMERIC : GeoPointField.TermEncoding.PREFIX; - // Lucene 6.0 and earlier requires a radial restriction - if (indexVersionCreated.before(Version.V_5_0_0_alpha4)) { - normDistance = GeoUtils.maxRadialDistance(center, normDistance); - } - return new GeoPointDistanceQuery(fieldType.name(), encoding, center.lat(), center.lon(), normDistance); + return LatLonPoint.newDistanceQuery(fieldType.name(), center.lat(), center.lon(), normDistance); } @Override @@ -371,11 +346,11 @@ public class GeoDistanceQueryBuilder extends AbstractQueryBuilder { - public static final String NAME = "geo_distance_range"; - - public static final boolean DEFAULT_INCLUDE_LOWER = true; - public static final boolean DEFAULT_INCLUDE_UPPER = true; - public static final GeoDistance DEFAULT_GEO_DISTANCE = GeoDistance.DEFAULT; - public static final DistanceUnit DEFAULT_UNIT = DistanceUnit.DEFAULT; - @Deprecated - public static final String DEFAULT_OPTIMIZE_BBOX = "memory"; - - /** - * The default value for ignore_unmapped. 
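(Aside, not part of the patch: the added calls in GeoBoundingBoxQueryBuilder and GeoDistanceQueryBuilder above are plain Lucene 6.x LatLonPoint factory queries. A minimal sketch of the same call shapes outside the builders; the field name and coordinates are invented for illustration.)

import org.apache.lucene.document.LatLonPoint;
import org.apache.lucene.search.Query;

class LatLonPointQuerySketch {
    // Mirrors the + line in GeoBoundingBoxQueryBuilder.doToQuery:
    // arguments are field, min latitude, max latitude, min longitude, max longitude.
    static Query boxQuery() {
        return LatLonPoint.newBoxQuery("pin.location", 40.5, 40.9, -74.1, -73.7);
    }

    // Mirrors the + line in GeoDistanceQueryBuilder.doToQuery:
    // arguments are field, latitude, longitude, radius in meters.
    static Query distanceQuery() {
        return LatLonPoint.newDistanceQuery("pin.location", 40.7, -73.9, 10_000);
    }
}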
- */ - public static final boolean DEFAULT_IGNORE_UNMAPPED = false; - - private static final ParseField FROM_FIELD = new ParseField("from"); - private static final ParseField TO_FIELD = new ParseField("to"); - private static final ParseField INCLUDE_LOWER_FIELD = new ParseField("include_lower"); - private static final ParseField INCLUDE_UPPER_FIELD = new ParseField("include_upper"); - private static final ParseField GT_FIELD = new ParseField("gt"); - private static final ParseField GTE_FIELD = new ParseField("gte", "ge"); - private static final ParseField LT_FIELD = new ParseField("lt"); - private static final ParseField LTE_FIELD = new ParseField("lte", "le"); - private static final ParseField UNIT_FIELD = new ParseField("unit"); - private static final ParseField DISTANCE_TYPE_FIELD = new ParseField("distance_type"); - private static final ParseField NAME_FIELD = new ParseField("_name"); - private static final ParseField BOOST_FIELD = new ParseField("boost"); - @Deprecated - private static final ParseField OPTIMIZE_BBOX_FIELD = new ParseField("optimize_bbox") - .withAllDeprecated("no replacement: `optimize_bbox` is no longer supported due to recent improvements"); - private static final ParseField COERCE_FIELD = new ParseField("coerce", "normalize") - .withAllDeprecated("use validation_method instead"); - private static final ParseField IGNORE_MALFORMED_FIELD = new ParseField("ignore_malformed") - .withAllDeprecated("use validation_method instead"); - private static final ParseField VALIDATION_METHOD = new ParseField("validation_method"); - private static final ParseField IGNORE_UNMAPPED_FIELD = new ParseField("ignore_unmapped"); - - private final String fieldName; - - private Object from; - private Object to; - private boolean includeLower = DEFAULT_INCLUDE_LOWER; - private boolean includeUpper = DEFAULT_INCLUDE_UPPER; - - private boolean ignoreUnmapped = DEFAULT_IGNORE_UNMAPPED; - - private final GeoPoint point; - - private GeoDistance geoDistance = DEFAULT_GEO_DISTANCE; - - private DistanceUnit unit = DEFAULT_UNIT; - - private String optimizeBbox = null; - - private GeoValidationMethod validationMethod = GeoValidationMethod.DEFAULT; - - public GeoDistanceRangeQueryBuilder(String fieldName, GeoPoint point) { - if (Strings.isEmpty(fieldName)) { - throw new IllegalArgumentException("fieldName must not be null"); - } - if (point == null) { - throw new IllegalArgumentException("point must not be null"); - } - this.fieldName = fieldName; - this.point = point; - } - - public GeoDistanceRangeQueryBuilder(String fieldName, double lat, double lon) { - this(fieldName, new GeoPoint(lat, lon)); - } - - public GeoDistanceRangeQueryBuilder(String fieldName, String geohash) { - this(fieldName, geohash == null ? null : new GeoPoint().resetFromGeoHash(geohash)); - } - - /** - * Read from a stream. 
- */ - public GeoDistanceRangeQueryBuilder(StreamInput in) throws IOException { - super(in); - fieldName = in.readString(); - point = in.readGeoPoint(); - from = in.readGenericValue(); - to = in.readGenericValue(); - includeLower = in.readBoolean(); - includeUpper = in.readBoolean(); - unit = DistanceUnit.valueOf(in.readString()); - geoDistance = GeoDistance.readFromStream(in); - optimizeBbox = in.readOptionalString(); - validationMethod = GeoValidationMethod.readFromStream(in); - ignoreUnmapped = in.readBoolean(); - } - - @Override - protected void doWriteTo(StreamOutput out) throws IOException { - out.writeString(fieldName); - out.writeGeoPoint(point); - out.writeGenericValue(from); - out.writeGenericValue(to); - out.writeBoolean(includeLower); - out.writeBoolean(includeUpper); - out.writeString(unit.name()); - geoDistance.writeTo(out);; - out.writeOptionalString(optimizeBbox); - validationMethod.writeTo(out); - out.writeBoolean(ignoreUnmapped); - } - - public String fieldName() { - return fieldName; - } - - public GeoPoint point() { - return point; - } - - public GeoDistanceRangeQueryBuilder from(String from) { - if (from == null) { - throw new IllegalArgumentException("[from] must not be null"); - } - this.from = from; - return this; - } - - public GeoDistanceRangeQueryBuilder from(Number from) { - if (from == null) { - throw new IllegalArgumentException("[from] must not be null"); - } - this.from = from; - return this; - } - - public Object from() { - return from; - } - - public GeoDistanceRangeQueryBuilder to(String to) { - if (to == null) { - throw new IllegalArgumentException("[to] must not be null"); - } - this.to = to; - return this; - } - - public GeoDistanceRangeQueryBuilder to(Number to) { - if (to == null) { - throw new IllegalArgumentException("[to] must not be null"); - } - this.to = to; - return this; - } - - public Object to() { - return to; - } - - public GeoDistanceRangeQueryBuilder includeLower(boolean includeLower) { - this.includeLower = includeLower; - return this; - } - - public boolean includeLower() { - return includeLower; - } - - public GeoDistanceRangeQueryBuilder includeUpper(boolean includeUpper) { - this.includeUpper = includeUpper; - return this; - } - - public boolean includeUpper() { - return includeUpper; - } - - public GeoDistanceRangeQueryBuilder geoDistance(GeoDistance geoDistance) { - if (geoDistance == null) { - throw new IllegalArgumentException("geoDistance calculation mode must not be null"); - } - this.geoDistance = geoDistance; - return this; - } - - public GeoDistance geoDistance() { - return geoDistance; - } - - public GeoDistanceRangeQueryBuilder unit(DistanceUnit unit) { - if (unit == null) { - throw new IllegalArgumentException("distance unit must not be null"); - } - this.unit = unit; - return this; - } - - public DistanceUnit unit() { - return unit; - } - - @Deprecated - public GeoDistanceRangeQueryBuilder optimizeBbox(String optimizeBbox) { - this.optimizeBbox = optimizeBbox; - return this; - } - - @Deprecated - public String optimizeBbox() { - return optimizeBbox; - } - - /** Set validation method for coordinates. */ - public GeoDistanceRangeQueryBuilder setValidationMethod(GeoValidationMethod method) { - this.validationMethod = method; - return this; - } - - /** Returns validation method for coordinates. 
*/ - public GeoValidationMethod getValidationMethod() { - return this.validationMethod; - } - - /** - * Sets whether the query builder should ignore unmapped fields (and run a - * {@link MatchNoDocsQuery} in place of this query) or throw an exception if - * the field is unmapped. - */ - public GeoDistanceRangeQueryBuilder ignoreUnmapped(boolean ignoreUnmapped) { - this.ignoreUnmapped = ignoreUnmapped; - return this; - } - - /** - * Gets whether the query builder will ignore unmapped fields (and run a - * {@link MatchNoDocsQuery} in place of this query) or throw an exception if - * the field is unmapped. - */ - public boolean ignoreUnmapped() { - return ignoreUnmapped; - } - - @Override - protected Query doToQuery(QueryShardContext context) throws IOException { - MappedFieldType fieldType = context.fieldMapper(fieldName); - if (fieldType == null) { - if (ignoreUnmapped) { - return new MatchNoDocsQuery(); - } else { - throw new QueryShardException(context, "failed to find geo_point field [" + fieldName + "]"); - } - } - if (!(fieldType instanceof BaseGeoPointFieldMapper.GeoPointFieldType)) { - throw new QueryShardException(context, "field [" + fieldName + "] is not a geo_point field"); - } - - final boolean indexCreatedBeforeV2_0 = context.indexVersionCreated().before(Version.V_2_0_0); - final boolean indexCreatedBeforeV2_2 = context.indexVersionCreated().before(Version.V_2_2_0); - // validation was not available prior to 2.x, so to support bwc - // percolation queries we only ignore_malformed on 2.x created indexes - if (!indexCreatedBeforeV2_0 && !GeoValidationMethod.isIgnoreMalformed(validationMethod)) { - if (!GeoUtils.isValidLatitude(point.lat())) { - throw new QueryShardException(context, "illegal latitude value [{}] for [{}]", point.lat(), NAME); - } - if (!GeoUtils.isValidLongitude(point.lon())) { - throw new QueryShardException(context, "illegal longitude value [{}] for [{}]", point.lon(), NAME); - } - } - - GeoPoint point = new GeoPoint(this.point); - if (indexCreatedBeforeV2_2 == false || GeoValidationMethod.isCoerce(validationMethod)) { - GeoUtils.normalizePoint(point, true, true); - } - - Double fromValue; - Double toValue; - if (from != null) { - if (from instanceof Number) { - fromValue = unit.toMeters(((Number) from).doubleValue()); - } else { - fromValue = DistanceUnit.parse((String) from, unit, DistanceUnit.DEFAULT); - } - if (indexCreatedBeforeV2_2) { - fromValue = geoDistance.normalize(fromValue, DistanceUnit.DEFAULT); - } - } else { - fromValue = 0.0; - } - - if (to != null) { - if (to instanceof Number) { - toValue = unit.toMeters(((Number) to).doubleValue()); - } else { - toValue = DistanceUnit.parse((String) to, unit, DistanceUnit.DEFAULT); - } - if (indexCreatedBeforeV2_2) { - toValue = geoDistance.normalize(toValue, DistanceUnit.DEFAULT); - } - } else { - toValue = GeoUtils.maxRadialDistanceMeters(point.lat(), point.lon()); - } - - final Version indexVersionCreated = context.indexVersionCreated(); - if (indexVersionCreated.onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - throw new QueryShardException(context, "[{}] queries are no longer supported for geo_point field types. " - + "Use geo_distance sort or aggregations", NAME); - } else if (indexVersionCreated.before(Version.V_2_2_0)) { - LegacyGeoPointFieldType geoFieldType = (LegacyGeoPointFieldType) fieldType; - IndexGeoPointFieldData indexFieldData = context.getForField(fieldType); - String bboxOptimization = Strings.isEmpty(optimizeBbox) ? 
DEFAULT_OPTIMIZE_BBOX : optimizeBbox; - return new GeoDistanceRangeQuery(point, fromValue, toValue, includeLower, includeUpper, geoDistance, geoFieldType, - indexFieldData, bboxOptimization, context); - } - - // if index created V_2_2 use (soon to be legacy) numeric encoding postings format - // if index created V_2_3 > use prefix encoded postings format - final GeoPointField.TermEncoding encoding = (indexVersionCreated.before(Version.V_2_3_0)) ? - GeoPointField.TermEncoding.NUMERIC : GeoPointField.TermEncoding.PREFIX; - - return new XGeoPointDistanceRangeQuery(fieldType.name(), encoding, point.lat(), point.lon(), - (includeLower) ? fromValue : fromValue + GeoUtils.TOLERANCE, - (includeUpper) ? toValue : toValue - GeoUtils.TOLERANCE); - } - - @Override - protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(NAME); - builder.startArray(fieldName).value(point.lon()).value(point.lat()).endArray(); - builder.field(FROM_FIELD.getPreferredName(), from); - builder.field(TO_FIELD.getPreferredName(), to); - builder.field(INCLUDE_LOWER_FIELD.getPreferredName(), includeLower); - builder.field(INCLUDE_UPPER_FIELD.getPreferredName(), includeUpper); - builder.field(UNIT_FIELD.getPreferredName(), unit); - builder.field(DISTANCE_TYPE_FIELD.getPreferredName(), geoDistance.name().toLowerCase(Locale.ROOT)); - if (Strings.isEmpty(optimizeBbox) == false) { - builder.field(OPTIMIZE_BBOX_FIELD.getPreferredName(), optimizeBbox); - } - builder.field(VALIDATION_METHOD.getPreferredName(), validationMethod); - builder.field(IGNORE_UNMAPPED_FIELD.getPreferredName(), ignoreUnmapped); - printBoostAndQueryName(builder); - builder.endObject(); - } - - public static Optional fromXContent(QueryParseContext parseContext) throws IOException { - XContentParser parser = parseContext.parser(); - - XContentParser.Token token; - - Float boost = null; - String queryName = null; - String currentFieldName = null; - GeoPoint point = null; - String fieldName = null; - Object vFrom = null; - Object vTo = null; - Boolean includeLower = null; - Boolean includeUpper = null; - DistanceUnit unit = null; - GeoDistance geoDistance = null; - String optimizeBbox = null; - boolean coerce = GeoValidationMethod.DEFAULT_LENIENT_PARSING; - boolean ignoreMalformed = GeoValidationMethod.DEFAULT_LENIENT_PARSING; - GeoValidationMethod validationMethod = null; - boolean ignoreUnmapped = DEFAULT_IGNORE_UNMAPPED; - - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (parseContext.isDeprecatedSetting(currentFieldName)) { - // skip - } else if (token == XContentParser.Token.START_ARRAY) { - if (fieldName == null) { - if (point == null) { - point = new GeoPoint(); - } - GeoUtils.parseGeoPoint(parser, point); - fieldName = currentFieldName; - } else { - throw new ParsingException(parser.getTokenLocation(), "[" + GeoDistanceRangeQueryBuilder.NAME + - "] field name already set to [" + fieldName + "] but found [" + currentFieldName + "]"); - } - } else if (token == XContentParser.Token.START_OBJECT) { - // the json in the format of -> field : { lat : 30, lon : 12 } - if (fieldName == null) { - fieldName = currentFieldName; - if (point == null) { - point = new GeoPoint(); - } - GeoUtils.parseGeoPoint(parser, point); - } else { - throw new ParsingException(parser.getTokenLocation(), "[" + GeoDistanceRangeQueryBuilder.NAME + - "] field name already set to [" + fieldName + "] but found 
[" + currentFieldName + "]"); - } - } else if (token.isValue()) { - if (parseContext.getParseFieldMatcher().match(currentFieldName, FROM_FIELD)) { - if (token == XContentParser.Token.VALUE_NULL) { - } else if (token == XContentParser.Token.VALUE_STRING) { - vFrom = parser.text(); // a String - } else { - vFrom = parser.numberValue(); // a Number - } - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, TO_FIELD)) { - if (token == XContentParser.Token.VALUE_NULL) { - } else if (token == XContentParser.Token.VALUE_STRING) { - vTo = parser.text(); // a String - } else { - vTo = parser.numberValue(); // a Number - } - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, INCLUDE_LOWER_FIELD)) { - includeLower = parser.booleanValue(); - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, INCLUDE_UPPER_FIELD)) { - includeUpper = parser.booleanValue(); - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, IGNORE_UNMAPPED_FIELD)) { - ignoreUnmapped = parser.booleanValue(); - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, GT_FIELD)) { - if (token == XContentParser.Token.VALUE_NULL) { - } else if (token == XContentParser.Token.VALUE_STRING) { - vFrom = parser.text(); // a String - } else { - vFrom = parser.numberValue(); // a Number - } - includeLower = false; - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, GTE_FIELD)) { - if (token == XContentParser.Token.VALUE_NULL) { - } else if (token == XContentParser.Token.VALUE_STRING) { - vFrom = parser.text(); // a String - } else { - vFrom = parser.numberValue(); // a Number - } - includeLower = true; - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, LT_FIELD)) { - if (token == XContentParser.Token.VALUE_NULL) { - } else if (token == XContentParser.Token.VALUE_STRING) { - vTo = parser.text(); // a String - } else { - vTo = parser.numberValue(); // a Number - } - includeUpper = false; - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, LTE_FIELD)) { - if (token == XContentParser.Token.VALUE_NULL) { - } else if (token == XContentParser.Token.VALUE_STRING) { - vTo = parser.text(); // a String - } else { - vTo = parser.numberValue(); // a Number - } - includeUpper = true; - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, UNIT_FIELD)) { - unit = DistanceUnit.fromString(parser.text()); - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, DISTANCE_TYPE_FIELD)) { - geoDistance = GeoDistance.fromString(parser.text()); - } else if (currentFieldName.endsWith(GeoPointFieldMapper.Names.LAT_SUFFIX)) { - String maybeFieldName = currentFieldName.substring(0, - currentFieldName.length() - GeoPointFieldMapper.Names.LAT_SUFFIX.length()); - if (fieldName == null || fieldName.equals(maybeFieldName)) { - fieldName = maybeFieldName; - } else { - throw new ParsingException(parser.getTokenLocation(), "[" + GeoDistanceRangeQueryBuilder.NAME + - "] field name already set to [" + fieldName + "] but found [" + currentFieldName + "]"); - } - if (point == null) { - point = new GeoPoint(); - } - point.resetLat(parser.doubleValue()); - } else if (currentFieldName.endsWith(GeoPointFieldMapper.Names.LON_SUFFIX)) { - String maybeFieldName = currentFieldName.substring(0, - currentFieldName.length() - GeoPointFieldMapper.Names.LON_SUFFIX.length()); - if (fieldName == null || fieldName.equals(maybeFieldName)) { - fieldName = maybeFieldName; - } else { - throw new 
ParsingException(parser.getTokenLocation(), "[" + GeoDistanceRangeQueryBuilder.NAME + - "] field name already set to [" + fieldName + "] but found [" + currentFieldName + "]"); - } - if (point == null) { - point = new GeoPoint(); - } - point.resetLon(parser.doubleValue()); - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, NAME_FIELD)) { - queryName = parser.text(); - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, BOOST_FIELD)) { - boost = parser.floatValue(); - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, OPTIMIZE_BBOX_FIELD)) { - optimizeBbox = parser.textOrNull(); - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, COERCE_FIELD)) { - coerce = parser.booleanValue(); - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, IGNORE_MALFORMED_FIELD)) { - ignoreMalformed = parser.booleanValue(); - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, VALIDATION_METHOD)) { - validationMethod = GeoValidationMethod.fromString(parser.text()); - } else { - if (fieldName == null) { - if (point == null) { - point = new GeoPoint(); - } - point.resetFromString(parser.text()); - fieldName = currentFieldName; - } else { - throw new ParsingException(parser.getTokenLocation(), "[" + GeoDistanceRangeQueryBuilder.NAME + - "] field name already set to [" + fieldName + "] but found [" + currentFieldName + "]"); - } - } - } - } - - GeoDistanceRangeQueryBuilder queryBuilder = new GeoDistanceRangeQueryBuilder(fieldName, point); - if (boost != null) { - queryBuilder.boost(boost); - } - - if (queryName != null) { - queryBuilder.queryName(queryName); - } - - if (vFrom != null) { - if (vFrom instanceof Number) { - queryBuilder.from((Number) vFrom); - } else { - queryBuilder.from((String) vFrom); - } - } - - if (vTo != null) { - if (vTo instanceof Number) { - queryBuilder.to((Number) vTo); - } else { - queryBuilder.to((String) vTo); - } - } - - if (includeUpper != null) { - queryBuilder.includeUpper(includeUpper); - } - - if (includeLower != null) { - queryBuilder.includeLower(includeLower); - } - - if (unit != null) { - queryBuilder.unit(unit); - } - - if (geoDistance != null) { - queryBuilder.geoDistance(geoDistance); - } - - if (optimizeBbox != null) { - queryBuilder.optimizeBbox(optimizeBbox); - } - - if (validationMethod != null) { - // if validation method is set explicitly ignore deprecated coerce/ignore malformed fields if any - queryBuilder.setValidationMethod(validationMethod); - } else { - queryBuilder.setValidationMethod(GeoValidationMethod.infer(coerce, ignoreMalformed)); - } - queryBuilder.ignoreUnmapped(ignoreUnmapped); - return Optional.of(queryBuilder); - } - - @Override - protected boolean doEquals(GeoDistanceRangeQueryBuilder other) { - return ((Objects.equals(fieldName, other.fieldName)) && - (Objects.equals(point, other.point)) && - (Objects.equals(from, other.from)) && - (Objects.equals(to, other.to)) && - (Objects.equals(includeUpper, other.includeUpper)) && - (Objects.equals(includeLower, other.includeLower)) && - (Objects.equals(geoDistance, other.geoDistance)) && - (Objects.equals(optimizeBbox, other.optimizeBbox)) && - (Objects.equals(validationMethod, other.validationMethod))) && - Objects.equals(ignoreUnmapped, other.ignoreUnmapped); - } - - @Override - protected int doHashCode() { - return Objects.hash(fieldName, point, from, to, includeUpper, includeLower, geoDistance, optimizeBbox, validationMethod, - ignoreUnmapped); - } - - @Override - public String 
getWriteableName() { - return NAME; - } -} diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java index e5e8e69fd54..b86d28c9088 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeoPolygonQueryBuilder.java @@ -23,9 +23,6 @@ import org.apache.lucene.document.LatLonPoint; import org.apache.lucene.geo.Polygon; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.spatial.geopoint.document.GeoPointField; -import org.apache.lucene.spatial.geopoint.search.GeoPointInPolygonQuery; -import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; @@ -36,11 +33,8 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.Token; -import org.elasticsearch.index.fielddata.IndexGeoPointFieldData; import org.elasticsearch.index.mapper.BaseGeoPointFieldMapper; -import org.elasticsearch.index.mapper.LatLonPointFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.search.geo.GeoPolygonQuery; import java.io.IOException; import java.util.ArrayList; @@ -181,10 +175,9 @@ public class GeoPolygonQueryBuilder extends AbstractQueryBuilder use prefix encoded postings format - final GeoPointField.TermEncoding encoding = (indexVersionCreated.before(Version.V_2_3_0)) ? - GeoPointField.TermEncoding.NUMERIC : GeoPointField.TermEncoding.PREFIX; - return new GeoPointInPolygonQuery(fieldType.name(), encoding, lats, lons); + return LatLonPoint.newPolygonQuery(fieldType.name(), new Polygon(lats, lons)); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/query/GeohashCellQuery.java b/core/src/main/java/org/elasticsearch/index/query/GeohashCellQuery.java deleted file mode 100644 index ab3b23af0fc..00000000000 --- a/core/src/main/java/org/elasticsearch/index/query/GeohashCellQuery.java +++ /dev/null @@ -1,390 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.query; - -import org.apache.lucene.search.MatchNoDocsQuery; -import org.apache.lucene.search.Query; -import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.geo.GeoHashUtils; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.geo.GeoUtils; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.unit.DistanceUnit; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentParser.Token; -import org.elasticsearch.index.mapper.BaseGeoPointFieldMapper; -import org.elasticsearch.index.mapper.LatLonPointFieldMapper; -import org.elasticsearch.index.mapper.MappedFieldType; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Objects; -import java.util.Optional; - -/** - * A geohash cell filter that filters {@link GeoPoint}s by their geohashes. Basically the a - * Geohash prefix is defined by the filter and all geohashes that are matching this - * prefix will be returned. The neighbors flag allows to filter - * geohashes that surround the given geohash. In general the neighborhood of a - * geohash is defined by its eight adjacent cells.
- * The structure of the {@link GeohashCellQuery} is defined as: - * <pre>
- * "geohash_cell" {
- *     "field":"location",
- *     "geohash":"u33d8u5dkx8k",
- *     "neighbors":false
- * }
- * </pre>
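// Illustrative sketch (assumed example values, not part of this change): the JSON structure above
// maps onto the Builder defined below. "location" stands in for any geo_point field; both this
// builder and QueryBuilders.geoHashCellQuery(...) are removed by this diff, so the snippet only
// shows the legacy API that disappears here.
GeohashCellQuery.Builder cell = new GeohashCellQuery.Builder("location", "u33d8u5dkx8k")
        .neighbors(false)   // equivalent to "neighbors": false in the structure above
        .precision(5);      // optionally truncate the geohash to at most 5 levels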
- */ -public class GeohashCellQuery { - public static final String NAME = "geohash_cell"; - - public static final boolean DEFAULT_NEIGHBORS = false; - - /** - * The default value for ignore_unmapped. - */ - public static final boolean DEFAULT_IGNORE_UNMAPPED = false; - - private static final ParseField NEIGHBORS_FIELD = new ParseField("neighbors"); - private static final ParseField PRECISION_FIELD = new ParseField("precision"); - private static final ParseField IGNORE_UNMAPPED_FIELD = new ParseField("ignore_unmapped"); - - /** - * Create a new geohash filter for a given set of geohashes. In general this method - * returns a boolean filter combining the geohashes OR-wise. - * - * @param context Context of the filter - * @param fieldType field mapper for geopoints - * @param geohash mandatory geohash - * @param geohashes optional array of additional geohashes - * @return a new GeoBoundinboxfilter - */ - public static Query create(QueryShardContext context, BaseGeoPointFieldMapper.LegacyGeoPointFieldType fieldType, - String geohash, @Nullable List geohashes) { - MappedFieldType geoHashMapper = fieldType.geoHashFieldType(); - if (geoHashMapper == null) { - throw new IllegalArgumentException("geohash filter needs geohash_prefix to be enabled"); - } - - if (geohashes == null || geohashes.size() == 0) { - return geoHashMapper.termQuery(geohash, context); - } else { - geohashes.add(geohash); - return geoHashMapper.termsQuery(geohashes, context); - } - } - - /** - * Builder for a geohashfilter. It needs the fields fieldname and - * geohash to be set. the default for a neighbor filteing is - * false. - */ - public static class Builder extends AbstractQueryBuilder { - // we need to store the geohash rather than the corresponding point, - // because a transformation from a geohash to a point an back to the - // geohash will extend the accuracy of the hash to max precision - // i.e. by filing up with z's. - private String fieldName; - private String geohash; - private Integer levels = null; - private boolean neighbors = DEFAULT_NEIGHBORS; - - private boolean ignoreUnmapped = DEFAULT_IGNORE_UNMAPPED; - - public Builder(String field, GeoPoint point) { - this(field, point == null ? null : point.geohash(), false); - } - - public Builder(String field, String geohash) { - this(field, geohash, false); - } - - public Builder(String field, String geohash, boolean neighbors) { - if (Strings.isEmpty(field)) { - throw new IllegalArgumentException("fieldName must not be null"); - } - if (Strings.isEmpty(geohash)) { - throw new IllegalArgumentException("geohash or point must be defined"); - } - this.fieldName = field; - this.geohash = geohash; - this.neighbors = neighbors; - } - - /** - * Read from a stream. 
- */ - public Builder(StreamInput in) throws IOException { - super(in); - fieldName = in.readString(); - geohash = in.readString(); - levels = in.readOptionalVInt(); - neighbors = in.readBoolean(); - ignoreUnmapped = in.readBoolean(); - } - - @Override - protected void doWriteTo(StreamOutput out) throws IOException { - out.writeString(fieldName); - out.writeString(geohash); - out.writeOptionalVInt(levels); - out.writeBoolean(neighbors); - out.writeBoolean(ignoreUnmapped); - } - - public Builder point(GeoPoint point) { - this.geohash = point.getGeohash(); - return this; - } - - public Builder point(double lat, double lon) { - this.geohash = GeoHashUtils.stringEncode(lon, lat); - return this; - } - - public Builder geohash(String geohash) { - this.geohash = geohash; - return this; - } - - public String geohash() { - return geohash; - } - - public Builder precision(int levels) { - if (levels <= 0) { - throw new IllegalArgumentException("precision must be greater than 0. Found [" + levels + "]"); - } - this.levels = levels; - return this; - } - - public Integer precision() { - return levels; - } - - public Builder precision(String precision) { - double meters = DistanceUnit.parse(precision, DistanceUnit.DEFAULT, DistanceUnit.METERS); - return precision(GeoUtils.geoHashLevelsForPrecision(meters)); - } - - public Builder neighbors(boolean neighbors) { - this.neighbors = neighbors; - return this; - } - - public boolean neighbors() { - return neighbors; - } - - public Builder fieldName(String fieldName) { - this.fieldName = fieldName; - return this; - } - - public String fieldName() { - return fieldName; - } - - /** - * Sets whether the query builder should ignore unmapped fields (and run - * a {@link MatchNoDocsQuery} in place of this query) or throw an - * exception if the field is unmapped. - */ - public GeohashCellQuery.Builder ignoreUnmapped(boolean ignoreUnmapped) { - this.ignoreUnmapped = ignoreUnmapped; - return this; - } - - /** - * Gets whether the query builder will ignore unmapped fields (and run a - * {@link MatchNoDocsQuery} in place of this query) or throw an - * exception if the field is unmapped. - */ - public boolean ignoreUnmapped() { - return ignoreUnmapped; - } - - @Override - protected Query doToQuery(QueryShardContext context) throws IOException { - MappedFieldType fieldType = context.fieldMapper(fieldName); - if (fieldType == null) { - if (ignoreUnmapped) { - return new MatchNoDocsQuery(); - } else { - throw new QueryShardException(context, "failed to parse [{}] query. missing [{}] field [{}]", NAME, - BaseGeoPointFieldMapper.CONTENT_TYPE, fieldName); - } - } - - if (fieldType instanceof LatLonPointFieldMapper.LatLonPointFieldType) { - throw new QueryShardException(context, "failed to parse [{}] query. " - + "geo_point field no longer supports geohash_cell queries", NAME); - } else if (!(fieldType instanceof BaseGeoPointFieldMapper.LegacyGeoPointFieldType)) { - throw new QueryShardException(context, "failed to parse [{}] query. field [{}] is not a geo_point field", NAME, fieldName); - } - - BaseGeoPointFieldMapper.LegacyGeoPointFieldType geoFieldType = ((BaseGeoPointFieldMapper.LegacyGeoPointFieldType) fieldType); - if (!geoFieldType.isGeoHashPrefixEnabled()) { - throw new QueryShardException(context, "failed to parse [{}] query. 
[geohash_prefix] is not enabled for field [{}]", NAME, - fieldName); - } - - String geohash = this.geohash; - if (levels != null) { - int len = Math.min(levels, geohash.length()); - geohash = geohash.substring(0, len); - } - - Query query; - if (neighbors) { - query = create(context, geoFieldType, geohash, GeoHashUtils.addNeighbors(geohash, new ArrayList(8))); - } else { - query = create(context, geoFieldType, geohash, null); - } - return query; - } - - @Override - protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(NAME); - builder.field(NEIGHBORS_FIELD.getPreferredName(), neighbors); - if (levels != null) { - builder.field(PRECISION_FIELD.getPreferredName(), levels); - } - builder.field(fieldName, geohash); - builder.field(IGNORE_UNMAPPED_FIELD.getPreferredName(), ignoreUnmapped); - printBoostAndQueryName(builder); - builder.endObject(); - } - - public static Optional fromXContent(QueryParseContext parseContext) throws IOException { - XContentParser parser = parseContext.parser(); - - String fieldName = null; - String geohash = null; - Integer levels = null; - Boolean neighbors = null; - String queryName = null; - Float boost = null; - boolean ignoreUnmapped = DEFAULT_IGNORE_UNMAPPED; - - XContentParser.Token token; - if ((token = parser.currentToken()) != Token.START_OBJECT) { - throw new ElasticsearchParseException("failed to parse [{}] query. expected an object but found [{}] instead", NAME, token); - } - - while ((token = parser.nextToken()) != Token.END_OBJECT) { - if (token == Token.FIELD_NAME) { - String field = parser.currentName(); - - if (parseContext.isDeprecatedSetting(field)) { - // skip - } else if (parseContext.getParseFieldMatcher().match(field, PRECISION_FIELD)) { - token = parser.nextToken(); - if (token == Token.VALUE_NUMBER) { - levels = parser.intValue(); - } else if (token == Token.VALUE_STRING) { - double meters = DistanceUnit.parse(parser.text(), DistanceUnit.DEFAULT, DistanceUnit.METERS); - levels = GeoUtils.geoHashLevelsForPrecision(meters); - } - } else if (parseContext.getParseFieldMatcher().match(field, NEIGHBORS_FIELD)) { - parser.nextToken(); - neighbors = parser.booleanValue(); - } else if (parseContext.getParseFieldMatcher().match(field, AbstractQueryBuilder.NAME_FIELD)) { - parser.nextToken(); - queryName = parser.text(); - } else if (parseContext.getParseFieldMatcher().match(field, IGNORE_UNMAPPED_FIELD)) { - parser.nextToken(); - ignoreUnmapped = parser.booleanValue(); - } else if (parseContext.getParseFieldMatcher().match(field, AbstractQueryBuilder.BOOST_FIELD)) { - parser.nextToken(); - boost = parser.floatValue(); - } else { - if (fieldName == null) { - fieldName = field; - token = parser.nextToken(); - if (token == Token.VALUE_STRING) { - // A string indicates either a geohash or a - // lat/lon - // string - String location = parser.text(); - if (location.indexOf(",") > 0) { - geohash = GeoUtils.parseGeoPoint(parser).geohash(); - } else { - geohash = location; - } - } else { - geohash = GeoUtils.parseGeoPoint(parser).geohash(); - } - } else { - throw new ParsingException(parser.getTokenLocation(), "[" + NAME + - "] field name already set to [" + fieldName + "] but found [" + field + "]"); - } - } - } else { - throw new ElasticsearchParseException("failed to parse [{}] query. 
unexpected token [{}]", NAME, token); - } - } - Builder builder = new Builder(fieldName, geohash); - if (levels != null) { - builder.precision(levels); - } - if (neighbors != null) { - builder.neighbors(neighbors); - } - if (queryName != null) { - builder.queryName(queryName); - } - if (boost != null) { - builder.boost(boost); - } - builder.ignoreUnmapped(ignoreUnmapped); - return Optional.of(builder); - } - - @Override - protected boolean doEquals(Builder other) { - return Objects.equals(fieldName, other.fieldName) - && Objects.equals(geohash, other.geohash) - && Objects.equals(levels, other.levels) - && Objects.equals(neighbors, other.neighbors) - && Objects.equals(ignoreUnmapped, other.ignoreUnmapped); - } - - @Override - protected int doHashCode() { - return Objects.hash(fieldName, geohash, levels, neighbors, ignoreUnmapped); - } - - @Override - public String getWriteableName() { - return NAME; - } - } -} diff --git a/core/src/main/java/org/elasticsearch/index/query/IndicesQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/IndicesQueryBuilder.java deleted file mode 100644 index 734d4cda922..00000000000 --- a/core/src/main/java/org/elasticsearch/index/query/IndicesQueryBuilder.java +++ /dev/null @@ -1,256 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.query; - -import org.apache.lucene.search.Query; -import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Objects; -import java.util.Optional; - -/** - * A query that will execute the wrapped query only for the specified indices, - * and "match_all" when it does not match those indices (by default). 
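// Hedged sketch of the replacement suggested by the @deprecated tag below (index and field names
// are assumed examples; org.elasticsearch.index.query.QueryBuilders/QueryBuilder imports assumed):
// a term query on the built-in "_index" field reproduces the "query here, match_all elsewhere"
// behaviour without this wrapper.
QueryBuilder perIndex = QueryBuilders.boolQuery()
        .filter(QueryBuilders.termQuery("_index", "logs-2016"))
        .must(QueryBuilders.matchQuery("message", "error"));
QueryBuilder elsewhere = QueryBuilders.boolQuery()
        .mustNot(QueryBuilders.termQuery("_index", "logs-2016"));   // plays the match_all role
QueryBuilder replacement = QueryBuilders.boolQuery().should(perIndex).should(elsewhere);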
- * - * @deprecated instead search on the `_index` field - */ -@Deprecated -// TODO remove this class in 6.0 -public class IndicesQueryBuilder extends AbstractQueryBuilder { - - public static final String NAME = "indices"; - - private static final ParseField QUERY_FIELD = new ParseField("query"); - private static final ParseField NO_MATCH_QUERY = new ParseField("no_match_query"); - private static final ParseField INDEX_FIELD = new ParseField("index"); - private static final ParseField INDICES_FIELD = new ParseField("indices"); - - private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(IndicesQueryBuilder.class)); - - private final QueryBuilder innerQuery; - - private final String[] indices; - - private QueryBuilder noMatchQuery = defaultNoMatchQuery(); - - /** - * @deprecated instead search on the `_index` field - */ - @Deprecated - public IndicesQueryBuilder(QueryBuilder innerQuery, String... indices) { - DEPRECATION_LOGGER.deprecated("{} query is deprecated. Instead search on the '_index' field", NAME); - if (innerQuery == null) { - throw new IllegalArgumentException("inner query cannot be null"); - } - if (indices == null || indices.length == 0) { - throw new IllegalArgumentException("list of indices cannot be null or empty"); - } - this.innerQuery = Objects.requireNonNull(innerQuery); - this.indices = indices; - } - - /** - * Read from a stream. - */ - public IndicesQueryBuilder(StreamInput in) throws IOException { - super(in); - innerQuery = in.readNamedWriteable(QueryBuilder.class); - indices = in.readStringArray(); - noMatchQuery = in.readNamedWriteable(QueryBuilder.class); - } - - @Override - protected void doWriteTo(StreamOutput out) throws IOException { - out.writeNamedWriteable(innerQuery); - out.writeStringArray(indices); - out.writeNamedWriteable(noMatchQuery); - } - - public QueryBuilder innerQuery() { - return this.innerQuery; - } - - public String[] indices() { - return this.indices; - } - - /** - * Sets the query to use when it executes on an index that does not match the indices provided. - */ - public IndicesQueryBuilder noMatchQuery(QueryBuilder noMatchQuery) { - if (noMatchQuery == null) { - throw new IllegalArgumentException("noMatch query cannot be null"); - } - this.noMatchQuery = noMatchQuery; - return this; - } - - /** - * Sets the no match query, can either be all or none. 
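// Hedged example (the builder variable is assumed): only "all" and "none" are accepted, as
// parseNoMatchQuery further down enforces; "none" makes non-matching indices return no hits.
indicesQueryBuilder.noMatchQuery("none");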
- */ - public IndicesQueryBuilder noMatchQuery(String type) { - this.noMatchQuery = parseNoMatchQuery(type); - return this; - } - - public QueryBuilder noMatchQuery() { - return this.noMatchQuery; - } - - private static QueryBuilder defaultNoMatchQuery() { - return QueryBuilders.matchAllQuery(); - } - - @Override - protected void doXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(NAME); - builder.array(INDICES_FIELD.getPreferredName(), indices); - builder.field(QUERY_FIELD.getPreferredName()); - innerQuery.toXContent(builder, params); - builder.field(NO_MATCH_QUERY.getPreferredName()); - noMatchQuery.toXContent(builder, params); - printBoostAndQueryName(builder); - builder.endObject(); - } - - public static Optional fromXContent(QueryParseContext parseContext) throws IOException, ParsingException { - XContentParser parser = parseContext.parser(); - - QueryBuilder innerQuery = null; - Collection indices = new ArrayList<>(); - QueryBuilder noMatchQuery = defaultNoMatchQuery(); - - String queryName = null; - float boost = AbstractQueryBuilder.DEFAULT_BOOST; - - String currentFieldName = null; - XContentParser.Token token; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (token == XContentParser.Token.START_OBJECT) { - if (parseContext.getParseFieldMatcher().match(currentFieldName, QUERY_FIELD)) { - // the 2.0 behaviour when encountering "query" : {} is to return no docs for matching indices - innerQuery = parseContext.parseInnerQueryBuilder().orElse(new MatchNoneQueryBuilder()); - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, NO_MATCH_QUERY)) { - noMatchQuery = parseContext.parseInnerQueryBuilder().orElse(defaultNoMatchQuery()); - } else { - throw new ParsingException(parser.getTokenLocation(), "[indices] query does not support [" + currentFieldName + "]"); - } - } else if (token == XContentParser.Token.START_ARRAY) { - if (parseContext.getParseFieldMatcher().match(currentFieldName, INDICES_FIELD)) { - if (indices.isEmpty() == false) { - throw new ParsingException(parser.getTokenLocation(), "[indices] indices or index already specified"); - } - while (parser.nextToken() != XContentParser.Token.END_ARRAY) { - String value = parser.textOrNull(); - if (value == null) { - throw new ParsingException(parser.getTokenLocation(), "[indices] no value specified for 'indices' entry"); - } - indices.add(value); - } - } else { - throw new ParsingException(parser.getTokenLocation(), "[indices] query does not support [" + currentFieldName + "]"); - } - } else if (token.isValue()) { - if (parseContext.getParseFieldMatcher().match(currentFieldName, INDEX_FIELD)) { - if (indices.isEmpty() == false) { - throw new ParsingException(parser.getTokenLocation(), "[indices] indices or index already specified"); - } - indices.add(parser.text()); - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, NO_MATCH_QUERY)) { - noMatchQuery = parseNoMatchQuery(parser.text()); - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) { - queryName = parser.text(); - } else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) { - boost = parser.floatValue(); - } else { - throw new ParsingException(parser.getTokenLocation(), "[indices] query does not support [" + currentFieldName + "]"); - } - } - } - - if (innerQuery == 
null) { - throw new ParsingException(parser.getTokenLocation(), "[indices] requires 'query' element"); - } - if (indices.isEmpty()) { - throw new ParsingException(parser.getTokenLocation(), "[indices] requires 'indices' or 'index' element"); - } - return Optional.of(new IndicesQueryBuilder(innerQuery, indices.toArray(new String[indices.size()])) - .noMatchQuery(noMatchQuery) - .boost(boost) - .queryName(queryName)); - } - - static QueryBuilder parseNoMatchQuery(String type) { - if ("all".equals(type)) { - return QueryBuilders.matchAllQuery(); - } else if ("none".equals(type)) { - return new MatchNoneQueryBuilder(); - } - throw new IllegalArgumentException("query type can only be [all] or [none] but not " + "[" + type + "]"); - } - - @Override - public String getWriteableName() { - return NAME; - } - - @Override - protected Query doToQuery(QueryShardContext context) throws IOException { - if (context.matchesIndices(indices)) { - return innerQuery.toQuery(context); - } - return noMatchQuery.toQuery(context); - } - - @Override - public int doHashCode() { - return Objects.hash(innerQuery, noMatchQuery, Arrays.hashCode(indices)); - } - - @Override - protected boolean doEquals(IndicesQueryBuilder other) { - return Objects.equals(innerQuery, other.innerQuery) && - Arrays.equals(indices, other.indices) && // otherwise we are comparing pointers - Objects.equals(noMatchQuery, other.noMatchQuery); - } - - @Override - protected QueryBuilder doRewrite(QueryRewriteContext queryShardContext) throws IOException { - QueryBuilder newInnnerQuery = innerQuery.rewrite(queryShardContext); - QueryBuilder newNoMatchQuery = noMatchQuery.rewrite(queryShardContext); - if (newInnnerQuery != innerQuery || newNoMatchQuery != noMatchQuery) { - return new IndicesQueryBuilder(innerQuery, indices).noMatchQuery(noMatchQuery); - } - return this; - } -} diff --git a/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java index 25397d2a3ee..c5af11c89ac 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java @@ -22,6 +22,7 @@ package org.elasticsearch.index.query; import org.apache.lucene.queries.ExtendedCommonTermsQuery; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.GraphQuery; import org.apache.lucene.search.Query; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; @@ -36,6 +37,7 @@ import org.elasticsearch.index.search.MatchQuery; import org.elasticsearch.index.search.MatchQuery.ZeroTermsQuery; import java.io.IOException; +import java.util.List; import java.util.Locale; import java.util.Objects; import java.util.Optional; @@ -62,8 +64,6 @@ public class MatchQueryBuilder extends AbstractQueryBuilder { /** The name for the match query */ public static final String NAME = "match"; - public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME, "match_fuzzy", "fuzzy_match"); - /** The default mode terms are combined in a match query */ public static final Operator DEFAULT_OPERATOR = Operator.OR; @@ -471,9 +471,25 @@ public class MatchQueryBuilder extends AbstractQueryBuilder { // and multiple variations of the same word in the query (synonyms for instance). 
if (query instanceof BooleanQuery && !((BooleanQuery) query).isCoordDisabled()) { query = Queries.applyMinimumShouldMatch((BooleanQuery) query, minimumShouldMatch); + } else if (query instanceof GraphQuery && ((GraphQuery) query).hasBoolean()) { + // we have a graph query that has at least one boolean sub-query + // re-build and set minimum should match value on all boolean queries + List oldQueries = ((GraphQuery) query).getQueries(); + Query[] queries = new Query[oldQueries.size()]; + for (int i = 0; i < queries.length; i++) { + Query oldQuery = oldQueries.get(i); + if (oldQuery instanceof BooleanQuery) { + queries[i] = Queries.applyMinimumShouldMatch((BooleanQuery) oldQuery, minimumShouldMatch); + } else { + queries[i] = oldQuery; + } + } + + query = new GraphQuery(queries); } else if (query instanceof ExtendedCommonTermsQuery) { ((ExtendedCommonTermsQuery)query).setLowFreqMinimumNumberShouldMatch(minimumShouldMatch); } + return query; } diff --git a/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java index 7ba39d7b348..da3cc85ee98 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java @@ -55,7 +55,6 @@ import org.elasticsearch.index.VersionType; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.UidFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper.KeywordFieldType; -import org.elasticsearch.index.mapper.StringFieldMapper.StringFieldType; import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType; import java.io.IOException; @@ -80,7 +79,6 @@ import static org.elasticsearch.index.mapper.Uid.createUidAsBytes; */ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder { public static final String NAME = "more_like_this"; - public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME, "mlt"); public static final int DEFAULT_MAX_QUERY_TERMS = XMoreLikeThis.DEFAULT_MAX_QUERY_TERMS; public static final int DEFAULT_MIN_TERM_FREQ = XMoreLikeThis.DEFAULT_MIN_TERM_FREQ; @@ -94,7 +92,7 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder> SUPPORTED_FIELD_TYPES = new HashSet<>( - Arrays.asList(StringFieldType.class, TextFieldType.class, KeywordFieldType.class)); + Arrays.asList(TextFieldType.class, KeywordFieldType.class)); private interface Field { ParseField FIELDS = new ParseField("fields"); diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java b/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java index 2765eaa38ca..229c7200f19 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryBuilders.java @@ -585,18 +585,6 @@ public abstract class QueryBuilders { return new TermsQueryBuilder(name, values); } - /** - * A query that will execute the wrapped query only for the specified - * indices, and "match_all" when it does not match those indices. - * - * @deprecated instead search on the `_index` field - */ - @Deprecated - public static IndicesQueryBuilder indicesQuery(QueryBuilder queryBuilder, String... indices) { - // TODO remove this method in 6.0 - return new IndicesQueryBuilder(queryBuilder, indices); - } - /** * A Query builder which allows building a query thanks to a JSON string or binary data. 
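// Hedged example of the JSON form described above (the query body is an assumed sample):
// wrapperQuery takes the raw JSON of any query and defers parsing of the inner query.
QueryBuilder fromJson = QueryBuilders.wrapperQuery("{\"term\": {\"user\": \"kimchy\"}}");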
*/ @@ -651,37 +639,6 @@ public abstract class QueryBuilders { return new GeoDistanceQueryBuilder(name); } - /** - * A filter to filter based on a specific range from a specific geo location / point. - * - * @param name The location field name. - * @param point The point - */ - public static GeoDistanceRangeQueryBuilder geoDistanceRangeQuery(String name, GeoPoint point) { - return new GeoDistanceRangeQueryBuilder(name, point); - } - - /** - * A filter to filter based on a specific range from a specific geo location / point. - * - * @param name The location field name. - * @param geohash The point as geohash - */ - public static GeoDistanceRangeQueryBuilder geoDistanceRangeQuery(String name, String geohash) { - return new GeoDistanceRangeQueryBuilder(name, geohash); - } - - /** - * A filter to filter based on a specific range from a specific geo location / point. - * - * @param name The location field name. - * @param lat The points latitude - * @param lon The points longitude - */ - public static GeoDistanceRangeQueryBuilder geoDistanceRangeQuery(String name, double lat, double lon) { - return new GeoDistanceRangeQueryBuilder(name, lat, lon); - } - /** * A filter to filter based on a bounding box defined by top left and bottom right locations / points * @@ -691,43 +648,6 @@ public abstract class QueryBuilders { return new GeoBoundingBoxQueryBuilder(name); } - /** - * A filter based on a bounding box defined by geohash. The field this filter is applied to - * must have {"type":"geo_point", "geohash":true} - * to work. - * - * @param name The geo point field name. - * @param geohash The Geohash to filter - */ - public static GeohashCellQuery.Builder geoHashCellQuery(String name, String geohash) { - return new GeohashCellQuery.Builder(name, geohash); - } - - /** - * A filter based on a bounding box defined by geohash. The field this filter is applied to - * must have {"type":"geo_point", "geohash":true} - * to work. - * - * @param name The geo point field name. - * @param point a geo point within the geohash bucket - */ - public static GeohashCellQuery.Builder geoHashCellQuery(String name, GeoPoint point) { - return new GeohashCellQuery.Builder(name, point); - } - - /** - * A filter based on a bounding box defined by geohash. The field this filter is applied to - * must have {"type":"geo_point", "geohash":true} - * to work. - * - * @param name The geo point field name - * @param geohash The Geohash to filter - * @param neighbors should the neighbor cell also be filtered - */ - public static GeohashCellQuery.Builder geoHashCellQuery(String name, String geohash, boolean neighbors) { - return new GeohashCellQuery.Builder(name, geohash, neighbors); - } - /** * A filter to filter based on a polygon defined by a set of locations / points. 
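// Hedged sketch of the polygon filter described above (field name and coordinates are assumed
// sample values, and the two-argument factory taking a List<GeoPoint> is assumed; needs
// java.util.Arrays/List and org.elasticsearch.common.geo.GeoPoint):
List<GeoPoint> triangle = Arrays.asList(
        new GeoPoint(40.0, -70.0),
        new GeoPoint(30.0, -80.0),
        new GeoPoint(20.0, -90.0));
QueryBuilder inPolygon = QueryBuilders.geoPolygonQuery("pin.location", triangle);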
* diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java b/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java index df542345911..e71322eb0ad 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java @@ -19,16 +19,6 @@ package org.elasticsearch.index.query; -import static java.util.Collections.unmodifiableMap; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collection; -import java.util.HashMap; -import java.util.Map; -import java.util.function.Function; -import java.util.function.LongSupplier; - import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.index.IndexReader; import org.apache.lucene.queryparser.classic.MapperQueryParser; @@ -67,6 +57,16 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.lookup.SearchLookup; +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; +import java.util.function.Function; +import java.util.function.LongSupplier; + +import static java.util.Collections.unmodifiableMap; + /** * Context object used to create lucene queries on the shard level. */ @@ -282,15 +282,6 @@ public class QueryShardContext extends QueryRewriteContext { return indexSettings.getIndexVersionCreated(); } - public boolean matchesIndices(String... indices) { - for (String index : indices) { - if (indexSettings.matchesIndexName(index)) { - return true; - } - } - return false; - } - public ParsedQuery toFilter(QueryBuilder queryBuilder) { return toQuery(queryBuilder, q -> { Query filter = q.toFilter(this); diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java index 8ef6f3b1473..d019b20b425 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java @@ -44,9 +44,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.ScaledFloatFieldMapper; -import org.elasticsearch.index.mapper.StringFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper; -import org.elasticsearch.index.mapper.TimestampFieldMapper; import org.elasticsearch.index.query.support.QueryParsers; import org.joda.time.DateTimeZone; @@ -129,9 +127,7 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder i private static final ParseField FIELDDATA_FIELD = new ParseField("fielddata").withAllDeprecated("[no replacement]"); private static final ParseField NAME_FIELD = new ParseField("_name") - .withAllDeprecated("query name is not supported in short version of range query"); - private static final ParseField LTE_FIELD = new ParseField("lte", "le"); - private static final ParseField GTE_FIELD = new ParseField("gte", "ge"); - private static final ParseField FROM_FIELD = new ParseField("from"); - private static final ParseField TO_FIELD = new ParseField("to"); + .withAllDeprecated("query name is not supported in short version of range query"); + public static final ParseField LTE_FIELD = new ParseField("lte", "le"); + public static final ParseField GTE_FIELD = new ParseField("gte", "ge"); + 
public static final ParseField FROM_FIELD = new ParseField("from"); + public static final ParseField TO_FIELD = new ParseField("to"); private static final ParseField INCLUDE_LOWER_FIELD = new ParseField("include_lower"); private static final ParseField INCLUDE_UPPER_FIELD = new ParseField("include_upper"); - private static final ParseField GT_FIELD = new ParseField("gt"); - private static final ParseField LT_FIELD = new ParseField("lt"); + public static final ParseField GT_FIELD = new ParseField("gt"); + public static final ParseField LT_FIELD = new ParseField("lt"); private static final ParseField TIME_ZONE_FIELD = new ParseField("time_zone"); private static final ParseField FORMAT_FIELD = new ParseField("format"); + private static final ParseField RELATION_FIELD = new ParseField("relation"); private final String fieldName; @@ -81,6 +84,8 @@ public class RangeQueryBuilder extends AbstractQueryBuilder i private FormatDateTimeFormatter format; + private ShapeRelation relation; + /** * A Query that matches documents within an range of terms. * @@ -108,6 +113,12 @@ public class RangeQueryBuilder extends AbstractQueryBuilder i if (formatString != null) { format = Joda.forPattern(formatString); } + if (in.getVersion().onOrAfter(Version.V_5_2_0_UNRELEASED)) { + String relationString = in.readOptionalString(); + if (relationString != null) { + relation = ShapeRelation.getRelationByName(relationString); + } + } } @Override @@ -123,6 +134,13 @@ public class RangeQueryBuilder extends AbstractQueryBuilder i formatString = this.format.format(); } out.writeOptionalString(formatString); + if (out.getVersion().onOrAfter(Version.V_5_2_0_UNRELEASED)) { + String relationString = null; + if (this.relation != null) { + relationString = this.relation.getRelationName(); + } + out.writeOptionalString(relationString); + } } /** @@ -283,12 +301,27 @@ public class RangeQueryBuilder extends AbstractQueryBuilder i } DateMathParser getForceDateParser() { // pkg private for testing - if (this.format != null) { + if (this.format != null) { return new DateMathParser(this.format); } return null; } + public ShapeRelation relation() { + return this.relation; + } + + public RangeQueryBuilder relation(String relation) { + if (relation == null) { + throw new IllegalArgumentException("relation cannot be null"); + } + this.relation = ShapeRelation.getRelationByName(relation); + if (this.relation == null) { + throw new IllegalArgumentException(relation + " is not a valid relation"); + } + return this; + } + @Override protected void doXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(NAME); @@ -303,6 +336,9 @@ public class RangeQueryBuilder extends AbstractQueryBuilder i if (format != null) { builder.field(FORMAT_FIELD.getPreferredName(), format.format()); } + if (relation != null) { + builder.field(RELATION_FIELD.getPreferredName(), relation.getRelationName()); + } printBoostAndQueryName(builder); builder.endObject(); builder.endObject(); @@ -320,6 +356,7 @@ public class RangeQueryBuilder extends AbstractQueryBuilder i float boost = AbstractQueryBuilder.DEFAULT_BOOST; String queryName = null; String format = null; + String relation = null; String currentFieldName = null; XContentParser.Token token; @@ -361,6 +398,8 @@ public class RangeQueryBuilder extends AbstractQueryBuilder i timeZone = parser.text(); } else if (parseContext.getParseFieldMatcher().match(currentFieldName, FORMAT_FIELD)) { format = parser.text(); + } else if (parseContext.getParseFieldMatcher().match(currentFieldName, 
RELATION_FIELD)) { + relation = parser.text(); } else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) { queryName = parser.text(); } else { @@ -393,6 +432,9 @@ public class RangeQueryBuilder extends AbstractQueryBuilder i if (format != null) { rangeQuery.format(format); } + if (relation != null) { + rangeQuery.relation(relation); + } return Optional.of(rangeQuery); } @@ -450,15 +492,18 @@ public class RangeQueryBuilder extends AbstractQueryBuilder i Query query = null; MappedFieldType mapper = context.fieldMapper(this.fieldName); if (mapper != null) { - if (mapper instanceof LegacyDateFieldMapper.DateFieldType) { - - query = ((LegacyDateFieldMapper.DateFieldType) mapper).rangeQuery(from, to, includeLower, includeUpper, - timeZone, getForceDateParser(), context); - } else if (mapper instanceof DateFieldMapper.DateFieldType) { + if (mapper instanceof DateFieldMapper.DateFieldType) { query = ((DateFieldMapper.DateFieldType) mapper).rangeQuery(from, to, includeLower, includeUpper, timeZone, getForceDateParser(), context); - } else { + } else if (mapper instanceof RangeFieldMapper.RangeFieldType && mapper.typeName() == RangeFieldMapper.RangeType.DATE.name) { + DateMathParser forcedDateParser = null; + if (this.format != null) { + forcedDateParser = new DateMathParser(this.format); + } + query = ((RangeFieldMapper.RangeFieldType) mapper).rangeQuery(from, to, includeLower, includeUpper, + relation, timeZone, forcedDateParser, context); + } else { if (timeZone != null) { throw new QueryShardException(context, "[range] time_zone can not be applied to non date field [" + fieldName + "]"); diff --git a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java index aa82b520684..ae30021d5ed 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java @@ -159,19 +159,19 @@ public class SimpleQueryStringBuilder extends AbstractQueryBuilder { public static final String NAME = "terms"; - public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME, "in"); private final String fieldName; private final List values; @@ -80,7 +81,7 @@ public class TermsQueryBuilder extends AbstractQueryBuilder { throw new IllegalArgumentException("Both values and termsLookup specified for terms query"); } this.fieldName = fieldName; - this.values = values; + this.values = values == null ? null : convert(values); this.termsLookup = termsLookup; } @@ -159,7 +160,7 @@ public class TermsQueryBuilder extends AbstractQueryBuilder { throw new IllegalArgumentException("No value specified for terms query"); } this.fieldName = fieldName; - this.values = convertToBytesRefListIfStringList(values); + this.values = convert(values); this.termsLookup = null; } @@ -185,43 +186,125 @@ public class TermsQueryBuilder extends AbstractQueryBuilder { } public List values() { - return convertToStringListIfBytesRefList(this.values); + return convertBack(this.values); } public TermsLookup termsLookup() { return this.termsLookup; } + private static final Set> INTEGER_TYPES = new HashSet<>( + Arrays.asList(Byte.class, Short.class, Integer.class, Long.class)); + private static final Set> STRING_TYPES = new HashSet<>( + Arrays.asList(BytesRef.class, String.class)); + /** - * Same as {@link #convertToBytesRefIfString} but on Iterable. 
- * @param objs the Iterable of input object - * @return the same input or a list of {@link BytesRef} representation if input was a list of type string + * Same as {@link #convert(List)} but on an {@link Iterable}. */ - private static List convertToBytesRefListIfStringList(Iterable objs) { - if (objs == null) { - return null; + private static List convert(Iterable values) { + List list; + if (values instanceof List) { + list = (List) values; + } else { + ArrayList arrayList = new ArrayList(); + for (Object o : values) { + arrayList.add(o); + } + list = arrayList; } - List newObjs = new ArrayList<>(); - for (Object obj : objs) { - newObjs.add(convertToBytesRefIfString(obj)); - } - return newObjs; + return convert(list); } /** - * Same as {@link #convertToStringIfBytesRef} but on Iterable. - * @param objs the Iterable of input object - * @return the same input or a list of utf8 string if input was a list of type {@link BytesRef} + * Convert the list in a way that optimizes storage in the case that all + * elements are either integers or {@link String}s/{@link BytesRef}s. This + * is useful to help garbage collections for use-cases that involve sending + * very large terms queries to Elasticsearch. If the list does not only + * contain integers or {@link String}s, then a list is returned where all + * {@link String}s have been replaced with {@link BytesRef}s. */ - private static List convertToStringListIfBytesRefList(Iterable objs) { - if (objs == null) { - return null; + static List convert(List list) { + if (list.isEmpty()) { + return Collections.emptyList(); } - List newObjs = new ArrayList<>(); - for (Object obj : objs) { - newObjs.add(convertToStringIfBytesRef(obj)); + + final boolean allNumbers = list.stream().allMatch(o -> o != null && INTEGER_TYPES.contains(o.getClass())); + if (allNumbers) { + final long[] elements = list.stream().mapToLong(o -> ((Number) o).longValue()).toArray(); + return new AbstractList() { + @Override + public Object get(int index) { + return elements[index]; + } + @Override + public int size() { + return elements.length; + } + }; } - return newObjs; + + final boolean allStrings = list.stream().allMatch(o -> o != null && STRING_TYPES.contains(o.getClass())); + if (allStrings) { + final BytesRefBuilder builder = new BytesRefBuilder(); + try (final BytesStreamOutput bytesOut = new BytesStreamOutput()) { + final int[] endOffsets = new int[list.size()]; + int i = 0; + for (Object o : list) { + BytesRef b; + if (o instanceof BytesRef) { + b = (BytesRef) o; + } else { + builder.copyChars(o.toString()); + b = builder.get(); + } + bytesOut.writeBytes(b.bytes, b.offset, b.length); + if (i == 0) { + endOffsets[0] = b.length; + } else { + endOffsets[i] = Math.addExact(endOffsets[i-1], b.length); + } + ++i; + } + final BytesReference bytes = bytesOut.bytes(); + return new AbstractList() { + public Object get(int i) { + final int startOffset = i == 0 ? 0 : endOffsets[i-1]; + final int endOffset = endOffsets[i]; + return bytes.slice(startOffset, endOffset - startOffset).toBytesRef(); + } + public int size() { + return endOffsets.length; + } + }; + } + } + + return list.stream().map(o -> o instanceof String ? new BytesRef(o.toString()) : o).collect(Collectors.toList()); + } + + /** + * Convert the internal {@link List} of values back to a user-friendly list. + * Integers are kept as-is since the terms query does not make any difference + * between {@link Integer}s and {@link Long}s, but {@link BytesRef}s are + * converted back to {@link String}s. 
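// Hedged, self-contained sketch of the packing idea described above, using plain JDK types in
// place of BytesRef/BytesReference: every value is copied into a single byte[] and addressed via
// an end-offset table, so a list of N strings costs two large allocations instead of N small ones.
import java.nio.charset.StandardCharsets;
import java.util.AbstractList;
import java.util.List;

final class PackedStringList extends AbstractList<String> {
    private final byte[] bytes;        // all values, concatenated
    private final int[] endOffsets;    // endOffsets[i] = end of value i within bytes

    PackedStringList(List<String> values) {
        java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
        endOffsets = new int[values.size()];
        int i = 0;
        for (String v : values) {
            byte[] b = v.getBytes(StandardCharsets.UTF_8);
            out.write(b, 0, b.length);
            endOffsets[i] = (i == 0 ? 0 : endOffsets[i - 1]) + b.length;
            i++;
        }
        bytes = out.toByteArray();
    }

    @Override
    public String get(int index) {   // decode on access, mirroring convertBack(...)
        int start = index == 0 ? 0 : endOffsets[index - 1];
        return new String(bytes, start, endOffsets[index] - start, StandardCharsets.UTF_8);
    }

    @Override
    public int size() {
        return endOffsets.length;
    }
}
// e.g. new PackedStringList(java.util.Arrays.asList("kimchy", "elastic")).get(1) returns "elastic"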
+ */ + static List convertBack(List list) { + return new AbstractList() { + @Override + public int size() { + return list.size(); + } + @Override + public Object get(int index) { + Object o = list.get(index); + if (o instanceof BytesRef) { + o = ((BytesRef) o).utf8ToString(); + } + // we do not convert longs, all integer types are equivalent + // as far as this query is concerned + return o; + } + }; } @Override @@ -232,7 +315,7 @@ public class TermsQueryBuilder extends AbstractQueryBuilder { termsLookup.toXContent(builder, params); builder.endObject(); } else { - builder.field(fieldName, convertToStringListIfBytesRefList(values)); + builder.field(fieldName, convertBack(values)); } printBoostAndQueryName(builder); builder.endObject(); @@ -319,7 +402,17 @@ public class TermsQueryBuilder extends AbstractQueryBuilder { if (values == null || values.isEmpty()) { return Queries.newMatchNoDocsQuery("No terms supplied for \"" + getName() + "\" query."); } - return handleTermsQuery(values, fieldName, context); + MappedFieldType fieldType = context.fieldMapper(fieldName); + + if (fieldType != null) { + return fieldType.termsQuery(values, context); + } else { + BytesRef[] filterValues = new BytesRef[values.size()]; + for (int i = 0; i < filterValues.length; i++) { + filterValues[i] = BytesRefs.toBytesRef(values.get(i)); + } + return new TermsQuery(fieldName, filterValues); + } } private List fetch(TermsLookup termsLookup, Client client) { @@ -334,40 +427,6 @@ public class TermsQueryBuilder extends AbstractQueryBuilder { return terms; } - private static Query handleTermsQuery(List terms, String fieldName, QueryShardContext context) { - MappedFieldType fieldType = context.fieldMapper(fieldName); - String indexFieldName; - if (fieldType != null) { - indexFieldName = fieldType.name(); - } else { - indexFieldName = fieldName; - } - - Query query; - if (context.isFilter()) { - if (fieldType != null) { - query = fieldType.termsQuery(terms, context); - } else { - BytesRef[] filterValues = new BytesRef[terms.size()]; - for (int i = 0; i < filterValues.length; i++) { - filterValues[i] = BytesRefs.toBytesRef(terms.get(i)); - } - query = new TermsQuery(indexFieldName, filterValues); - } - } else { - BooleanQuery.Builder bq = new BooleanQuery.Builder(); - for (Object term : terms) { - if (fieldType != null) { - bq.add(fieldType.termQuery(term, context), BooleanClause.Occur.SHOULD); - } else { - bq.add(new TermQuery(new Term(indexFieldName, BytesRefs.toBytesRef(term))), BooleanClause.Occur.SHOULD); - } - } - query = bq.build(); - } - return query; - } - @Override protected int doHashCode() { return Objects.hash(fieldName, values, termsLookup); diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java index e1df7ec7706..cf36ea9dffd 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java @@ -44,11 +44,8 @@ import org.elasticsearch.index.fielddata.NumericDoubleValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.index.mapper.BaseGeoPointFieldMapper; import org.elasticsearch.index.mapper.DateFieldMapper; -import org.elasticsearch.index.mapper.LegacyDateFieldMapper; -import org.elasticsearch.index.mapper.LegacyNumberFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import 
org.elasticsearch.index.mapper.NumberFieldMapper; -import org.elasticsearch.index.query.QueryRewriteContext; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.search.MultiValueMode; @@ -205,13 +202,11 @@ public abstract class DecayFunctionBuilder // dates and time and geo need special handling parser.nextToken(); - if (fieldType instanceof LegacyDateFieldMapper.DateFieldType - || fieldType instanceof DateFieldMapper.DateFieldType) { + if (fieldType instanceof DateFieldMapper.DateFieldType) { return parseDateVariable(parser, context, fieldType, mode); } else if (fieldType instanceof BaseGeoPointFieldMapper.GeoPointFieldType) { return parseGeoVariable(parser, context, fieldType, mode); - } else if (fieldType instanceof LegacyNumberFieldMapper.NumberFieldType - || fieldType instanceof NumberFieldMapper.NumberFieldType) { + } else if (fieldType instanceof NumberFieldMapper.NumberFieldType) { return parseNumberVariable(parser, context, fieldType, mode); } else { throw new ParsingException(parser.getTokenLocation(), "field [{}] is of type [{}], but only numeric types are supported.", @@ -315,12 +310,7 @@ public abstract class DecayFunctionBuilder if (originString == null) { origin = context.nowInMillis(); } else { - if (dateFieldType instanceof LegacyDateFieldMapper.DateFieldType) { - origin = ((LegacyDateFieldMapper.DateFieldType) dateFieldType).parseToMilliseconds(originString, false, null, null, - context); - } else { - origin = ((DateFieldMapper.DateFieldType) dateFieldType).parseToMilliseconds(originString, false, null, null, context); - } + origin = ((DateFieldMapper.DateFieldType) dateFieldType).parseToMilliseconds(originString, false, null, null, context); } if (scaleString == null) { diff --git a/core/src/main/java/org/elasticsearch/index/search/MatchQuery.java b/core/src/main/java/org/elasticsearch/index/search/MatchQuery.java index 46eb6b7d399..c9664265d3a 100644 --- a/core/src/main/java/org/elasticsearch/index/search/MatchQuery.java +++ b/core/src/main/java/org/elasticsearch/index/search/MatchQuery.java @@ -19,7 +19,16 @@ package org.elasticsearch.index.search; +import static org.apache.lucene.analysis.synonym.SynonymGraphFilter.GRAPH_FLAG; + import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.CachingTokenFilter; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.synonym.GraphTokenStreamFiniteStrings; +import org.apache.lucene.analysis.tokenattributes.FlagsAttribute; +import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; +import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute; +import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute; import org.apache.lucene.index.Term; import org.apache.lucene.queries.ExtendedCommonTermsQuery; import org.apache.lucene.search.BooleanClause; @@ -27,10 +36,12 @@ import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.GraphQuery; import org.apache.lucene.search.MultiPhraseQuery; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.SynonymQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.QueryBuilder; import org.elasticsearch.ElasticsearchException; @@ -47,6 +58,8 @@ import 
org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.support.QueryParsers; import java.io.IOException; +import java.util.ArrayList; +import java.util.List; public class MatchQuery { @@ -112,13 +125,19 @@ public class MatchQuery { } } - /** the default phrase slop */ + /** + * the default phrase slop + */ public static final int DEFAULT_PHRASE_SLOP = 0; - /** the default leniency setting */ + /** + * the default leniency setting + */ public static final boolean DEFAULT_LENIENCY = false; - /** the default zero terms query */ + /** + * the default zero terms query + */ public static final ZeroTermsQuery DEFAULT_ZERO_TERMS_QUERY = ZeroTermsQuery.NONE; protected final QueryShardContext context; @@ -297,11 +316,126 @@ public class MatchQuery { this.mapper = mapper; } + /** + * Creates a query from the analysis chain. Overrides original so all it does is create the token stream and pass that into the + * new {@link #createFieldQuery(TokenStream, Occur, String, boolean, int)} method which has all the original query generation logic. + * + * @param analyzer analyzer used for this query + * @param operator default boolean operator used for this query + * @param field field to create queries against + * @param queryText text to be passed to the analysis chain + * @param quoted true if phrases should be generated when terms occur at more than one position + * @param phraseSlop slop factor for phrase/multiphrase queries + */ + @Override + protected final Query createFieldQuery(Analyzer analyzer, BooleanClause.Occur operator, String field, String queryText, + boolean quoted, int phraseSlop) { + assert operator == BooleanClause.Occur.SHOULD || operator == BooleanClause.Occur.MUST; + + // Use the analyzer to get all the tokens, and then build an appropriate + // query based on the analysis chain. + try (TokenStream source = analyzer.tokenStream(field, queryText)) { + return createFieldQuery(source, operator, field, quoted, phraseSlop); + } catch (IOException e) { + throw new RuntimeException("Error analyzing query text", e); + } + } + + /** + * Creates a query from a token stream. Same logic as {@link #createFieldQuery(Analyzer, Occur, String, String, boolean, int)} + * with additional graph token stream detection. + * + * @param source the token stream to create the query from + * @param operator default boolean operator used for this query + * @param field field to create queries against + * @param quoted true if phrases should be generated when terms occur at more than one position + * @param phraseSlop slop factor for phrase/multiphrase queries + */ + protected final Query createFieldQuery(TokenStream source, BooleanClause.Occur operator, String field, boolean quoted, + int phraseSlop) { + assert operator == BooleanClause.Occur.SHOULD || operator == BooleanClause.Occur.MUST; + + // Build an appropriate query based on the analysis chain. + try (CachingTokenFilter stream = new CachingTokenFilter(source)) { + + TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class); + PositionIncrementAttribute posIncAtt = stream.addAttribute(PositionIncrementAttribute.class); + PositionLengthAttribute posLenAtt = stream.addAttribute(PositionLengthAttribute.class); + FlagsAttribute flagsAtt = stream.addAttribute(FlagsAttribute.class); + + if (termAtt == null) { + return null; + } + + // phase 1: read through the stream and assess the situation: + // counting the number of tokens/positions and marking if we have any synonyms. 
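A self-contained sketch of the "phase 1" pass described in the comment above, using a plain StandardAnalyzer purely for illustration: walk the token stream once, counting tokens and positions, and flag stacked tokens (position increment 0), which is how synonyms are detected before deciding between term, boolean, phrase, or graph queries.

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;

public class TokenStreamStatsSketch {
    public static void main(String[] args) throws Exception {
        try (Analyzer analyzer = new StandardAnalyzer();
             TokenStream stream = analyzer.tokenStream("field", "quick brown fox")) {
            PositionIncrementAttribute posIncAtt = stream.addAttribute(PositionIncrementAttribute.class);
            int numTokens = 0;
            int positionCount = 0;
            boolean hasSynonyms = false;
            stream.reset();
            while (stream.incrementToken()) {
                numTokens++;
                int inc = posIncAtt.getPositionIncrement();
                if (inc != 0) {
                    positionCount += inc; // token advances to a new position
                } else {
                    hasSynonyms = true;   // stacked token: same position as the previous one
                }
            }
            stream.end();
            System.out.println(numTokens + " tokens, " + positionCount + " positions, synonyms=" + hasSynonyms);
        }
    }
}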
+ + int numTokens = 0; + int positionCount = 0; + boolean hasSynonyms = false; + boolean isGraph = false; + + stream.reset(); + while (stream.incrementToken()) { + numTokens++; + int positionIncrement = posIncAtt.getPositionIncrement(); + if (positionIncrement != 0) { + positionCount += positionIncrement; + } else { + hasSynonyms = true; + } + + int positionLength = posLenAtt.getPositionLength(); + if (!isGraph && positionLength > 1 && ((flagsAtt.getFlags() & GRAPH_FLAG) == GRAPH_FLAG)) { + isGraph = true; + } + } + + // phase 2: based on token count, presence of synonyms, and options + // formulate a single term, boolean, or phrase. + + if (numTokens == 0) { + return null; + } else if (numTokens == 1) { + // single term + return analyzeTerm(field, stream); + } else if (isGraph) { + // graph + return analyzeGraph(stream, operator, field, quoted, phraseSlop); + } else if (quoted && positionCount > 1) { + // phrase + if (hasSynonyms) { + // complex phrase with synonyms + return analyzeMultiPhrase(field, stream, phraseSlop); + } else { + // simple phrase + return analyzePhrase(field, stream, phraseSlop); + } + } else { + // boolean + if (positionCount == 1) { + // only one position, with synonyms + return analyzeBoolean(field, stream); + } else { + // complex case: multiple positions + return analyzeMultiBoolean(field, stream, operator); + } + } + } catch (IOException e) { + throw new RuntimeException("Error analyzing query text", e); + } + } + @Override protected Query newTermQuery(Term term) { return blendTermQuery(term, mapper); } + @Override + protected Query newSynonymQuery(Term[] terms) { + return blendTermsQuery(terms, mapper); + } + public Query createPhrasePrefixQuery(String field, String queryText, int phraseSlop, int maxExpansions) { final Query query = createFieldQuery(getAnalyzer(), Occur.MUST, field, queryText, true, phraseSlop); float boost = 1; @@ -319,7 +453,7 @@ public class MatchQuery { Term[] terms = pq.getTerms(); int[] positions = pq.getPositions(); for (int i = 0; i < terms.length; i++) { - prefixQuery.add(new Term[] {terms[i]}, positions[i]); + prefixQuery.add(new Term[]{terms[i]}, positions[i]); } return boost == 1 ? prefixQuery : new BoostQuery(prefixQuery, boost); } else if (innerQuery instanceof MultiPhraseQuery) { @@ -340,11 +474,13 @@ public class MatchQuery { return query; } - public Query createCommonTermsQuery(String field, String queryText, Occur highFreqOccur, Occur lowFreqOccur, float maxTermFrequency, MappedFieldType fieldType) { + public Query createCommonTermsQuery(String field, String queryText, Occur highFreqOccur, Occur lowFreqOccur, float + maxTermFrequency, MappedFieldType fieldType) { Query booleanQuery = createBooleanQuery(field, queryText, lowFreqOccur); if (booleanQuery != null && booleanQuery instanceof BooleanQuery) { BooleanQuery bq = (BooleanQuery) booleanQuery; - ExtendedCommonTermsQuery query = new ExtendedCommonTermsQuery(highFreqOccur, lowFreqOccur, maxTermFrequency, ((BooleanQuery)booleanQuery).isCoordDisabled(), fieldType); + ExtendedCommonTermsQuery query = new ExtendedCommonTermsQuery(highFreqOccur, lowFreqOccur, maxTermFrequency, ( + (BooleanQuery) booleanQuery).isCoordDisabled(), fieldType); for (BooleanClause clause : bq.clauses()) { if (!(clause.getQuery() instanceof TermQuery)) { return booleanQuery; @@ -356,6 +492,34 @@ public class MatchQuery { return booleanQuery; } + + /** + * Creates a query from a graph token stream by extracting all the finite strings from the graph and using them to create the query. 
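The new newSynonymQuery override above routes stacked terms through blendTermsQuery, whose default implementation (shown just below) is Lucene's SynonymQuery. A minimal sketch of what that produces; the field and terms are made up for illustration.

import org.apache.lucene.index.Term;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.SynonymQuery;

public class SynonymQuerySketch {
    public static void main(String[] args) {
        // Both terms sit at the same position in the analyzed query, so they are
        // scored as if they were a single term rather than as a boolean disjunction.
        Query query = new SynonymQuery(
                new Term("body", "fast"),
                new Term("body", "quick"));
        System.out.println(query);
    }
}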
+ */ + protected Query analyzeGraph(TokenStream source, BooleanClause.Occur operator, String field, boolean quoted, int phraseSlop) + throws IOException { + source.reset(); + GraphTokenStreamFiniteStrings graphTokenStreams = new GraphTokenStreamFiniteStrings(); + List tokenStreams = graphTokenStreams.getTokenStreams(source); + + if (tokenStreams.isEmpty()) { + return null; + } + + List queries = new ArrayList<>(tokenStreams.size()); + for (TokenStream ts : tokenStreams) { + Query query = createFieldQuery(ts, operator, field, quoted, phraseSlop); + if (query != null) { + queries.add(query); + } + } + + return new GraphQuery(queries.toArray(new Query[0])); + } + } + + protected Query blendTermsQuery(Term[] terms, MappedFieldType fieldType) { + return new SynonymQuery(terms); } protected Query blendTermQuery(Term term, MappedFieldType fieldType) { diff --git a/core/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java b/core/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java index 9ac7e2e7520..d08d4aaddc1 100644 --- a/core/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java +++ b/core/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java @@ -158,6 +158,10 @@ public class MultiMatchQuery extends MatchQuery { return MultiMatchQuery.super.blendTermQuery(term, fieldType); } + public Query blendTerms(Term[] terms, MappedFieldType fieldType) { + return MultiMatchQuery.super.blendTermsQuery(terms, fieldType); + } + public Query termQuery(MappedFieldType fieldType, Object value) { return MultiMatchQuery.this.termQuery(fieldType, value, lenient); } @@ -223,6 +227,18 @@ public class MultiMatchQuery extends MatchQuery { return queries.isEmpty() ? null : queries; } + @Override + public Query blendTerms(Term[] terms, MappedFieldType fieldType) { + if (blendedFields == null || blendedFields.length == 1) { + return super.blendTerms(terms, fieldType); + } + BytesRef[] values = new BytesRef[terms.length]; + for (int i = 0; i < terms.length; i++) { + values[i] = terms[i].bytes(); + } + return MultiMatchQuery.blendTerms(context, values, commonTermsCutoff, tieBreaker, blendedFields); + } + @Override public Query blendTerm(Term term, MappedFieldType fieldType) { if (blendedFields == null) { @@ -243,44 +259,51 @@ public class MultiMatchQuery extends MatchQuery { } static Query blendTerm(QueryShardContext context, BytesRef value, Float commonTermsCutoff, float tieBreaker, + FieldAndFieldType... blendedFields) { + return blendTerms(context, new BytesRef[] {value}, commonTermsCutoff, tieBreaker, blendedFields); + } + + static Query blendTerms(QueryShardContext context, BytesRef[] values, Float commonTermsCutoff, float tieBreaker, FieldAndFieldType... 
blendedFields) { List queries = new ArrayList<>(); - Term[] terms = new Term[blendedFields.length]; - float[] blendedBoost = new float[blendedFields.length]; + Term[] terms = new Term[blendedFields.length * values.length]; + float[] blendedBoost = new float[blendedFields.length * values.length]; int i = 0; for (FieldAndFieldType ft : blendedFields) { - Query query; - try { - query = ft.fieldType.termQuery(value, context); - } catch (IllegalArgumentException e) { - // the query expects a certain class of values such as numbers - // of ip addresses and the value can't be parsed, so ignore this - // field - continue; - } catch (ElasticsearchParseException parseException) { - // date fields throw an ElasticsearchParseException with the - // underlying IAE as the cause, ignore this field if that is - // the case - if (parseException.getCause() instanceof IllegalArgumentException) { + for (BytesRef term : values) { + Query query; + try { + query = ft.fieldType.termQuery(term, context); + } catch (IllegalArgumentException e) { + // the query expects a certain class of values such as numbers + // of ip addresses and the value can't be parsed, so ignore this + // field continue; + } catch (ElasticsearchParseException parseException) { + // date fields throw an ElasticsearchParseException with the + // underlying IAE as the cause, ignore this field if that is + // the case + if (parseException.getCause() instanceof IllegalArgumentException) { + continue; + } + throw parseException; } - throw parseException; - } - float boost = ft.boost; - while (query instanceof BoostQuery) { - BoostQuery bq = (BoostQuery) query; - query = bq.getQuery(); - boost *= bq.getBoost(); - } - if (query.getClass() == TermQuery.class) { - terms[i] = ((TermQuery) query).getTerm(); - blendedBoost[i] = boost; - i++; - } else { - if (boost != 1f) { - query = new BoostQuery(query, boost); + float boost = ft.boost; + while (query instanceof BoostQuery) { + BoostQuery bq = (BoostQuery) query; + query = bq.getQuery(); + boost *= bq.getBoost(); + } + if (query.getClass() == TermQuery.class) { + terms[i] = ((TermQuery) query).getTerm(); + blendedBoost[i] = boost; + i++; + } else { + if (boost != 1f) { + query = new BoostQuery(query, boost); + } + queries.add(query); } - queries.add(query); } } if (i > 0) { @@ -317,6 +340,14 @@ public class MultiMatchQuery extends MatchQuery { return queryBuilder.blendTerm(term, fieldType); } + @Override + protected Query blendTermsQuery(Term[] terms, MappedFieldType fieldType) { + if (queryBuilder == null) { + return super.blendTermsQuery(terms, fieldType); + } + return queryBuilder.blendTerms(terms, fieldType); + } + static final class FieldAndFieldType { final MappedFieldType fieldType; final float boost; diff --git a/core/src/main/java/org/elasticsearch/index/search/geo/GeoDistanceRangeQuery.java b/core/src/main/java/org/elasticsearch/index/search/geo/GeoDistanceRangeQuery.java deleted file mode 100644 index 16e86f083b3..00000000000 --- a/core/src/main/java/org/elasticsearch/index/search/geo/GeoDistanceRangeQuery.java +++ /dev/null @@ -1,236 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.search.geo; - -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.ConstantScoreScorer; -import org.apache.lucene.search.ConstantScoreWeight; -import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.Scorer; -import org.apache.lucene.search.TwoPhaseIterator; -import org.apache.lucene.search.Weight; -import org.apache.lucene.util.NumericUtils; -import org.elasticsearch.common.geo.GeoDistance; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.unit.DistanceUnit; -import org.elasticsearch.index.fielddata.IndexGeoPointFieldData; -import org.elasticsearch.index.fielddata.MultiGeoPointValues; -import org.elasticsearch.index.mapper.LegacyGeoPointFieldMapper; -import org.elasticsearch.index.query.QueryShardContext; - -import java.io.IOException; - -/** - * Query geo_point fields by distance ranges. Used for indexes created prior to 2.2 - * @deprecated - */ -@Deprecated -public class GeoDistanceRangeQuery extends Query { - - private final double lat; - private final double lon; - - private final double inclusiveLowerPoint; // in meters - private final double inclusiveUpperPoint; // in meters - - private final GeoDistance geoDistance; - private final GeoDistance.FixedSourceDistance fixedSourceDistance; - private GeoDistance.DistanceBoundingCheck distanceBoundingCheck; - private final Query boundingBoxFilter; - - private final IndexGeoPointFieldData indexFieldData; - - public GeoDistanceRangeQuery(GeoPoint point, Double lowerVal, Double upperVal, boolean includeLower, - boolean includeUpper, GeoDistance geoDistance, - LegacyGeoPointFieldMapper.LegacyGeoPointFieldType fieldType, - IndexGeoPointFieldData indexFieldData, String optimizeBbox, QueryShardContext context) { - this.lat = point.lat(); - this.lon = point.lon(); - this.geoDistance = geoDistance; - this.indexFieldData = indexFieldData; - - this.fixedSourceDistance = geoDistance.fixedSourceDistance(lat, lon, DistanceUnit.DEFAULT); - - if (lowerVal != null) { - double f = lowerVal.doubleValue(); - long i = NumericUtils.doubleToSortableLong(f); - inclusiveLowerPoint = NumericUtils.sortableLongToDouble(includeLower ? i : (i + 1L)); - } else { - inclusiveLowerPoint = Double.NEGATIVE_INFINITY; - } - if (upperVal != null) { - double f = upperVal.doubleValue(); - long i = NumericUtils.doubleToSortableLong(f); - inclusiveUpperPoint = NumericUtils.sortableLongToDouble(includeUpper ? 
i : (i - 1L)); - } else { - inclusiveUpperPoint = Double.POSITIVE_INFINITY; - // we disable bounding box in this case, since the upper point is all and we create bounding box up to the - // upper point it will effectively include all - // TODO we can create a bounding box up to from and "not" it - optimizeBbox = null; - } - - if (optimizeBbox != null && !"none".equals(optimizeBbox)) { - distanceBoundingCheck = GeoDistance.distanceBoundingCheck(lat, lon, inclusiveUpperPoint, DistanceUnit.DEFAULT); - if ("memory".equals(optimizeBbox)) { - boundingBoxFilter = null; - } else if ("indexed".equals(optimizeBbox)) { - boundingBoxFilter = LegacyIndexedGeoBoundingBoxQuery.create(distanceBoundingCheck.topLeft(), - distanceBoundingCheck.bottomRight(), fieldType, context); - distanceBoundingCheck = GeoDistance.ALWAYS_INSTANCE; // fine, we do the bounding box check using the filter - } else { - throw new IllegalArgumentException("type [" + optimizeBbox + "] for bounding box optimization not supported"); - } - } else { - distanceBoundingCheck = GeoDistance.ALWAYS_INSTANCE; - boundingBoxFilter = null; - } - } - - public double lat() { - return lat; - } - - public double lon() { - return lon; - } - - public GeoDistance geoDistance() { - return geoDistance; - } - - public double minInclusiveDistance() { - return inclusiveLowerPoint; - } - - public double maxInclusiveDistance() { - return inclusiveUpperPoint; - } - - public String fieldName() { - return indexFieldData.getFieldName(); - } - - @Override - public Query rewrite(IndexReader reader) throws IOException { - return super.rewrite(reader); - } - - @Override - public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { - final Weight boundingBoxWeight; - if (boundingBoxFilter != null) { - boundingBoxWeight = searcher.createNormalizedWeight(boundingBoxFilter, false); - } else { - boundingBoxWeight = null; - } - return new ConstantScoreWeight(this) { - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - final DocIdSetIterator approximation; - if (boundingBoxWeight != null) { - Scorer s = boundingBoxWeight.scorer(context); - if (s == null) { - // if the approximation does not match anything, we're done - return null; - } - approximation = s.iterator(); - } else { - approximation = DocIdSetIterator.all(context.reader().maxDoc()); - } - final MultiGeoPointValues values = indexFieldData.load(context).getGeoPointValues(); - final TwoPhaseIterator twoPhaseIterator = new TwoPhaseIterator(approximation) { - @Override - public boolean matches() throws IOException { - final int doc = approximation.docID(); - values.setDocument(doc); - final int length = values.count(); - for (int i = 0; i < length; i++) { - GeoPoint point = values.valueAt(i); - if (distanceBoundingCheck.isWithin(point.lat(), point.lon())) { - double d = fixedSourceDistance.calculate(point.lat(), point.lon()); - if (d >= inclusiveLowerPoint && d <= inclusiveUpperPoint) { - return true; - } - } - } - return false; - } - - @Override - public float matchCost() { - if (distanceBoundingCheck == GeoDistance.ALWAYS_INSTANCE) { - return 0.0f; - } else { - // TODO: is this right (up to 4 comparisons from GeoDistance.SimpleDistanceBoundingCheck)? 
- return 4.0f; - } - } - }; - return new ConstantScoreScorer(this, score(), twoPhaseIterator); - } - }; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (sameClassAs(o) == false) return false; - - GeoDistanceRangeQuery filter = (GeoDistanceRangeQuery) o; - - if (Double.compare(filter.inclusiveLowerPoint, inclusiveLowerPoint) != 0) return false; - if (Double.compare(filter.inclusiveUpperPoint, inclusiveUpperPoint) != 0) return false; - if (Double.compare(filter.lat, lat) != 0) return false; - if (Double.compare(filter.lon, lon) != 0) return false; - if (!indexFieldData.getFieldName().equals(filter.indexFieldData.getFieldName())) - return false; - if (geoDistance != filter.geoDistance) return false; - - return true; - } - - @Override - public String toString(String field) { - return "GeoDistanceRangeQuery(" + indexFieldData.getFieldName() + ", " + geoDistance + ", [" - + inclusiveLowerPoint + " - " + inclusiveUpperPoint + "], " + lat + ", " + lon + ")"; - } - - @Override - public int hashCode() { - int result = classHash(); - long temp; - temp = lat != +0.0d ? Double.doubleToLongBits(lat) : 0L; - result = 31 * result + Long.hashCode(temp); - temp = lon != +0.0d ? Double.doubleToLongBits(lon) : 0L; - result = 31 * result + Long.hashCode(temp); - temp = inclusiveLowerPoint != +0.0d ? Double.doubleToLongBits(inclusiveLowerPoint) : 0L; - result = 31 * result + Long.hashCode(temp); - temp = inclusiveUpperPoint != +0.0d ? Double.doubleToLongBits(inclusiveUpperPoint) : 0L; - result = 31 * result + Long.hashCode(temp); - result = 31 * result + (geoDistance != null ? geoDistance.hashCode() : 0); - result = 31 * result + indexFieldData.getFieldName().hashCode(); - return result; - } - -} diff --git a/core/src/main/java/org/elasticsearch/index/search/geo/GeoPolygonQuery.java b/core/src/main/java/org/elasticsearch/index/search/geo/GeoPolygonQuery.java deleted file mode 100644 index bb39d752c0e..00000000000 --- a/core/src/main/java/org/elasticsearch/index/search/geo/GeoPolygonQuery.java +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
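The deprecated GeoDistanceRangeQuery deleted above filtered on doc values behind a TwoPhaseIterator; with this change the same use case is served by LatLonPoint-backed fields (see the LatLonPointFieldMapper registration later in this diff). A minimal sketch of the replacement primitive, with an illustrative field name and coordinates:

import org.apache.lucene.document.LatLonPoint;
import org.apache.lucene.search.Query;

public class LatLonDistanceSketch {
    public static void main(String[] args) {
        // Matches documents whose "location" point lies within 10 km of central Paris.
        Query query = LatLonPoint.newDistanceQuery("location", 48.8566, 2.3522, 10_000);
        System.out.println(query);
    }
}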
- */ - -package org.elasticsearch.index.search.geo; - -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.RandomAccessWeight; -import org.apache.lucene.search.Weight; -import org.apache.lucene.util.Bits; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.index.fielddata.IndexGeoPointFieldData; -import org.elasticsearch.index.fielddata.MultiGeoPointValues; - -import java.io.IOException; -import java.util.Arrays; - -public class GeoPolygonQuery extends Query { - - private final GeoPoint[] points; - - private final IndexGeoPointFieldData indexFieldData; - - public GeoPolygonQuery(IndexGeoPointFieldData indexFieldData, GeoPoint...points) { - this.points = points; - this.indexFieldData = indexFieldData; - } - - public GeoPoint[] points() { - return points; - } - - public String fieldName() { - return indexFieldData.getFieldName(); - } - - @Override - public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { - return new RandomAccessWeight(this) { - @Override - protected Bits getMatchingDocs(LeafReaderContext context) throws IOException { - final int maxDoc = context.reader().maxDoc(); - final MultiGeoPointValues values = indexFieldData.load(context).getGeoPointValues(); - return new Bits() { - - private boolean pointInPolygon(GeoPoint[] points, double lat, double lon) { - boolean inPoly = false; - - for (int i = 1; i < points.length; i++) { - if (points[i].lon() < lon && points[i-1].lon() >= lon - || points[i-1].lon() < lon && points[i].lon() >= lon) { - if (points[i].lat() + (lon - points[i].lon()) / - (points[i-1].lon() - points[i].lon()) * (points[i-1].lat() - points[i].lat()) < lat) { - inPoly = !inPoly; - } - } - } - return inPoly; - } - - @Override - public boolean get(int doc) { - values.setDocument(doc); - final int length = values.count(); - for (int i = 0; i < length; i++) { - GeoPoint point = values.valueAt(i); - if (pointInPolygon(points, point.lat(), point.lon())) { - return true; - } - } - return false; - } - - @Override - public int length() { - return maxDoc; - } - - }; - } - }; - } - - @Override - public String toString(String field) { - StringBuilder sb = new StringBuilder("GeoPolygonQuery("); - sb.append(indexFieldData.getFieldName()); - sb.append(", ").append(Arrays.toString(points)).append(')'); - return sb.toString(); - } - - @Override - public boolean equals(Object obj) { - if (sameClassAs(obj) == false) { - return false; - } - GeoPolygonQuery that = (GeoPolygonQuery) obj; - return indexFieldData.getFieldName().equals(that.indexFieldData.getFieldName()) - && Arrays.equals(points, that.points); - } - - @Override - public int hashCode() { - int h = classHash(); - h = 31 * h + indexFieldData.getFieldName().hashCode(); - h = 31 * h + Arrays.hashCode(points); - return h; - } -} diff --git a/core/src/main/java/org/elasticsearch/index/search/geo/LegacyInMemoryGeoBoundingBoxQuery.java b/core/src/main/java/org/elasticsearch/index/search/geo/LegacyInMemoryGeoBoundingBoxQuery.java deleted file mode 100644 index 2d8ea7af49d..00000000000 --- a/core/src/main/java/org/elasticsearch/index/search/geo/LegacyInMemoryGeoBoundingBoxQuery.java +++ /dev/null @@ -1,168 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.search.geo; - -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.RandomAccessWeight; -import org.apache.lucene.search.Weight; -import org.apache.lucene.util.Bits; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.index.fielddata.IndexGeoPointFieldData; -import org.elasticsearch.index.fielddata.MultiGeoPointValues; - -import java.io.IOException; -import java.util.Objects; - -/** - * - * @deprecated This query is no longer used for geo_point indexes created after version 2.1 - */ -@Deprecated -public class LegacyInMemoryGeoBoundingBoxQuery extends Query { - - private final GeoPoint topLeft; - private final GeoPoint bottomRight; - - private final IndexGeoPointFieldData indexFieldData; - - public LegacyInMemoryGeoBoundingBoxQuery(GeoPoint topLeft, GeoPoint bottomRight, IndexGeoPointFieldData indexFieldData) { - this.topLeft = topLeft; - this.bottomRight = bottomRight; - this.indexFieldData = indexFieldData; - } - - public GeoPoint topLeft() { - return topLeft; - } - - public GeoPoint bottomRight() { - return bottomRight; - } - - public String fieldName() { - return indexFieldData.getFieldName(); - } - - @Override - public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { - return new RandomAccessWeight(this) { - @Override - protected Bits getMatchingDocs(LeafReaderContext context) throws IOException { - final int maxDoc = context.reader().maxDoc(); - final MultiGeoPointValues values = indexFieldData.load(context).getGeoPointValues(); - // checks to see if bounding box crosses 180 degrees - if (topLeft.lon() > bottomRight.lon()) { - return new Meridian180GeoBoundingBoxBits(maxDoc, values, topLeft, bottomRight); - } else { - return new GeoBoundingBoxBits(maxDoc, values, topLeft, bottomRight); - } - } - }; - } - - @Override - public String toString(String field) { - return "GeoBoundingBoxFilter(" + indexFieldData.getFieldName() + ", " + topLeft + ", " + bottomRight + ")"; - } - - @Override - public boolean equals(Object obj) { - if (sameClassAs(obj) == false) { - return false; - } - LegacyInMemoryGeoBoundingBoxQuery other = (LegacyInMemoryGeoBoundingBoxQuery) obj; - return fieldName().equalsIgnoreCase(other.fieldName()) - && topLeft.equals(other.topLeft) - && bottomRight.equals(other.bottomRight); - } - - @Override - public int hashCode() { - return Objects.hash(classHash(), fieldName(), topLeft, bottomRight); - } - - private static class Meridian180GeoBoundingBoxBits implements Bits { - private final int maxDoc; - private final MultiGeoPointValues values; - private final GeoPoint topLeft; - private final GeoPoint bottomRight; - - public Meridian180GeoBoundingBoxBits(int maxDoc, MultiGeoPointValues values, GeoPoint topLeft, GeoPoint bottomRight) { - this.maxDoc = maxDoc; - this.values = values; 
- this.topLeft = topLeft; - this.bottomRight = bottomRight; - } - - @Override - public boolean get(int doc) { - values.setDocument(doc); - final int length = values.count(); - for (int i = 0; i < length; i++) { - GeoPoint point = values.valueAt(i); - if (((topLeft.lon() <= point.lon() || bottomRight.lon() >= point.lon())) && - (topLeft.lat() >= point.lat() && bottomRight.lat() <= point.lat())) { - return true; - } - } - return false; - } - - @Override - public int length() { - return maxDoc; - } - } - - private static class GeoBoundingBoxBits implements Bits { - private final int maxDoc; - private final MultiGeoPointValues values; - private final GeoPoint topLeft; - private final GeoPoint bottomRight; - - public GeoBoundingBoxBits(int maxDoc, MultiGeoPointValues values, GeoPoint topLeft, GeoPoint bottomRight) { - this.maxDoc = maxDoc; - this.values = values; - this.topLeft = topLeft; - this.bottomRight = bottomRight; - } - - @Override - public boolean get(int doc) { - values.setDocument(doc); - final int length = values.count(); - for (int i = 0; i < length; i++) { - GeoPoint point = values.valueAt(i); - if (topLeft.lon() <= point.lon() && bottomRight.lon() >= point.lon() - && topLeft.lat() >= point.lat() && bottomRight.lat() <= point.lat()) { - return true; - } - } - return false; - } - - @Override - public int length() { - return maxDoc; - } - } -} diff --git a/core/src/main/java/org/elasticsearch/index/search/geo/LegacyIndexedGeoBoundingBoxQuery.java b/core/src/main/java/org/elasticsearch/index/search/geo/LegacyIndexedGeoBoundingBoxQuery.java deleted file mode 100644 index 7b44ac62ede..00000000000 --- a/core/src/main/java/org/elasticsearch/index/search/geo/LegacyIndexedGeoBoundingBoxQuery.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
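Similarly, the in-memory and indexed bounding-box implementations removed here (including the dateline-crossing case handled by Meridian180GeoBoundingBoxBits) are covered by the point-based box query. A sketch with illustrative bounds:

import org.apache.lucene.document.LatLonPoint;
import org.apache.lucene.search.Query;

public class LatLonBoxSketch {
    public static void main(String[] args) {
        // Arguments are minLat, maxLat, minLon, maxLon; a box that crosses the dateline
        // is handled by the query itself rather than by a dedicated Bits implementation.
        Query query = LatLonPoint.newBoxQuery("location", 40.0, 41.0, -74.5, -73.5);
        System.out.println(query);
    }
}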
- */ - -package org.elasticsearch.index.search.geo; - -import org.apache.lucene.search.BooleanClause.Occur; -import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.ConstantScoreQuery; -import org.apache.lucene.search.Query; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.index.mapper.LegacyGeoPointFieldMapper; -import org.elasticsearch.index.query.QueryShardContext; - -/** - * - * @deprecated This query is no longer used for geo_point indexes created after version 2.1 - */ -@Deprecated -public class LegacyIndexedGeoBoundingBoxQuery { - - public static Query create(GeoPoint topLeft, GeoPoint bottomRight, - LegacyGeoPointFieldMapper.LegacyGeoPointFieldType fieldType, QueryShardContext context) { - if (!fieldType.isLatLonEnabled()) { - throw new IllegalArgumentException("lat/lon is not enabled (indexed) for field [" + fieldType.name() - + "], can't use indexed filter on it"); - } - //checks to see if bounding box crosses 180 degrees - if (topLeft.lon() > bottomRight.lon()) { - return westGeoBoundingBoxFilter(topLeft, bottomRight, fieldType, context); - } else { - return eastGeoBoundingBoxFilter(topLeft, bottomRight, fieldType, context); - } - } - - private static Query westGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight, - LegacyGeoPointFieldMapper.LegacyGeoPointFieldType fieldType, QueryShardContext context) { - BooleanQuery.Builder filter = new BooleanQuery.Builder(); - filter.setMinimumNumberShouldMatch(1); - filter.add(fieldType.lonFieldType().rangeQuery(null, bottomRight.lon(), true, true, context), Occur.SHOULD); - filter.add(fieldType.lonFieldType().rangeQuery(topLeft.lon(), null, true, true, context), Occur.SHOULD); - filter.add(fieldType.latFieldType().rangeQuery(bottomRight.lat(), topLeft.lat(), true, true, context), Occur.MUST); - return new ConstantScoreQuery(filter.build()); - } - - private static Query eastGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight, - LegacyGeoPointFieldMapper.LegacyGeoPointFieldType fieldType, QueryShardContext context) { - BooleanQuery.Builder filter = new BooleanQuery.Builder(); - filter.add(fieldType.lonFieldType().rangeQuery(topLeft.lon(), bottomRight.lon(), true, true, context), Occur.MUST); - filter.add(fieldType.latFieldType().rangeQuery(bottomRight.lat(), topLeft.lat(), true, true, context), Occur.MUST); - return new ConstantScoreQuery(filter.build()); - } -} diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 06ea008fdbb..613fec5523c 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.shard; import org.apache.logging.log4j.Logger; -import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.index.CheckIndex; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.IndexCommit; @@ -125,7 +124,6 @@ import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.repositories.Repository; import org.elasticsearch.search.suggest.completion.CompletionFieldStats; import org.elasticsearch.search.suggest.completion.CompletionStats; -import org.elasticsearch.search.suggest.completion2x.Completion090PostingsFormat; import org.elasticsearch.threadpool.ThreadPool; import java.io.FileNotFoundException; @@ -753,10 +751,6 @@ public class IndexShard extends AbstractIndexShardComponent 
implements IndicesCl CompletionStats completionStats = new CompletionStats(); try (final Engine.Searcher currentSearcher = acquireSearcher("completion_stats")) { completionStats.add(CompletionFieldStats.completionStats(currentSearcher.reader(), fields)); - // Necessary for 2.x shards: - Completion090PostingsFormat postingsFormat = ((Completion090PostingsFormat) - PostingsFormat.forName(Completion090PostingsFormat.CODEC_NAME)); - completionStats.add(postingsFormat.completionStats(currentSearcher.reader(), fields)); } return completionStats; } diff --git a/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java b/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java index 13ea660b4f0..7dd5cebcb2c 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java +++ b/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java @@ -155,7 +155,7 @@ public class TranslogRecoveryPerformer { // we set canHaveDuplicates to true all the time such that we de-optimze the translog case and ensure that all // autoGeneratedID docs that are coming from the primary are updated correctly. Engine.Index engineIndex = IndexShard.prepareIndex(docMapper(index.type()), source(shardId.getIndexName(), index.type(), index.id(), index.source()) - .routing(index.routing()).parent(index.parent()).timestamp(index.timestamp()).ttl(index.ttl()), index.seqNo(), + .routing(index.routing()).parent(index.parent()), index.seqNo(), index.version(), index.versionType().versionTypeForReplicationAndRecovery(), origin, index.getAutoGeneratedIdTimestamp(), true); maybeAddMappingUpdate(engineIndex.type(), engineIndex.parsedDoc().dynamicMappingsUpdate(), engineIndex.id(), allowMappingUpdates); if (logger.isTraceEnabled()) { diff --git a/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java b/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java index 3762c004036..b6bc8c60815 100644 --- a/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java +++ b/core/src/main/java/org/elasticsearch/index/store/FsDirectoryService.java @@ -84,7 +84,7 @@ public class FsDirectoryService extends DirectoryService { protected Directory newFSDirectory(Path location, LockFactory lockFactory) throws IOException { final String storeType = indexSettings.getSettings().get(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), IndexModule.Type.FS.getSettingsKey()); - if (IndexModule.Type.FS.match(storeType) || IndexModule.Type.DEFAULT.match(storeType)) { + if (IndexModule.Type.FS.match(storeType)) { return FSDirectory.open(location, lockFactory); // use lucene defaults } else if (IndexModule.Type.SIMPLEFS.match(storeType)) { return new SimpleFSDirectory(location, lockFactory); diff --git a/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java b/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java index 671178dfcc6..520cb13390f 100644 --- a/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java +++ b/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java @@ -47,7 +47,6 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceFieldMapper; -import org.elasticsearch.index.mapper.StringFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper; import 
org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.UidFieldMapper; @@ -163,8 +162,7 @@ public class TermVectorsService { private static boolean isValidField(MappedFieldType fieldType) { // must be a string - if (fieldType instanceof StringFieldMapper.StringFieldType == false - && fieldType instanceof KeywordFieldMapper.KeywordFieldType == false + if (fieldType instanceof KeywordFieldMapper.KeywordFieldType == false && fieldType instanceof TextFieldMapper.TextFieldType == false) { return false; } diff --git a/core/src/main/java/org/elasticsearch/index/translog/Translog.java b/core/src/main/java/org/elasticsearch/index/translog/Translog.java index dd5a633e7fa..92220fa1922 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/Translog.java +++ b/core/src/main/java/org/elasticsearch/index/translog/Translog.java @@ -808,15 +808,11 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC public final BytesReference source; public final String routing; public final String parent; - public final long timestamp; - public final long ttl; - public Source(BytesReference source, String routing, String parent, long timestamp, long ttl) { + public Source(BytesReference source, String routing, String parent) { this.source = source; this.routing = routing; this.parent = parent; - this.timestamp = timestamp; - this.ttl = ttl; } } @@ -834,8 +830,6 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC private final BytesReference source; private final String routing; private final String parent; - private final long timestamp; - private final long ttl; public Index(StreamInput in) throws IOException { final int format = in.readVInt(); // SERIALIZATION_FORMAT @@ -846,8 +840,10 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC routing = in.readOptionalString(); parent = in.readOptionalString(); this.version = in.readLong(); - this.timestamp = in.readLong(); - this.ttl = in.readLong(); + if (format < FORMAT_SEQ_NO) { + in.readLong(); // timestamp + in.readLong(); // ttl + } this.versionType = VersionType.fromValue(in.readByte()); assert versionType.validateVersionForWrites(this.version); if (format >= FORMAT_AUTO_GENERATED_IDS) { @@ -868,8 +864,6 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC this.parent = index.parent(); this.seqNo = indexResult.getSeqNo(); this.version = indexResult.getVersion(); - this.timestamp = index.timestamp(); - this.ttl = index.ttl(); this.versionType = index.versionType(); this.autoGeneratedIdTimestamp = index.getAutoGeneratedIdTimestamp(); } @@ -883,8 +877,6 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC versionType = VersionType.INTERNAL; routing = null; parent = null; - timestamp = 0; - ttl = 0; autoGeneratedIdTimestamp = -1; } @@ -914,14 +906,6 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC return this.parent; } - public long timestamp() { - return this.timestamp; - } - - public long ttl() { - return this.ttl; - } - public BytesReference source() { return this.source; } @@ -940,7 +924,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC @Override public Source getSource() { - return new Source(source, routing, parent, timestamp, ttl); + return new Source(source, routing, parent); } @Override @@ -952,8 +936,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC 
out.writeOptionalString(routing); out.writeOptionalString(parent); out.writeLong(version); - out.writeLong(timestamp); - out.writeLong(ttl); + out.writeByte(versionType.getValue()); out.writeLong(autoGeneratedIdTimestamp); out.writeVLong(seqNo); @@ -972,8 +955,6 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC if (version != index.version || seqNo != index.seqNo || - timestamp != index.timestamp || - ttl != index.ttl || id.equals(index.id) == false || type.equals(index.type) == false || versionType != index.versionType || @@ -998,9 +979,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC result = 31 * result + source.hashCode(); result = 31 * result + (routing != null ? routing.hashCode() : 0); result = 31 * result + (parent != null ? parent.hashCode() : 0); - result = 31 * result + Long.hashCode(timestamp); result = 31 * result + Long.hashCode(autoGeneratedIdTimestamp); - result = 31 * result + Long.hashCode(ttl); return result; } diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesModule.java b/core/src/main/java/org/elasticsearch/indices/IndicesModule.java index c95a8866544..7e701ff28c0 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesModule.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesModule.java @@ -31,7 +31,6 @@ import org.elasticsearch.index.mapper.BooleanFieldMapper; import org.elasticsearch.index.mapper.CompletionFieldMapper; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.FieldNamesFieldMapper; -import org.elasticsearch.index.mapper.GeoPointFieldMapper; import org.elasticsearch.index.mapper.GeoShapeFieldMapper; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.IndexFieldMapper; @@ -43,13 +42,11 @@ import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.ObjectMapper; import org.elasticsearch.index.mapper.ParentFieldMapper; +import org.elasticsearch.index.mapper.RangeFieldMapper; import org.elasticsearch.index.mapper.RoutingFieldMapper; import org.elasticsearch.index.mapper.ScaledFloatFieldMapper; import org.elasticsearch.index.mapper.SourceFieldMapper; -import org.elasticsearch.index.mapper.StringFieldMapper; -import org.elasticsearch.index.mapper.TTLFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper; -import org.elasticsearch.index.mapper.TimestampFieldMapper; import org.elasticsearch.index.mapper.TokenCountFieldMapper; import org.elasticsearch.index.mapper.TypeFieldMapper; import org.elasticsearch.index.mapper.UidFieldMapper; @@ -64,7 +61,6 @@ import org.elasticsearch.indices.recovery.PeerRecoveryTargetService; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.indices.store.IndicesStore; import org.elasticsearch.indices.store.TransportNodesListShardStoreMetaData; -import org.elasticsearch.indices.ttl.IndicesTTLService; import org.elasticsearch.plugins.MapperPlugin; import java.util.ArrayList; @@ -101,19 +97,20 @@ public class IndicesModule extends AbstractModule { for (NumberFieldMapper.NumberType type : NumberFieldMapper.NumberType.values()) { mappers.put(type.typeName(), new NumberFieldMapper.TypeParser(type)); } + for (RangeFieldMapper.RangeType type : RangeFieldMapper.RangeType.values()) { + mappers.put(type.typeName(), new RangeFieldMapper.TypeParser(type)); + } mappers.put(BooleanFieldMapper.CONTENT_TYPE, new 
BooleanFieldMapper.TypeParser()); mappers.put(BinaryFieldMapper.CONTENT_TYPE, new BinaryFieldMapper.TypeParser()); mappers.put(DateFieldMapper.CONTENT_TYPE, new DateFieldMapper.TypeParser()); mappers.put(IpFieldMapper.CONTENT_TYPE, new IpFieldMapper.TypeParser()); mappers.put(ScaledFloatFieldMapper.CONTENT_TYPE, new ScaledFloatFieldMapper.TypeParser()); - mappers.put(StringFieldMapper.CONTENT_TYPE, new StringFieldMapper.TypeParser()); mappers.put(TextFieldMapper.CONTENT_TYPE, new TextFieldMapper.TypeParser()); mappers.put(KeywordFieldMapper.CONTENT_TYPE, new KeywordFieldMapper.TypeParser()); mappers.put(TokenCountFieldMapper.CONTENT_TYPE, new TokenCountFieldMapper.TypeParser()); mappers.put(ObjectMapper.CONTENT_TYPE, new ObjectMapper.TypeParser()); mappers.put(ObjectMapper.NESTED_CONTENT_TYPE, new ObjectMapper.TypeParser()); mappers.put(CompletionFieldMapper.CONTENT_TYPE, new CompletionFieldMapper.TypeParser()); - mappers.put(GeoPointFieldMapper.CONTENT_TYPE, new GeoPointFieldMapper.TypeParser()); mappers.put(LatLonPointFieldMapper.CONTENT_TYPE, new LatLonPointFieldMapper.TypeParser()); if (ShapesAvailability.JTS_AVAILABLE && ShapesAvailability.SPATIAL4J_AVAILABLE) { mappers.put(GeoShapeFieldMapper.CONTENT_TYPE, new GeoShapeFieldMapper.TypeParser()); @@ -143,8 +140,6 @@ public class IndicesModule extends AbstractModule { metadataMappers.put(SourceFieldMapper.NAME, new SourceFieldMapper.TypeParser()); metadataMappers.put(TypeFieldMapper.NAME, new TypeFieldMapper.TypeParser()); metadataMappers.put(AllFieldMapper.NAME, new AllFieldMapper.TypeParser()); - metadataMappers.put(TimestampFieldMapper.NAME, new TimestampFieldMapper.TypeParser()); - metadataMappers.put(TTLFieldMapper.NAME, new TTLFieldMapper.TypeParser()); metadataMappers.put(VersionFieldMapper.NAME, new VersionFieldMapper.TypeParser()); metadataMappers.put(ParentFieldMapper.NAME, new ParentFieldMapper.TypeParser()); metadataMappers.put(SeqNoFieldMapper.NAME, new SeqNoFieldMapper.TypeParser()); @@ -172,7 +167,6 @@ public class IndicesModule extends AbstractModule { bind(IndicesClusterStateService.class).asEagerSingleton(); bind(SyncedFlushService.class).asEagerSingleton(); bind(TransportNodesListShardStoreMetaData.class).asEagerSingleton(); - bind(IndicesTTLService.class).asEagerSingleton(); bind(GlobalCheckpointSyncAction.class).asEagerSingleton(); } diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesService.java b/core/src/main/java/org/elasticsearch/indices/IndicesService.java index 8ccbe2597ae..664743efaef 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -134,7 +134,6 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Consumer; import java.util.function.Function; -import java.util.function.Predicate; import java.util.function.Supplier; import java.util.stream.Collectors; @@ -427,8 +426,7 @@ public class IndicesService extends AbstractLifecycleComponent Consumer globalCheckpointSyncer, IndexingOperationListener... 
indexingOperationListeners) throws IOException { final Index index = indexMetaData.getIndex(); - final Predicate indexNameMatcher = (indexExpression) -> indexNameExpressionResolver.matchesIndex(index.getName(), indexExpression, clusterService.state()); - final IndexSettings idxSettings = new IndexSettings(indexMetaData, this.settings, indexNameMatcher, indexScopeSetting); + final IndexSettings idxSettings = new IndexSettings(indexMetaData, this.settings, indexScopeSetting); logger.debug("creating Index [{}], shards [{}]/[{}{}] - reason [{}]", indexMetaData.getIndex(), idxSettings.getNumberOfShards(), @@ -466,9 +464,7 @@ public class IndicesService extends AbstractLifecycleComponent * Note: the returned {@link MapperService} should be closed when unneeded. */ public synchronized MapperService createIndexMapperService(IndexMetaData indexMetaData) throws IOException { - final Index index = indexMetaData.getIndex(); - final Predicate indexNameMatcher = (indexExpression) -> indexNameExpressionResolver.matchesIndex(index.getName(), indexExpression, clusterService.state()); - final IndexSettings idxSettings = new IndexSettings(indexMetaData, this.settings, indexNameMatcher, indexScopeSetting); + final IndexSettings idxSettings = new IndexSettings(indexMetaData, this.settings, indexScopeSetting); final IndexModule indexModule = new IndexModule(idxSettings, analysisRegistry); pluginsService.onIndexModule(indexModule); return indexModule.newIndexMapperService(mapperRegistry); diff --git a/core/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java b/core/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java index 5dd0203d617..89c9421198d 100644 --- a/core/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java +++ b/core/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java @@ -152,13 +152,12 @@ import java.util.List; */ public final class AnalysisModule { static { - Settings build = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .build(); + Settings build = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).put(IndexMetaData + .SETTING_NUMBER_OF_REPLICAS, 1).put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).build(); IndexMetaData metaData = IndexMetaData.builder("_na_").settings(build).build(); NA_INDEX_SETTINGS = new IndexSettings(metaData, Settings.EMPTY); } + private static final IndexSettings NA_INDEX_SETTINGS; private final HunspellService hunspellService; @@ -171,8 +170,8 @@ public final class AnalysisModule { NamedRegistry> tokenFilters = setupTokenFilters(plugins, hunspellService); NamedRegistry> tokenizers = setupTokenizers(plugins); NamedRegistry>> analyzers = setupAnalyzers(plugins); - analysisRegistry = new AnalysisRegistry(environment, charFilters.getRegistry(), tokenFilters.getRegistry(), - tokenizers.getRegistry(), analyzers.getRegistry()); + analysisRegistry = new AnalysisRegistry(environment, charFilters.getRegistry(), tokenFilters.getRegistry(), tokenizers + .getRegistry(), analyzers.getRegistry()); } HunspellService getHunspellService() { @@ -198,8 +197,8 @@ public final class AnalysisModule { return hunspellDictionaries; } - private NamedRegistry> setupTokenFilters(List plugins, - HunspellService hunspellService) { + private NamedRegistry> setupTokenFilters(List plugins, HunspellService + hunspellService) { NamedRegistry> tokenFilters = new 
NamedRegistry<>("token_filter"); tokenFilters.register("stop", StopTokenFilterFactory::new); tokenFilters.register("reverse", ReverseTokenFilterFactory::new); @@ -251,8 +250,8 @@ public final class AnalysisModule { tokenFilters.register("scandinavian_folding", ScandinavianFoldingFilterFactory::new); tokenFilters.register("serbian_normalization", SerbianNormalizationFilterFactory::new); - tokenFilters.register("hunspell", requriesAnalysisSettings( - (indexSettings, env, name, settings) -> new HunspellTokenFilterFactory(indexSettings, name, settings, hunspellService))); + tokenFilters.register("hunspell", requriesAnalysisSettings((indexSettings, env, name, settings) -> new HunspellTokenFilterFactory + (indexSettings, name, settings, hunspellService))); tokenFilters.register("cjk_bigram", CJKBigramFilterFactory::new); tokenFilters.register("cjk_width", CJKWidthFilterFactory::new); @@ -341,6 +340,7 @@ public final class AnalysisModule { public T get(IndexSettings indexSettings, Environment environment, String name, Settings settings) throws IOException { return provider.get(indexSettings, environment, name, settings); } + @Override public boolean requiresAnalysisSettings() { return true; @@ -355,10 +355,11 @@ public final class AnalysisModule { /** * Creates a new analysis provider. + * * @param indexSettings the index settings for the index this provider is created for - * @param environment the nodes environment to load resources from persistent storage - * @param name the name of the analysis component - * @param settings the component specific settings without context prefixes + * @param environment the nodes environment to load resources from persistent storage + * @param name the name of the analysis component + * @param settings the component specific settings without context prefixes * @return a new provider instance * @throws IOException if an {@link IOException} occurs */ @@ -369,11 +370,11 @@ public final class AnalysisModule { * This can be used to get a default instance of an analysis factory without binding to an index. * * @param environment the nodes environment to load resources from persistent storage - * @param name the name of the analysis component + * @param name the name of the analysis component * @return a new provider instance - * @throws IOException if an {@link IOException} occurs + * @throws IOException if an {@link IOException} occurs * @throws IllegalArgumentException if the provider requires analysis settings ie. 
if {@link #requiresAnalysisSettings()} returns - * true + * true */ default T get(Environment environment, String name) throws IOException { if (requiresAnalysisSettings()) { diff --git a/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltTokenFilters.java b/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltTokenFilters.java index e1189e3197d..d48bb9df272 100644 --- a/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltTokenFilters.java +++ b/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltTokenFilters.java @@ -386,7 +386,7 @@ public enum PreBuiltTokenFilters { public TokenStream create(TokenStream tokenStream, Version version) { return new LimitTokenCountFilter(tokenStream, LimitTokenCountFilterFactory.DEFAULT_MAX_TOKEN_COUNT, LimitTokenCountFilterFactory.DEFAULT_CONSUME_ALL_TOKENS); } - } + }, ; diff --git a/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java b/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java deleted file mode 100644 index e0afabdb0f6..00000000000 --- a/core/src/main/java/org/elasticsearch/indices/ttl/IndicesTTLService.java +++ /dev/null @@ -1,359 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.indices.ttl; - -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.NumericDocValues; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.Scorer; -import org.apache.lucene.search.SimpleCollector; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.bulk.BulkItemResponse; -import org.elasticsearch.action.bulk.BulkRequest; -import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.bulk.TransportBulkAction; -import org.elasticsearch.action.delete.DeleteRequest; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.component.AbstractLifecycleComponent; -import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.lucene.uid.Versions; -import org.elasticsearch.common.settings.ClusterSettings; -import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Setting.Property; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.engine.Engine; -import org.elasticsearch.index.fieldvisitor.FieldsVisitor; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.TTLFieldMapper; -import org.elasticsearch.index.mapper.Uid; -import org.elasticsearch.index.mapper.VersionFieldMapper; -import org.elasticsearch.index.shard.IndexShard; -import org.elasticsearch.index.shard.IndexShardState; -import org.elasticsearch.indices.IndicesService; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.locks.Condition; -import java.util.concurrent.locks.ReentrantLock; - - -/** - * A node level service that delete expired docs on node primary shards. 
- */ -public class IndicesTTLService extends AbstractLifecycleComponent { - - public static final Setting INDICES_TTL_INTERVAL_SETTING = - Setting.positiveTimeSetting("indices.ttl.interval", TimeValue.timeValueSeconds(60), - Property.Dynamic, Property.NodeScope); - - private final ClusterService clusterService; - private final IndicesService indicesService; - private final TransportBulkAction bulkAction; - - private final int bulkSize; - private PurgerThread purgerThread; - - @Inject - public IndicesTTLService(Settings settings, ClusterService clusterService, IndicesService indicesService, ClusterSettings clusterSettings, TransportBulkAction bulkAction) { - super(settings); - this.clusterService = clusterService; - this.indicesService = indicesService; - TimeValue interval = INDICES_TTL_INTERVAL_SETTING.get(settings); - this.bulkAction = bulkAction; - this.bulkSize = this.settings.getAsInt("indices.ttl.bulk_size", 10000); - this.purgerThread = new PurgerThread(EsExecutors.threadName(settings, "[ttl_expire]"), interval); - clusterSettings.addSettingsUpdateConsumer(INDICES_TTL_INTERVAL_SETTING, this.purgerThread::resetInterval); - } - - @Override - protected void doStart() { - this.purgerThread.start(); - } - - @Override - protected void doStop() { - try { - this.purgerThread.shutdown(); - } catch (InterruptedException e) { - // we intentionally do not want to restore the interruption flag, we're about to shutdown anyway - } - } - - @Override - protected void doClose() { - } - - private class PurgerThread extends Thread { - private final AtomicBoolean running = new AtomicBoolean(true); - private final Notifier notifier; - private final CountDownLatch shutdownLatch = new CountDownLatch(1); - - - public PurgerThread(String name, TimeValue interval) { - super(name); - setDaemon(true); - this.notifier = new Notifier(interval); - } - - public void shutdown() throws InterruptedException { - if (running.compareAndSet(true, false)) { - notifier.doNotify(); - shutdownLatch.await(); - } - - } - - public void resetInterval(TimeValue interval) { - notifier.setTimeout(interval); - } - - @Override - public void run() { - try { - while (running.get()) { - try { - List shardsToPurge = getShardsToPurge(); - purgeShards(shardsToPurge); - } catch (Exception e) { - if (running.get()) { - logger.warn("failed to execute ttl purge", e); - } - } - if (running.get()) { - notifier.await(); - } - } - } finally { - shutdownLatch.countDown(); - } - } - - /** - * Returns the shards to purge, i.e. 
the local started primary shards that have ttl enabled and disable_purge to false - */ - private List getShardsToPurge() { - List shardsToPurge = new ArrayList<>(); - MetaData metaData = clusterService.state().metaData(); - for (IndexService indexService : indicesService) { - // check the value of disable_purge for this index - IndexMetaData indexMetaData = metaData.index(indexService.index()); - if (indexMetaData == null) { - continue; - } - if (indexService.getIndexSettings().isTTLPurgeDisabled()) { - continue; - } - - // check if ttl is enabled for at least one type of this index - boolean hasTTLEnabled = false; - for (String type : indexService.mapperService().types()) { - DocumentMapper documentType = indexService.mapperService().documentMapper(type); - if (documentType.TTLFieldMapper().enabled()) { - hasTTLEnabled = true; - break; - } - } - if (hasTTLEnabled) { - for (IndexShard indexShard : indexService) { - if (indexShard.state() == IndexShardState.STARTED && indexShard.routingEntry().primary() && indexShard.routingEntry().started()) { - shardsToPurge.add(indexShard); - } - } - } - } - return shardsToPurge; - } - - public TimeValue getInterval() { - return notifier.getTimeout(); - } - } - - private void purgeShards(List shardsToPurge) { - for (IndexShard shardToPurge : shardsToPurge) { - Query query = shardToPurge.mapperService().fullName(TTLFieldMapper.NAME).rangeQuery(null, System.currentTimeMillis(), false, - true, null); - Engine.Searcher searcher = shardToPurge.acquireSearcher("indices_ttl"); - try { - logger.debug("[{}][{}] purging shard", shardToPurge.routingEntry().index(), shardToPurge.routingEntry().id()); - ExpiredDocsCollector expiredDocsCollector = new ExpiredDocsCollector(); - searcher.searcher().search(query, expiredDocsCollector); - List docsToPurge = expiredDocsCollector.getDocsToPurge(); - - BulkRequest bulkRequest = new BulkRequest(); - for (DocToPurge docToPurge : docsToPurge) { - - bulkRequest.add(new DeleteRequest().index(shardToPurge.routingEntry().getIndexName()).type(docToPurge.type).id(docToPurge.id).version(docToPurge.version).routing(docToPurge.routing)); - bulkRequest = processBulkIfNeeded(bulkRequest, false); - } - processBulkIfNeeded(bulkRequest, true); - } catch (Exception e) { - logger.warn("failed to purge", e); - } finally { - searcher.close(); - } - } - } - - private static class DocToPurge { - public final String type; - public final String id; - public final long version; - public final String routing; - - public DocToPurge(String type, String id, long version, String routing) { - this.type = type; - this.id = id; - this.version = version; - this.routing = routing; - } - } - - private class ExpiredDocsCollector extends SimpleCollector { - private LeafReaderContext context; - private List docsToPurge = new ArrayList<>(); - private NumericDocValues versions; - - public ExpiredDocsCollector() { - } - - @Override - public void setScorer(Scorer scorer) { - } - - @Override - public boolean needsScores() { - return false; - } - - @Override - public void collect(int doc) { - try { - FieldsVisitor fieldsVisitor = new FieldsVisitor(false); - context.reader().document(doc, fieldsVisitor); - Uid uid = fieldsVisitor.uid(); - final long version = versions == null ? 
Versions.NOT_FOUND : versions.get(doc); - docsToPurge.add(new DocToPurge(uid.type(), uid.id(), version, fieldsVisitor.routing())); - } catch (Exception e) { - logger.trace("failed to collect doc", e); - } - } - - @Override - public void doSetNextReader(LeafReaderContext context) throws IOException { - this.context = context; - this.versions = context.reader().getNumericDocValues(VersionFieldMapper.NAME); - } - - public List getDocsToPurge() { - return this.docsToPurge; - } - } - - private BulkRequest processBulkIfNeeded(BulkRequest bulkRequest, boolean force) { - if ((force && bulkRequest.numberOfActions() > 0) || bulkRequest.numberOfActions() >= bulkSize) { - try { - bulkAction.executeBulk(bulkRequest, new ActionListener() { - @Override - public void onResponse(BulkResponse bulkResponse) { - if (bulkResponse.hasFailures()) { - int failedItems = 0; - for (BulkItemResponse response : bulkResponse) { - if (response.isFailed()) failedItems++; - } - if (logger.isTraceEnabled()) { - logger.trace("bulk deletion failures for [{}]/[{}] items, failure message: [{}]", failedItems, bulkResponse.getItems().length, bulkResponse.buildFailureMessage()); - } else { - logger.error("bulk deletion failures for [{}]/[{}] items", failedItems, bulkResponse.getItems().length); - } - } else { - logger.trace("bulk deletion took {}ms", bulkResponse.getTookInMillis()); - } - } - - @Override - public void onFailure(Exception e) { - if (logger.isTraceEnabled()) { - logger.trace("failed to execute bulk", e); - } else { - logger.warn("failed to execute bulk: ", e); - } - } - }); - } catch (Exception e) { - logger.warn("failed to process bulk", e); - } - bulkRequest = new BulkRequest(); - } - return bulkRequest; - } - - private static final class Notifier { - - private final ReentrantLock lock = new ReentrantLock(); - private final Condition condition = lock.newCondition(); - private volatile TimeValue timeout; - - public Notifier(TimeValue timeout) { - assert timeout != null; - this.timeout = timeout; - } - - public void await() { - lock.lock(); - try { - condition.await(timeout.millis(), TimeUnit.MILLISECONDS); - } catch (InterruptedException e) { - // we intentionally do not want to restore the interruption flag, we're about to shutdown anyway - } finally { - lock.unlock(); - } - - } - - public void setTimeout(TimeValue timeout) { - assert timeout != null; - this.timeout = timeout; - doNotify(); - } - - public TimeValue getTimeout() { - return timeout; - } - - public void doNotify() { - lock.lock(); - try { - condition.signalAll(); - } finally { - lock.unlock(); - } - } - } -} diff --git a/core/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/core/src/main/java/org/elasticsearch/ingest/IngestDocument.java index 670ff19fdab..edb92b6e837 100644 --- a/core/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/core/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -25,8 +25,6 @@ import org.elasticsearch.index.mapper.IndexFieldMapper; import org.elasticsearch.index.mapper.ParentFieldMapper; import org.elasticsearch.index.mapper.RoutingFieldMapper; import org.elasticsearch.index.mapper.SourceFieldMapper; -import org.elasticsearch.index.mapper.TTLFieldMapper; -import org.elasticsearch.index.mapper.TimestampFieldMapper; import org.elasticsearch.index.mapper.TypeFieldMapper; import java.text.DateFormat; @@ -56,8 +54,7 @@ public final class IngestDocument { private final Map sourceAndMetadata; private final Map ingestMetadata; - public IngestDocument(String index, String type, String id, 
String routing, String parent, String timestamp, - String ttl, Map source) { + public IngestDocument(String index, String type, String id, String routing, String parent, Map source) { this.sourceAndMetadata = new HashMap<>(); this.sourceAndMetadata.putAll(source); this.sourceAndMetadata.put(MetaData.INDEX.getFieldName(), index); @@ -69,12 +66,6 @@ public final class IngestDocument { if (parent != null) { this.sourceAndMetadata.put(MetaData.PARENT.getFieldName(), parent); } - if (timestamp != null) { - this.sourceAndMetadata.put(MetaData.TIMESTAMP.getFieldName(), timestamp); - } - if (ttl != null) { - this.sourceAndMetadata.put(MetaData.TTL.getFieldName(), ttl); - } this.ingestMetadata = new HashMap<>(); DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZZ", Locale.ROOT); @@ -639,9 +630,7 @@ public final class IngestDocument { TYPE(TypeFieldMapper.NAME), ID(IdFieldMapper.NAME), ROUTING(RoutingFieldMapper.NAME), - PARENT(ParentFieldMapper.NAME), - TIMESTAMP(TimestampFieldMapper.NAME), - TTL(TTLFieldMapper.NAME); + PARENT(ParentFieldMapper.NAME); private final String fieldName; diff --git a/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java b/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java index 6c701e59c90..0ca89ea37b9 100644 --- a/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java +++ b/core/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java @@ -159,10 +159,8 @@ public class PipelineExecutionService implements ClusterStateListener { String id = indexRequest.id(); String routing = indexRequest.routing(); String parent = indexRequest.parent(); - String timestamp = indexRequest.timestamp(); - String ttl = indexRequest.ttl() == null ? null : indexRequest.ttl().toString(); Map sourceAsMap = indexRequest.sourceAsMap(); - IngestDocument ingestDocument = new IngestDocument(index, type, id, routing, parent, timestamp, ttl, sourceAsMap); + IngestDocument ingestDocument = new IngestDocument(index, type, id, routing, parent, sourceAsMap); pipeline.execute(ingestDocument); Map metadataMap = ingestDocument.extractMetadata(); @@ -173,8 +171,6 @@ public class PipelineExecutionService implements ClusterStateListener { indexRequest.id(metadataMap.get(IngestDocument.MetaData.ID)); indexRequest.routing(metadataMap.get(IngestDocument.MetaData.ROUTING)); indexRequest.parent(metadataMap.get(IngestDocument.MetaData.PARENT)); - indexRequest.timestamp(metadataMap.get(IngestDocument.MetaData.TIMESTAMP)); - indexRequest.ttl(metadataMap.get(IngestDocument.MetaData.TTL)); indexRequest.source(ingestDocument.getSourceAndMetadata()); } catch (Exception e) { totalStats.ingestFailed(); diff --git a/core/src/main/java/org/elasticsearch/monitor/os/OsStats.java b/core/src/main/java/org/elasticsearch/monitor/os/OsStats.java index 68ddce801a4..d01b1f9b432 100644 --- a/core/src/main/java/org/elasticsearch/monitor/os/OsStats.java +++ b/core/src/main/java/org/elasticsearch/monitor/os/OsStats.java @@ -52,7 +52,7 @@ public class OsStats implements Writeable, ToXContent { this.cpu = new Cpu(in); this.mem = new Mem(in); this.swap = new Swap(in); - if (in.getVersion().onOrAfter(Version.V_5_1_0_UNRELEASED)) { + if (in.getVersion().onOrAfter(Version.V_5_1_1_UNRELEASED)) { this.cgroup = in.readOptionalWriteable(Cgroup::new); } else { this.cgroup = null; @@ -65,7 +65,7 @@ public class OsStats implements Writeable, ToXContent { cpu.writeTo(out); mem.writeTo(out); swap.writeTo(out); - if 
(out.getVersion().onOrAfter(Version.V_5_1_0_UNRELEASED)) { + if (out.getVersion().onOrAfter(Version.V_5_1_1_UNRELEASED)) { out.writeOptionalWriteable(cgroup); } } diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java index aa13043bc67..b68773a039a 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -98,7 +98,6 @@ import org.elasticsearch.indices.recovery.PeerRecoverySourceService; import org.elasticsearch.indices.recovery.PeerRecoveryTargetService; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.indices.store.IndicesStore; -import org.elasticsearch.indices.ttl.IndicesTTLService; import org.elasticsearch.ingest.IngestService; import org.elasticsearch.monitor.MonitorService; import org.elasticsearch.monitor.jvm.JvmInfo; @@ -543,7 +542,6 @@ public class Node implements Closeable { injector.getInstance(MappingUpdatedAction.class).setClient(client); injector.getInstance(IndicesService.class).start(); injector.getInstance(IndicesClusterStateService.class).start(); - injector.getInstance(IndicesTTLService.class).start(); injector.getInstance(SnapshotsService.class).start(); injector.getInstance(SnapshotShardsService.class).start(); injector.getInstance(RoutingService.class).start(); @@ -592,11 +590,13 @@ public class Node implements Closeable { discovery.start(); transportService.acceptIncomingRequests(); discovery.startInitialJoin(); - // tribe nodes don't have a master so we shouldn't register an observer - if (DiscoverySettings.INITIAL_STATE_TIMEOUT_SETTING.get(settings).millis() > 0) { + // tribe nodes don't have a master so we shouldn't register an observer + final TimeValue initialStateTimeout = DiscoverySettings.INITIAL_STATE_TIMEOUT_SETTING.get(settings); + if (initialStateTimeout.millis() > 0) { final ThreadPool thread = injector.getInstance(ThreadPool.class); ClusterStateObserver observer = new ClusterStateObserver(clusterService, null, logger, thread.getThreadContext()); if (observer.observedState().getClusterState().nodes().getMasterNodeId() == null) { + logger.debug("waiting to join the cluster. timeout [{}]", initialStateTimeout); final CountDownLatch latch = new CountDownLatch(1); observer.waitForNextChange(new ClusterStateObserver.Listener() { @Override @@ -610,10 +610,10 @@ public class Node implements Closeable { @Override public void onTimeout(TimeValue timeout) { logger.warn("timed out while waiting for initial discovery state - timeout: {}", - DiscoverySettings.INITIAL_STATE_TIMEOUT_SETTING.get(settings)); + initialStateTimeout); latch.countDown(); } - }, MasterNodeChangePredicate.INSTANCE, DiscoverySettings.INITIAL_STATE_TIMEOUT_SETTING.get(settings)); + }, MasterNodeChangePredicate.INSTANCE, initialStateTimeout); try { latch.await(); @@ -665,7 +665,6 @@ public class Node implements Closeable { // This can confuse other nodes and delay things - mostly if we're the master and we're running tests.
injector.getInstance(Discovery.class).stop(); // we close indices first, so operations won't be allowed on it - injector.getInstance(IndicesTTLService.class).stop(); injector.getInstance(RoutingService.class).stop(); injector.getInstance(ClusterService.class).stop(); injector.getInstance(NodeConnectionsService.class).stop(); @@ -716,7 +715,6 @@ public class Node implements Closeable { toClose.add(() -> stopWatch.stop().start("indices_cluster")); toClose.add(injector.getInstance(IndicesClusterStateService.class)); toClose.add(() -> stopWatch.stop().start("indices")); - toClose.add(injector.getInstance(IndicesTTLService.class)); toClose.add(injector.getInstance(IndicesService.class)); // close filter/fielddata caches after indices toClose.add(injector.getInstance(IndicesStore.class)); diff --git a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 3db9d4340ef..c17b9258f58 100644 --- a/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/core/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -1137,7 +1137,8 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp */ protected Tuple buildBlobStoreIndexShardSnapshots(Map blobs) { int latest = -1; - for (String name : blobs.keySet()) { + Set blobKeys = blobs.keySet(); + for (String name : blobKeys) { if (name.startsWith(SNAPSHOT_INDEX_PREFIX)) { try { int gen = Integer.parseInt(name.substring(SNAPSHOT_INDEX_PREFIX.length())); @@ -1158,15 +1159,17 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp final String file = SNAPSHOT_INDEX_PREFIX + latest; logger.warn((Supplier) () -> new ParameterizedMessage("failed to read index file [{}]", file), e); } + } else if (blobKeys.isEmpty() == false) { + logger.debug("Could not find a readable index-N file in a non-empty shard snapshot directory [{}]", blobContainer.path()); } // We couldn't load the index file - falling back to loading individual snapshots List snapshots = new ArrayList<>(); - for (String name : blobs.keySet()) { + for (String name : blobKeys) { try { BlobStoreIndexShardSnapshot snapshot = null; if (name.startsWith(SNAPSHOT_PREFIX)) { - snapshot = indexShardSnapshotFormat.readBlob(blobContainer, snapshotId.getUUID()); + snapshot = indexShardSnapshotFormat.readBlob(blobContainer, name); } else if (name.startsWith(LEGACY_SNAPSHOT_PREFIX)) { snapshot = indexShardSnapshotLegacyFormat.readBlob(blobContainer, name); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java b/core/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java index 82b10361153..3880ec6ca9e 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java @@ -66,10 +66,6 @@ public class RestIndexAction extends BaseRestHandler { IndexRequest indexRequest = new IndexRequest(request.param("index"), request.param("type"), request.param("id")); indexRequest.routing(request.param("routing")); indexRequest.parent(request.param("parent")); // order is important, set it after routing, so it will set the routing - indexRequest.timestamp(request.param("timestamp")); - if (request.hasParam("ttl")) { - indexRequest.ttl(request.param("ttl")); - } indexRequest.setPipeline(request.param("pipeline")); 
indexRequest.source(request.content()); indexRequest.timeout(request.paramAsTime("timeout", IndexRequest.DEFAULT_TIMEOUT)); diff --git a/core/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java b/core/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java index e0211ccec2f..feb2d39b8f6 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/document/RestUpdateAction.java @@ -87,10 +87,6 @@ public class RestUpdateAction extends BaseRestHandler { if (upsertRequest != null) { upsertRequest.routing(request.param("routing")); upsertRequest.parent(request.param("parent")); // order is important, set it after routing, so it will set the routing - upsertRequest.timestamp(request.param("timestamp")); - if (request.hasParam("ttl")) { - upsertRequest.ttl(request.param("ttl")); - } upsertRequest.version(RestActions.parseVersion(request)); upsertRequest.versionType(VersionType.fromString(request.param("version_type"), upsertRequest.versionType())); } @@ -98,10 +94,6 @@ public class RestUpdateAction extends BaseRestHandler { if (doc != null) { doc.routing(request.param("routing")); doc.parent(request.param("parent")); // order is important, set it after routing, so it will set the routing - doc.timestamp(request.param("timestamp")); - if (request.hasParam("ttl")) { - doc.ttl(request.param("ttl")); - } doc.version(RestActions.parseVersion(request)); doc.versionType(VersionType.fromString(request.param("version_type"), doc.versionType())); } diff --git a/core/src/main/java/org/elasticsearch/script/Script.java b/core/src/main/java/org/elasticsearch/script/Script.java index 970aff2e05a..d7f06df0f01 100644 --- a/core/src/main/java/org/elasticsearch/script/Script.java +++ b/core/src/main/java/org/elasticsearch/script/Script.java @@ -377,7 +377,7 @@ public final class Script implements ToXContent, Writeable { // Version 5.1+ requires all Script members to be non-null and supports the potential // for more options than just XContentType. Reorders the read in contents to be in // same order as the constructor. - if (in.getVersion().onOrAfter(Version.V_5_1_0_UNRELEASED)) { + if (in.getVersion().onOrAfter(Version.V_5_1_1_UNRELEASED)) { this.type = ScriptType.readFrom(in); this.lang = in.readString(); this.idOrCode = in.readString(); @@ -429,7 +429,7 @@ public final class Script implements ToXContent, Writeable { // Version 5.1+ requires all Script members to be non-null and supports the potential // for more options than just XContentType. Reorders the written out contents to be in // same order as the constructor. 
- if (out.getVersion().onOrAfter(Version.V_5_1_0_UNRELEASED)) { + if (out.getVersion().onOrAfter(Version.V_5_1_1_UNRELEASED)) { type.writeTo(out); out.writeString(lang); out.writeString(idOrCode); diff --git a/core/src/main/java/org/elasticsearch/script/ScriptService.java b/core/src/main/java/org/elasticsearch/script/ScriptService.java index 1dc1cda0ada..478aac6a55c 100644 --- a/core/src/main/java/org/elasticsearch/script/ScriptService.java +++ b/core/src/main/java/org/elasticsearch/script/ScriptService.java @@ -22,6 +22,7 @@ package org.elasticsearch.script; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.apache.lucene.util.IOUtils; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; @@ -52,6 +53,9 @@ import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.env.Environment; import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.watcher.FileChangesListener; @@ -599,6 +603,22 @@ public class ScriptService extends AbstractComponent implements Closeable, Clust } else { logger.warn("skipping compile of script file [{}] as all scripted operations are disabled for file scripts", file.toAbsolutePath()); } + } catch (ScriptException e) { + try (XContentBuilder builder = JsonXContent.contentBuilder()) { + builder.prettyPrint(); + builder.startObject(); + ElasticsearchException.toXContent(builder, ToXContent.EMPTY_PARAMS, e); + builder.endObject(); + logger.warn("failed to load/compile script [{}]: {}", scriptNameExt.v1(), builder.string()); + } catch (IOException ioe) { + ioe.addSuppressed(e); + logger.warn((Supplier) () -> new ParameterizedMessage( + "failed to log an appropriate warning after failing to load/compile script [{}]", scriptNameExt.v1()), ioe); + } + /* Log at the whole exception at the debug level as well just in case the stack trace is important. That way you can + * turn on the stack trace if you need it. */ + logger.debug((Supplier) () -> new ParameterizedMessage("failed to load/compile script [{}]. 
full exception:", + scriptNameExt.v1()), e); } catch (Exception e) { logger.warn((Supplier) () -> new ParameterizedMessage("failed to load/compile script [{}]", scriptNameExt.v1()), e); } diff --git a/core/src/main/java/org/elasticsearch/search/SearchModule.java b/core/src/main/java/org/elasticsearch/search/SearchModule.java index 99b3d4c8894..7e55d70f3bf 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/core/src/main/java/org/elasticsearch/search/SearchModule.java @@ -38,14 +38,11 @@ import org.elasticsearch.index.query.FieldMaskingSpanQueryBuilder; import org.elasticsearch.index.query.FuzzyQueryBuilder; import org.elasticsearch.index.query.GeoBoundingBoxQueryBuilder; import org.elasticsearch.index.query.GeoDistanceQueryBuilder; -import org.elasticsearch.index.query.GeoDistanceRangeQueryBuilder; import org.elasticsearch.index.query.GeoPolygonQueryBuilder; import org.elasticsearch.index.query.GeoShapeQueryBuilder; -import org.elasticsearch.index.query.GeohashCellQuery; import org.elasticsearch.index.query.HasChildQueryBuilder; import org.elasticsearch.index.query.HasParentQueryBuilder; import org.elasticsearch.index.query.IdsQueryBuilder; -import org.elasticsearch.index.query.IndicesQueryBuilder; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.MatchNoneQueryBuilder; import org.elasticsearch.index.query.MatchPhrasePrefixQueryBuilder; @@ -726,7 +723,7 @@ public class SearchModule { } private void registerQueryParsers(List plugins) { - registerQuery(new QuerySpec<>(MatchQueryBuilder.QUERY_NAME_FIELD, MatchQueryBuilder::new, MatchQueryBuilder::fromXContent)); + registerQuery(new QuerySpec<>(MatchQueryBuilder.NAME, MatchQueryBuilder::new, MatchQueryBuilder::fromXContent)); registerQuery(new QuerySpec<>(MatchPhraseQueryBuilder.NAME, MatchPhraseQueryBuilder::new, MatchPhraseQueryBuilder::fromXContent)); registerQuery(new QuerySpec<>(MatchPhrasePrefixQueryBuilder.NAME, MatchPhrasePrefixQueryBuilder::new, MatchPhrasePrefixQueryBuilder::fromXContent)); @@ -742,7 +739,7 @@ public class SearchModule { BooleanQuery.setMaxClauseCount(INDICES_MAX_CLAUSE_COUNT_SETTING.get(settings)); registerQuery(new QuerySpec<>(BoolQueryBuilder.NAME, BoolQueryBuilder::new, BoolQueryBuilder::fromXContent)); registerQuery(new QuerySpec<>(TermQueryBuilder.NAME, TermQueryBuilder::new, TermQueryBuilder::fromXContent)); - registerQuery(new QuerySpec<>(TermsQueryBuilder.QUERY_NAME_FIELD, TermsQueryBuilder::new, TermsQueryBuilder::fromXContent)); + registerQuery(new QuerySpec<>(TermsQueryBuilder.NAME, TermsQueryBuilder::new, TermsQueryBuilder::fromXContent)); registerQuery(new QuerySpec<>(FuzzyQueryBuilder.NAME, FuzzyQueryBuilder::new, FuzzyQueryBuilder::fromXContent)); registerQuery(new QuerySpec<>(RegexpQueryBuilder.NAME, RegexpQueryBuilder::new, RegexpQueryBuilder::fromXContent)); registerQuery(new QuerySpec<>(RangeQueryBuilder.NAME, RangeQueryBuilder::new, RangeQueryBuilder::fromXContent)); @@ -760,11 +757,9 @@ public class SearchModule { registerQuery(new QuerySpec<>(SpanFirstQueryBuilder.NAME, SpanFirstQueryBuilder::new, SpanFirstQueryBuilder::fromXContent)); registerQuery(new QuerySpec<>(SpanNearQueryBuilder.NAME, SpanNearQueryBuilder::new, SpanNearQueryBuilder::fromXContent)); registerQuery(new QuerySpec<>(SpanOrQueryBuilder.NAME, SpanOrQueryBuilder::new, SpanOrQueryBuilder::fromXContent)); - registerQuery(new QuerySpec<>(MoreLikeThisQueryBuilder.QUERY_NAME_FIELD, MoreLikeThisQueryBuilder::new, + registerQuery(new 
QuerySpec<>(MoreLikeThisQueryBuilder.NAME, MoreLikeThisQueryBuilder::new, MoreLikeThisQueryBuilder::fromXContent)); registerQuery(new QuerySpec<>(WrapperQueryBuilder.NAME, WrapperQueryBuilder::new, WrapperQueryBuilder::fromXContent)); - // TODO Remove IndicesQuery in 6.0 - registerQuery(new QuerySpec<>(IndicesQueryBuilder.NAME, IndicesQueryBuilder::new, IndicesQueryBuilder::fromXContent)); registerQuery(new QuerySpec<>(CommonTermsQueryBuilder.NAME, CommonTermsQueryBuilder::new, CommonTermsQueryBuilder::fromXContent)); registerQuery( new QuerySpec<>(SpanMultiTermQueryBuilder.NAME, SpanMultiTermQueryBuilder::new, SpanMultiTermQueryBuilder::fromXContent)); @@ -775,11 +770,8 @@ public class SearchModule { registerQuery(new QuerySpec<>(TypeQueryBuilder.NAME, TypeQueryBuilder::new, TypeQueryBuilder::fromXContent)); registerQuery(new QuerySpec<>(ScriptQueryBuilder.NAME, ScriptQueryBuilder::new, ScriptQueryBuilder::fromXContent)); registerQuery(new QuerySpec<>(GeoDistanceQueryBuilder.NAME, GeoDistanceQueryBuilder::new, GeoDistanceQueryBuilder::fromXContent)); - registerQuery(new QuerySpec<>(GeoDistanceRangeQueryBuilder.NAME, GeoDistanceRangeQueryBuilder::new, - GeoDistanceRangeQueryBuilder::fromXContent)); - registerQuery(new QuerySpec<>(GeoBoundingBoxQueryBuilder.QUERY_NAME_FIELD, GeoBoundingBoxQueryBuilder::new, + registerQuery(new QuerySpec<>(GeoBoundingBoxQueryBuilder.NAME, GeoBoundingBoxQueryBuilder::new, GeoBoundingBoxQueryBuilder::fromXContent)); - registerQuery(new QuerySpec<>(GeohashCellQuery.NAME, GeohashCellQuery.Builder::new, GeohashCellQuery.Builder::fromXContent)); registerQuery(new QuerySpec<>(GeoPolygonQueryBuilder.NAME, GeoPolygonQueryBuilder::new, GeoPolygonQueryBuilder::fromXContent)); registerQuery(new QuerySpec<>(ExistsQueryBuilder.NAME, ExistsQueryBuilder::new, ExistsQueryBuilder::fromXContent)); registerQuery(new QuerySpec<>(MatchNoneQueryBuilder.NAME, MatchNoneQueryBuilder::new, MatchNoneQueryBuilder::fromXContent)); diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/CustomQueryScorer.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/CustomQueryScorer.java index 5ff1df9c664..8a16d4c6eb0 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/CustomQueryScorer.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/CustomQueryScorer.java @@ -24,7 +24,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.highlight.QueryScorer; import org.apache.lucene.search.highlight.WeightedSpanTerm; import org.apache.lucene.search.highlight.WeightedSpanTermExtractor; -import org.apache.lucene.spatial.geopoint.search.GeoPointInBBoxQuery; import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery; import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; import org.elasticsearch.index.query.HasChildQueryBuilder; @@ -78,10 +77,7 @@ public final class CustomQueryScorer extends QueryScorer { @Override protected void extractUnknownQuery(Query query, Map terms) throws IOException { - if (query instanceof FiltersFunctionScoreQuery) { - query = ((FiltersFunctionScoreQuery) query).getSubQuery(); - extract(query, 1F, terms); - } else if (terms.isEmpty()) { + if (terms.isEmpty()) { extractWeightedTerms(terms, query, 1F); } } @@ -92,6 +88,8 @@ public final class CustomQueryScorer extends QueryScorer { return; } else if (query instanceof FunctionScoreQuery) { super.extract(((FunctionScoreQuery) query).getSubQuery(), boost, terms); + } 
else if (query instanceof FiltersFunctionScoreQuery) { + super.extract(((FiltersFunctionScoreQuery) query).getSubQuery(), boost, terms); } else { super.extract(query, boost, terms); } diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java index 84890857c79..e89d87e4fbf 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightPhase.java @@ -27,7 +27,6 @@ import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.SourceFieldMapper; -import org.elasticsearch.index.mapper.StringFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.internal.SearchContext; @@ -89,8 +88,7 @@ public class HighlightPhase extends AbstractComponent implements FetchSubPhase { // what they were doing and try to highlight anyway. if (fieldNameContainsWildcards) { if (fieldMapper.fieldType().typeName().equals(TextFieldMapper.CONTENT_TYPE) == false && - fieldMapper.fieldType().typeName().equals(KeywordFieldMapper.CONTENT_TYPE) == false && - fieldMapper.fieldType().typeName().equals(StringFieldMapper.CONTENT_TYPE) == false) { + fieldMapper.fieldType().typeName().equals(KeywordFieldMapper.CONTENT_TYPE) == false) { continue; } } diff --git a/core/src/main/java/org/elasticsearch/search/internal/AliasFilter.java b/core/src/main/java/org/elasticsearch/search/internal/AliasFilter.java index 39de37db597..b1260cc7237 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/AliasFilter.java +++ b/core/src/main/java/org/elasticsearch/search/internal/AliasFilter.java @@ -49,7 +49,7 @@ public final class AliasFilter implements Writeable { public AliasFilter(StreamInput input) throws IOException { aliases = input.readStringArray(); - if (input.getVersion().onOrAfter(Version.V_5_1_0_UNRELEASED)) { + if (input.getVersion().onOrAfter(Version.V_5_1_1_UNRELEASED)) { filter = input.readOptionalNamedWriteable(QueryBuilder.class); reparseAliases = false; } else { @@ -78,7 +78,7 @@ public final class AliasFilter implements Writeable { @Override public void writeTo(StreamOutput out) throws IOException { out.writeStringArray(aliases); - if (out.getVersion().onOrAfter(Version.V_5_1_0_UNRELEASED)) { + if (out.getVersion().onOrAfter(Version.V_5_1_1_UNRELEASED)) { out.writeOptionalNamedWriteable(filter); } } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/Suggest.java b/core/src/main/java/org/elasticsearch/search/suggest/Suggest.java index 95612693f8b..c40b1441000 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/Suggest.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/Suggest.java @@ -123,9 +123,8 @@ public class Suggest implements Iterable break; } } - return completionSuggestion; - } else if (suggestionContext.getFieldType2x() != null) { - final IndexReader indexReader = searcher.getIndexReader(); - org.elasticsearch.search.suggest.completion2x.CompletionSuggestion completionSuggestion = - new org.elasticsearch.search.suggest.completion2x.CompletionSuggestion(name, suggestionContext.getSize()); - spare.copyUTF8Bytes(suggestionContext.getText()); - - 
org.elasticsearch.search.suggest.completion2x.CompletionSuggestion.Entry completionSuggestEntry = - new org.elasticsearch.search.suggest.completion2x.CompletionSuggestion.Entry(new Text(spare.toString()), 0, spare.length()); - completionSuggestion.addTerm(completionSuggestEntry); - - String fieldName = suggestionContext.getField(); - Map results = - new HashMap<>(indexReader.leaves().size() * suggestionContext.getSize()); - for (LeafReaderContext atomicReaderContext : indexReader.leaves()) { - LeafReader atomicReader = atomicReaderContext.reader(); - Terms terms = atomicReader.fields().terms(fieldName); - if (terms instanceof Completion090PostingsFormat.CompletionTerms) { - final Completion090PostingsFormat.CompletionTerms lookupTerms = (Completion090PostingsFormat.CompletionTerms) terms; - final Lookup lookup = lookupTerms.getLookup(suggestionContext.getFieldType2x(), suggestionContext); - if (lookup == null) { - // we don't have a lookup for this segment.. this might be possible if a merge dropped all - // docs from the segment that had a value in this segment. - continue; - } - List lookupResults = lookup.lookup(spare.get(), false, suggestionContext.getSize()); - for (Lookup.LookupResult res : lookupResults) { - - final String key = res.key.toString(); - final float score = res.value; - final org.elasticsearch.search.suggest.completion2x.CompletionSuggestion.Entry.Option value = results.get(key); - if (value == null) { - final org.elasticsearch.search.suggest.completion2x.CompletionSuggestion.Entry.Option option = - new org.elasticsearch.search.suggest.completion2x.CompletionSuggestion.Entry.Option(new Text(key), score, - res.payload == null ? null : new BytesArray(res.payload)); - results.put(key, option); - } else if (value.getScore() < score) { - value.setScore(score); - value.setPayload(res.payload == null ? 
null : new BytesArray(res.payload)); - } - } - } - } - final List options = - new ArrayList<>(results.values()); - CollectionUtil.introSort(options, scoreComparator); - - int optionCount = Math.min(suggestionContext.getSize(), options.size()); - for (int i = 0; i < optionCount; i++) { - completionSuggestEntry.addOption(options.get(i)); - } - return completionSuggestion; } return null; } - private static final ScoreComparator scoreComparator = new ScoreComparator(); - public static class ScoreComparator implements - Comparator { - @Override - public int compare(org.elasticsearch.search.suggest.completion2x.CompletionSuggestion.Entry.Option o1, - org.elasticsearch.search.suggest.completion2x.CompletionSuggestion.Entry.Option o2) { - return Float.compare(o2.getScore(), o1.getScore()); - } - } - private static void suggest(IndexSearcher searcher, CompletionQuery query, TopSuggestDocsCollector collector) throws IOException { query = (CompletionQuery) query.rewrite(searcher.getIndexReader()); Weight weight = query.createWeight(searcher, collector.needsScores()); diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java index 3b216d9186a..09382d9aaff 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggestionBuilder.java @@ -32,7 +32,6 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.mapper.CompletionFieldMapper; -import org.elasticsearch.index.mapper.CompletionFieldMapper2x; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.QueryParseContext; @@ -41,9 +40,6 @@ import org.elasticsearch.search.suggest.SuggestionBuilder; import org.elasticsearch.search.suggest.SuggestionSearchContext.SuggestionContext; import org.elasticsearch.search.suggest.completion.context.ContextMapping; import org.elasticsearch.search.suggest.completion.context.ContextMappings; -import org.elasticsearch.search.suggest.completion2x.context.CategoryContextMapping; -import org.elasticsearch.search.suggest.completion2x.context.ContextMapping.ContextQuery; -import org.elasticsearch.search.suggest.completion2x.context.GeolocationContextMapping; import java.io.IOException; import java.util.ArrayList; @@ -215,105 +211,6 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder contextQueries = new ArrayList<>(); - - @SuppressWarnings("unchecked") - private Contexts2x addContextQuery(ContextQuery ctx) { - this.contextQueries.add(ctx); - return this; - } - - /** - * Setup a Geolocation for suggestions. See {@link GeolocationContextMapping}. - * @param lat Latitude of the location - * @param lon Longitude of the Location - * @return this - */ - @Deprecated - public Contexts2x addGeoLocation(String name, double lat, double lon, int ... precisions) { - return addContextQuery(GeolocationContextMapping.query(name, lat, lon, precisions)); - } - - /** - * Setup a Geolocation for suggestions. See {@link GeolocationContextMapping}. 
- * @param lat Latitude of the location - * @param lon Longitude of the Location - * @param precisions precisions as string var-args - * @return this - */ - @Deprecated - public Contexts2x addGeoLocationWithPrecision(String name, double lat, double lon, String ... precisions) { - return addContextQuery(GeolocationContextMapping.query(name, lat, lon, precisions)); - } - - /** - * Setup a Geolocation for suggestions. See {@link GeolocationContextMapping}. - * @param geohash Geohash of the location - * @return this - */ - @Deprecated - public Contexts2x addGeoLocation(String name, String geohash) { - return addContextQuery(GeolocationContextMapping.query(name, geohash)); - } - - /** - * Setup a Category for suggestions. See {@link CategoryContextMapping}. - * @param categories name of the category - * @return this - */ - @Deprecated - public Contexts2x addCategory(String name, CharSequence...categories) { - return addContextQuery(CategoryContextMapping.query(name, categories)); - } - - /** - * Setup a Category for suggestions. See {@link CategoryContextMapping}. - * @param categories name of the category - * @return this - */ - @Deprecated - public Contexts2x addCategory(String name, Iterable categories) { - return addContextQuery(CategoryContextMapping.query(name, categories)); - } - - /** - * Setup a Context Field for suggestions. See {@link CategoryContextMapping}. - * @param fieldvalues name of the category - * @return this - */ - @Deprecated - public Contexts2x addContextField(String name, CharSequence...fieldvalues) { - return addContextQuery(CategoryContextMapping.query(name, fieldvalues)); - } - - /** - * Setup a Context Field for suggestions. See {@link CategoryContextMapping}. - * @param fieldvalues name of the category - * @return this - */ - @Deprecated - public Contexts2x addContextField(String name, Iterable fieldvalues) { - return addContextQuery(CategoryContextMapping.query(name, fieldvalues)); - } - } - private static class InnerBuilder extends CompletionSuggestionBuilder { private String field; @@ -366,8 +263,7 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder> queryContexts = Collections.emptyMap(); - private CompletionFieldMapper2x.CompletionFieldType fieldType2x; - private List contextQueries; CompletionFieldMapper.CompletionFieldType getFieldType() { return this.fieldType; } - CompletionFieldMapper2x.CompletionFieldType getFieldType2x() { - return this.fieldType2x; - } - void setFieldType(CompletionFieldMapper.CompletionFieldType fieldType) { this.fieldType = fieldType; } @@ -113,15 +105,4 @@ public class CompletionSuggestionContext extends SuggestionSearchContext.Suggest return query; } - public void setFieldType2x(CompletionFieldMapper2x.CompletionFieldType type) { - this.fieldType2x = type; - } - - public void setContextQueries(List contextQueries) { - this.contextQueries = contextQueries; - } - - public List getContextQueries() { - return contextQueries; - } } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java index bd1449bbfe7..9d5838b4b21 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java @@ -29,8 +29,8 @@ import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.XContentBuilder; import 
org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.Token; +import org.elasticsearch.index.mapper.BaseGeoPointFieldMapper; import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.GeoPointFieldMapper; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParseContext.Document; import org.elasticsearch.index.query.QueryParseContext; @@ -140,7 +140,7 @@ public class GeoContextMapping extends ContextMapping { public Set parseContext(ParseContext parseContext, XContentParser parser) throws IOException, ElasticsearchParseException { if (fieldName != null) { FieldMapper mapper = parseContext.docMapper().mappers().getMapper(fieldName); - if (!(mapper instanceof GeoPointFieldMapper)) { + if (!(mapper instanceof BaseGeoPointFieldMapper)) { throw new ElasticsearchParseException("referenced field must be mapped to geo_point"); } } diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/AnalyzingCompletionLookupProvider.java b/core/src/main/java/org/elasticsearch/search/suggest/completion2x/AnalyzingCompletionLookupProvider.java deleted file mode 100644 index e9095bfb7d5..00000000000 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/AnalyzingCompletionLookupProvider.java +++ /dev/null @@ -1,413 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.search.suggest.completion2x; - -import com.carrotsearch.hppc.ObjectLongHashMap; - -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.codecs.CodecUtil; -import org.apache.lucene.codecs.FieldsConsumer; -import org.apache.lucene.index.PostingsEnum; -import org.apache.lucene.index.Fields; -import org.apache.lucene.index.Terms; -import org.apache.lucene.index.TermsEnum; -import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.search.suggest.Lookup; -import org.apache.lucene.search.suggest.analyzing.XAnalyzingSuggester; -import org.apache.lucene.search.suggest.analyzing.XFuzzySuggester; -import org.apache.lucene.store.IndexInput; -import org.apache.lucene.store.IndexOutput; -import org.apache.lucene.util.Accountable; -import org.apache.lucene.util.Accountables; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.IOUtils; -import org.apache.lucene.util.IntsRef; -import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.fst.ByteSequenceOutputs; -import org.apache.lucene.util.fst.FST; -import org.apache.lucene.util.fst.PairOutputs; -import org.apache.lucene.util.fst.PairOutputs.Pair; -import org.apache.lucene.util.fst.PositiveIntOutputs; -import org.elasticsearch.common.regex.Regex; -import org.elasticsearch.index.mapper.CompletionFieldMapper2x; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.search.suggest.completion.CompletionStats; -import org.elasticsearch.search.suggest.completion.CompletionSuggestionContext; -import org.elasticsearch.search.suggest.completion.FuzzyOptions; -import org.elasticsearch.search.suggest.completion2x.Completion090PostingsFormat.CompletionLookupProvider; -import org.elasticsearch.search.suggest.completion2x.Completion090PostingsFormat.LookupFactory; -import org.elasticsearch.search.suggest.completion2x.context.ContextMapping.ContextQuery; - -import java.io.IOException; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.Set; -import java.util.TreeMap; - -public class AnalyzingCompletionLookupProvider extends CompletionLookupProvider { - - // for serialization - public static final int SERIALIZE_PRESERVE_SEPARATORS = 1; - public static final int SERIALIZE_HAS_PAYLOADS = 2; - public static final int SERIALIZE_PRESERVE_POSITION_INCREMENTS = 4; - - private static final int MAX_SURFACE_FORMS_PER_ANALYZED_FORM = 256; - private static final int MAX_GRAPH_EXPANSIONS = -1; - - public static final String CODEC_NAME = "analyzing"; - public static final int CODEC_VERSION_START = 1; - public static final int CODEC_VERSION_SERIALIZED_LABELS = 2; - public static final int CODEC_VERSION_CHECKSUMS = 3; - public static final int CODEC_VERSION_LATEST = CODEC_VERSION_CHECKSUMS; - - private final boolean preserveSep; - private final boolean preservePositionIncrements; - private final int maxSurfaceFormsPerAnalyzedForm; - private final int maxGraphExpansions; - private final boolean hasPayloads; - private final XAnalyzingSuggester prototype; - - public AnalyzingCompletionLookupProvider(boolean preserveSep, boolean preservePositionIncrements, boolean hasPayloads) { - this.preserveSep = preserveSep; - this.preservePositionIncrements = preservePositionIncrements; - this.hasPayloads = hasPayloads; - this.maxSurfaceFormsPerAnalyzedForm = MAX_SURFACE_FORMS_PER_ANALYZED_FORM; - this.maxGraphExpansions = MAX_GRAPH_EXPANSIONS; - int options = preserveSep ? 
XAnalyzingSuggester.PRESERVE_SEP : 0; - // needs to fixed in the suggester first before it can be supported - //options |= exactFirst ? XAnalyzingSuggester.EXACT_FIRST : 0; - prototype = new XAnalyzingSuggester(null, null, null, options, maxSurfaceFormsPerAnalyzedForm, maxGraphExpansions, - preservePositionIncrements, null, false, 1, XAnalyzingSuggester.SEP_LABEL, XAnalyzingSuggester.PAYLOAD_SEP, - XAnalyzingSuggester.END_BYTE, XAnalyzingSuggester.HOLE_CHARACTER); - } - - @Override - public String getName() { - return "analyzing"; - } - - public boolean getPreserveSep() { - return preserveSep; - } - - public boolean getPreservePositionsIncrements() { - return preservePositionIncrements; - } - - public boolean hasPayloads() { - return hasPayloads; - } - - @Override - public FieldsConsumer consumer(final IndexOutput output) throws IOException { - CodecUtil.writeHeader(output, CODEC_NAME, CODEC_VERSION_LATEST); - return new FieldsConsumer() { - private Map fieldOffsets = new HashMap<>(); - - @Override - public void close() throws IOException { - try { - /* - * write the offsets per field such that we know where - * we need to load the FSTs from - */ - long pointer = output.getFilePointer(); - output.writeVInt(fieldOffsets.size()); - for (Map.Entry entry : fieldOffsets.entrySet()) { - output.writeString(entry.getKey()); - output.writeVLong(entry.getValue()); - } - output.writeLong(pointer); - CodecUtil.writeFooter(output); - } finally { - IOUtils.close(output); - } - } - - @Override - public void write(Fields fields) throws IOException { - for(String field : fields) { - Terms terms = fields.terms(field); - if (terms == null) { - continue; - } - TermsEnum termsEnum = terms.iterator(); - PostingsEnum docsEnum = null; - final SuggestPayload spare = new SuggestPayload(); - int maxAnalyzedPathsForOneInput = 0; - final XAnalyzingSuggester.XBuilder builder = new XAnalyzingSuggester.XBuilder( - maxSurfaceFormsPerAnalyzedForm, hasPayloads, XAnalyzingSuggester.PAYLOAD_SEP); - int docCount = 0; - while (true) { - BytesRef term = termsEnum.next(); - if (term == null) { - break; - } - docsEnum = termsEnum.postings(docsEnum, PostingsEnum.PAYLOADS); - builder.startTerm(term); - int docFreq = 0; - while (docsEnum.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { - for (int i = 0; i < docsEnum.freq(); i++) { - final int position = docsEnum.nextPosition(); - AnalyzingCompletionLookupProvider.this.parsePayload(docsEnum.getPayload(), spare); - builder.addSurface(spare.surfaceForm.get(), spare.payload.get(), spare.weight); - // multi fields have the same surface form so we sum up here - maxAnalyzedPathsForOneInput = Math.max(maxAnalyzedPathsForOneInput, position + 1); - } - docFreq++; - docCount = Math.max(docCount, docsEnum.docID()+1); - } - builder.finishTerm(docFreq); - } - /* - * Here we are done processing the field and we can - * buid the FST and write it to disk. - */ - FST> build = builder.build(); - assert build != null || docCount == 0: "the FST is null but docCount is != 0 actual value: [" + docCount + "]"; - /* - * it's possible that the FST is null if we have 2 segments that get merged - * and all docs that have a value in this field are deleted. This will cause - * a consumer to be created but it doesn't consume any values causing the FSTBuilder - * to return null. 
- */ - if (build != null) { - fieldOffsets.put(field, output.getFilePointer()); - build.save(output); - /* write some more meta-info */ - output.writeVInt(maxAnalyzedPathsForOneInput); - output.writeVInt(maxSurfaceFormsPerAnalyzedForm); - output.writeInt(maxGraphExpansions); // can be negative - int options = 0; - options |= preserveSep ? SERIALIZE_PRESERVE_SEPARATORS : 0; - options |= hasPayloads ? SERIALIZE_HAS_PAYLOADS : 0; - options |= preservePositionIncrements ? SERIALIZE_PRESERVE_POSITION_INCREMENTS : 0; - output.writeVInt(options); - output.writeVInt(XAnalyzingSuggester.SEP_LABEL); - output.writeVInt(XAnalyzingSuggester.END_BYTE); - output.writeVInt(XAnalyzingSuggester.PAYLOAD_SEP); - output.writeVInt(XAnalyzingSuggester.HOLE_CHARACTER); - } - } - } - }; - } - - - @Override - public LookupFactory load(IndexInput input) throws IOException { - long sizeInBytes = 0; - int version = CodecUtil.checkHeader(input, CODEC_NAME, CODEC_VERSION_START, CODEC_VERSION_LATEST); - if (version >= CODEC_VERSION_CHECKSUMS) { - CodecUtil.checksumEntireFile(input); - } - final long metaPointerPosition = input.length() - (version >= CODEC_VERSION_CHECKSUMS? 8 + CodecUtil.footerLength() : 8); - final Map lookupMap = new HashMap<>(); - input.seek(metaPointerPosition); - long metaPointer = input.readLong(); - input.seek(metaPointer); - int numFields = input.readVInt(); - - Map meta = new TreeMap<>(); - for (int i = 0; i < numFields; i++) { - String name = input.readString(); - long offset = input.readVLong(); - meta.put(offset, name); - } - - for (Map.Entry entry : meta.entrySet()) { - input.seek(entry.getKey()); - FST> fst = new FST<>(input, new PairOutputs<>( - PositiveIntOutputs.getSingleton(), ByteSequenceOutputs.getSingleton())); - int maxAnalyzedPathsForOneInput = input.readVInt(); - int maxSurfaceFormsPerAnalyzedForm = input.readVInt(); - int maxGraphExpansions = input.readInt(); - int options = input.readVInt(); - boolean preserveSep = (options & SERIALIZE_PRESERVE_SEPARATORS) != 0; - boolean hasPayloads = (options & SERIALIZE_HAS_PAYLOADS) != 0; - boolean preservePositionIncrements = (options & SERIALIZE_PRESERVE_POSITION_INCREMENTS) != 0; - - // first version did not include these three fields, so fall back to old default (before the analyzingsuggester - // was updated in Lucene, so we cannot use the suggester defaults) - int sepLabel, payloadSep, endByte, holeCharacter; - switch (version) { - case CODEC_VERSION_START: - sepLabel = 0xFF; - payloadSep = '\u001f'; - endByte = 0x0; - holeCharacter = '\u001E'; - break; - default: - sepLabel = input.readVInt(); - endByte = input.readVInt(); - payloadSep = input.readVInt(); - holeCharacter = input.readVInt(); - } - - AnalyzingSuggestHolder holder = new AnalyzingSuggestHolder(preserveSep, preservePositionIncrements, - maxSurfaceFormsPerAnalyzedForm, maxGraphExpansions, hasPayloads, maxAnalyzedPathsForOneInput, - fst, sepLabel, payloadSep, endByte, holeCharacter); - sizeInBytes += fst.ramBytesUsed(); - lookupMap.put(entry.getValue(), holder); - } - final long ramBytesUsed = sizeInBytes; - return new LookupFactory() { - @Override - public Lookup getLookup(CompletionFieldMapper2x.CompletionFieldType fieldType, CompletionSuggestionContext suggestionContext) { - AnalyzingSuggestHolder analyzingSuggestHolder = lookupMap.get(fieldType.name()); - if (analyzingSuggestHolder == null) { - return null; - } - int flags = analyzingSuggestHolder.getPreserveSeparator() ? 
XAnalyzingSuggester.PRESERVE_SEP : 0; - - final XAnalyzingSuggester suggester; - final Automaton queryPrefix = fieldType.requiresContext() ? - ContextQuery.toAutomaton(analyzingSuggestHolder.getPreserveSeparator(), suggestionContext.getContextQueries()) : null; - - final FuzzyOptions fuzzyOptions = suggestionContext.getFuzzyOptions(); - if (fuzzyOptions != null) { - suggester = new XFuzzySuggester(fieldType.indexAnalyzer(), queryPrefix, fieldType.searchAnalyzer(), flags, - analyzingSuggestHolder.maxSurfaceFormsPerAnalyzedForm, analyzingSuggestHolder.maxGraphExpansions, - fuzzyOptions.getEditDistance(), fuzzyOptions.isTranspositions(), - fuzzyOptions.getFuzzyPrefixLength(), fuzzyOptions.getFuzzyMinLength(), fuzzyOptions.isUnicodeAware(), - analyzingSuggestHolder.fst, analyzingSuggestHolder.hasPayloads, - analyzingSuggestHolder.maxAnalyzedPathsForOneInput, analyzingSuggestHolder.sepLabel, - analyzingSuggestHolder.payloadSep, analyzingSuggestHolder.endByte, - analyzingSuggestHolder.holeCharacter); - } else { - suggester = new XAnalyzingSuggester(fieldType.indexAnalyzer(), queryPrefix, fieldType.searchAnalyzer(), flags, - analyzingSuggestHolder.maxSurfaceFormsPerAnalyzedForm, analyzingSuggestHolder.maxGraphExpansions, - analyzingSuggestHolder.preservePositionIncrements, analyzingSuggestHolder.fst, analyzingSuggestHolder.hasPayloads, - analyzingSuggestHolder.maxAnalyzedPathsForOneInput, analyzingSuggestHolder.sepLabel, - analyzingSuggestHolder.payloadSep, analyzingSuggestHolder.endByte, analyzingSuggestHolder.holeCharacter); - } - return suggester; - } - - @Override - public CompletionStats stats(String... fields) { - long sizeInBytes = 0; - ObjectLongHashMap completionFields = null; - if (fields != null && fields.length > 0) { - completionFields = new ObjectLongHashMap<>(fields.length); - } - - for (Map.Entry entry : lookupMap.entrySet()) { - sizeInBytes += entry.getValue().fst.ramBytesUsed(); - if (fields == null || fields.length == 0) { - continue; - } - if (Regex.simpleMatch(fields, entry.getKey())) { - long fstSize = entry.getValue().fst.ramBytesUsed(); - completionFields.addTo(entry.getKey(), fstSize); - } - } - - return new CompletionStats(sizeInBytes, completionFields); - } - - @Override - AnalyzingSuggestHolder getAnalyzingSuggestHolder(MappedFieldType fieldType) { - return lookupMap.get(fieldType.name()); - } - - @Override - public long ramBytesUsed() { - return ramBytesUsed; - } - - @Override - public Collection getChildResources() { - return Accountables.namedAccountables("field", lookupMap); - } - }; - } - - static class AnalyzingSuggestHolder implements Accountable { - final boolean preserveSep; - final boolean preservePositionIncrements; - final int maxSurfaceFormsPerAnalyzedForm; - final int maxGraphExpansions; - final boolean hasPayloads; - final int maxAnalyzedPathsForOneInput; - final FST> fst; - final int sepLabel; - final int payloadSep; - final int endByte; - final int holeCharacter; - - public AnalyzingSuggestHolder(boolean preserveSep, boolean preservePositionIncrements, - int maxSurfaceFormsPerAnalyzedForm, int maxGraphExpansions, - boolean hasPayloads, int maxAnalyzedPathsForOneInput, - FST> fst, int sepLabel, int payloadSep, - int endByte, int holeCharacter) { - this.preserveSep = preserveSep; - this.preservePositionIncrements = preservePositionIncrements; - this.maxSurfaceFormsPerAnalyzedForm = maxSurfaceFormsPerAnalyzedForm; - this.maxGraphExpansions = maxGraphExpansions; - this.hasPayloads = hasPayloads; - this.maxAnalyzedPathsForOneInput = 
maxAnalyzedPathsForOneInput; - this.fst = fst; - this.sepLabel = sepLabel; - this.payloadSep = payloadSep; - this.endByte = endByte; - this.holeCharacter = holeCharacter; - } - - public boolean getPreserveSeparator() { - return preserveSep; - } - - public boolean getPreservePositionIncrements() { - return preservePositionIncrements; - } - - public boolean hasPayloads() { - return hasPayloads; - } - - @Override - public long ramBytesUsed() { - if (fst != null) { - return fst.ramBytesUsed(); - } else { - return 0; - } - } - - @Override - public Collection getChildResources() { - if (fst != null) { - return Collections.singleton(Accountables.namedAccountable("fst", fst)); - } else { - return Collections.emptyList(); - } - } - } - - @Override - public Set toFiniteStrings(TokenStream stream) throws IOException { - return prototype.toFiniteStrings(stream); - } - - -} diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/Completion090PostingsFormat.java b/core/src/main/java/org/elasticsearch/search/suggest/completion2x/Completion090PostingsFormat.java deleted file mode 100644 index 245f2416b40..00000000000 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/Completion090PostingsFormat.java +++ /dev/null @@ -1,360 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
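The per-field metadata that consumer() writes and load() reads back packs the three SERIALIZE_* constants from the top of this class into a single VInt. A small sketch of that bit packing, using the literal flag values (1, 2, 4) of the constants above; the helper class itself is illustrative and not part of the patch:

    final class SuggestSerializationFlagsSketch {

        static int pack(boolean preserveSep, boolean hasPayloads, boolean preservePositionIncrements) {
            int options = 0;
            options |= preserveSep ? 1 : 0;                 // SERIALIZE_PRESERVE_SEPARATORS
            options |= hasPayloads ? 2 : 0;                 // SERIALIZE_HAS_PAYLOADS
            options |= preservePositionIncrements ? 4 : 0;  // SERIALIZE_PRESERVE_POSITION_INCREMENTS
            return options;                                 // written with output.writeVInt(options)
        }

        static boolean preserveSep(int options)                { return (options & 1) != 0; }
        static boolean hasPayloads(int options)                { return (options & 2) != 0; }
        static boolean preservePositionIncrements(int options) { return (options & 4) != 0; }
    }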
- */ -package org.elasticsearch.search.suggest.completion2x; - -import org.apache.logging.log4j.Logger; -import org.apache.lucene.codecs.CodecUtil; -import org.apache.lucene.codecs.FieldsConsumer; -import org.apache.lucene.codecs.FieldsProducer; -import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.index.Fields; -import org.apache.lucene.index.FilterLeafReader.FilterTerms; -import org.apache.lucene.index.IndexFileNames; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.LeafReader; -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.SegmentReadState; -import org.apache.lucene.index.SegmentWriteState; -import org.apache.lucene.index.Terms; -import org.apache.lucene.search.suggest.Lookup; -import org.apache.lucene.store.IOContext.Context; -import org.apache.lucene.store.IndexInput; -import org.apache.lucene.store.IndexOutput; -import org.apache.lucene.store.InputStreamDataInput; -import org.apache.lucene.store.OutputStreamDataOutput; -import org.apache.lucene.util.Accountable; -import org.apache.lucene.util.Accountables; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.IOUtils; -import org.apache.lucene.util.Version; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.index.mapper.CompletionFieldMapper2x; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.search.suggest.completion.CompletionStats; -import org.elasticsearch.search.suggest.completion.CompletionSuggestionContext; - -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.Iterator; -import java.util.List; -import java.util.Map; - -/** - * This {@link PostingsFormat} is basically a T-Sink for a default postings - * format that is used to store postings on disk fitting the lucene APIs and - * builds a suggest FST as an auxiliary data structure next to the actual - * postings format. It uses the delegate postings format for simplicity to - * handle all the merge operations. The auxiliary suggest FST data structure is - * only loaded if a FieldsProducer is requested for reading, for merging it uses - * the low memory delegate postings format. - */ -public class Completion090PostingsFormat extends PostingsFormat { - - public static final String CODEC_NAME = "completion090"; - public static final int SUGGEST_CODEC_VERSION = 1; - public static final int SUGGEST_VERSION_CURRENT = SUGGEST_CODEC_VERSION; - public static final String EXTENSION = "cmp"; - - private static final Logger logger = Loggers.getLogger(Completion090PostingsFormat.class); - private PostingsFormat delegatePostingsFormat; - private static final Map providers; - private CompletionLookupProvider writeProvider; - - - static { - final CompletionLookupProvider provider = new AnalyzingCompletionLookupProvider(true, true, false); - providers = Collections.singletonMap(provider.getName(), provider); - } - - public Completion090PostingsFormat(PostingsFormat delegatePostingsFormat, CompletionLookupProvider provider) { - super(CODEC_NAME); - this.delegatePostingsFormat = delegatePostingsFormat; - this.writeProvider = provider; - assert delegatePostingsFormat != null && writeProvider != null; - } - - /* - * Used only by core Lucene at read-time via Service Provider instantiation - * do not use at Write-time in application code. 
- */ - public Completion090PostingsFormat() { - super(CODEC_NAME); - } - - @Override - public CompletionFieldsConsumer fieldsConsumer(SegmentWriteState state) throws IOException { - if (delegatePostingsFormat == null) { - throw new UnsupportedOperationException("Error - " + getClass().getName() - + " has been constructed without a choice of PostingsFormat"); - } - assert writeProvider != null; - return new CompletionFieldsConsumer(state); - } - - @Override - public CompletionFieldsProducer fieldsProducer(SegmentReadState state) throws IOException { - return new CompletionFieldsProducer(state); - } - - private class CompletionFieldsConsumer extends FieldsConsumer { - - private FieldsConsumer delegatesFieldsConsumer; - private FieldsConsumer suggestFieldsConsumer; - - public CompletionFieldsConsumer(SegmentWriteState state) throws IOException { - this.delegatesFieldsConsumer = delegatePostingsFormat.fieldsConsumer(state); - String suggestFSTFile = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, EXTENSION); - IndexOutput output = null; - boolean success = false; - try { - output = state.directory.createOutput(suggestFSTFile, state.context); - CodecUtil.writeIndexHeader(output, CODEC_NAME, SUGGEST_VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix); - /* - * we write the delegate postings format name so we can load it - * without getting an instance in the ctor - */ - output.writeString(delegatePostingsFormat.getName()); - output.writeString(writeProvider.getName()); - this.suggestFieldsConsumer = writeProvider.consumer(output); - success = true; - } finally { - if (!success) { - IOUtils.closeWhileHandlingException(output); - } - } - } - - @Override - public void write(Fields fields) throws IOException { - delegatesFieldsConsumer.write(fields); - suggestFieldsConsumer.write(fields); - } - - @Override - public void close() throws IOException { - IOUtils.close(delegatesFieldsConsumer, suggestFieldsConsumer); - } - } - - private static class CompletionFieldsProducer extends FieldsProducer { - // TODO make this class lazyload all the things in order to take advantage of the new merge instance API - // today we just load everything up-front - private final FieldsProducer delegateProducer; - private final LookupFactory lookupFactory; - private final int version; - - public CompletionFieldsProducer(SegmentReadState state) throws IOException { - String suggestFSTFile = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, EXTENSION); - IndexInput input = state.directory.openInput(suggestFSTFile, state.context); - if (state.segmentInfo.getVersion().onOrAfter(Version.LUCENE_6_2_0)) { - // Lucene 6.2.0+ requires all index files to use index header, but prior to that we used an ordinary codec header: - version = CodecUtil.checkIndexHeader(input, CODEC_NAME, SUGGEST_CODEC_VERSION, SUGGEST_VERSION_CURRENT, - state.segmentInfo.getId(), state.segmentSuffix); - } else { - version = CodecUtil.checkHeader(input, CODEC_NAME, SUGGEST_CODEC_VERSION, SUGGEST_VERSION_CURRENT); - } - FieldsProducer delegateProducer = null; - boolean success = false; - try { - PostingsFormat delegatePostingsFormat = PostingsFormat.forName(input.readString()); - String providerName = input.readString(); - CompletionLookupProvider completionLookupProvider = providers.get(providerName); - if (completionLookupProvider == null) { - throw new IllegalStateException("no provider with name [" + providerName + "] registered"); - } - // TODO: we could clone the ReadState and make it 
always forward IOContext.MERGE to prevent unecessary heap usage? - delegateProducer = delegatePostingsFormat.fieldsProducer(state); - /* - * If we are merging we don't load the FSTs at all such that we - * don't consume so much memory during merge - */ - if (state.context.context != Context.MERGE) { - // TODO: maybe we can do this in a fully lazy fashion based on some configuration - // eventually we should have some kind of curciut breaker that prevents us from going OOM here - // with some configuration - this.lookupFactory = completionLookupProvider.load(input); - } else { - this.lookupFactory = null; - } - this.delegateProducer = delegateProducer; - success = true; - } finally { - if (!success) { - IOUtils.closeWhileHandlingException(delegateProducer, input); - } else { - IOUtils.close(input); - } - } - } - - @Override - public void close() throws IOException { - IOUtils.close(delegateProducer); - } - - @Override - public Iterator iterator() { - return delegateProducer.iterator(); - } - - @Override - public Terms terms(String field) throws IOException { - final Terms terms = delegateProducer.terms(field); - if (terms == null || lookupFactory == null) { - return terms; - } - return new CompletionTerms(terms, lookupFactory); - } - - @Override - public int size() { - return delegateProducer.size(); - } - - @Override - public long ramBytesUsed() { - return (lookupFactory == null ? 0 : lookupFactory.ramBytesUsed()) + delegateProducer.ramBytesUsed(); - } - - @Override - public Collection getChildResources() { - List resources = new ArrayList<>(); - if (lookupFactory != null) { - resources.add(Accountables.namedAccountable("lookup", lookupFactory)); - } - resources.add(Accountables.namedAccountable("delegate", delegateProducer)); - return Collections.unmodifiableList(resources); - } - - @Override - public void checkIntegrity() throws IOException { - delegateProducer.checkIntegrity(); - } - - @Override - public FieldsProducer getMergeInstance() throws IOException { - return delegateProducer.getMergeInstance(); - } - } - - public static final class CompletionTerms extends FilterTerms { - private final LookupFactory lookup; - - public CompletionTerms(Terms delegate, LookupFactory lookup) { - super(delegate); - this.lookup = lookup; - } - - public Lookup getLookup(CompletionFieldMapper2x.CompletionFieldType mapper, CompletionSuggestionContext suggestionContext) { - return lookup.getLookup(mapper, suggestionContext); - } - - public CompletionStats stats(String ... 
fields) { - return lookup.stats(fields); - } - } - - public abstract static class CompletionLookupProvider implements PayloadProcessor, CompletionTokenStream.ToFiniteStrings { - - public static final char UNIT_SEPARATOR = '\u001f'; - - public abstract FieldsConsumer consumer(IndexOutput output) throws IOException; - - public abstract String getName(); - - public abstract LookupFactory load(IndexInput input) throws IOException; - - @Override - public BytesRef buildPayload(BytesRef surfaceForm, long weight, BytesRef payload) throws IOException { - if (weight < -1 || weight > Integer.MAX_VALUE) { - throw new IllegalArgumentException("weight must be >= -1 && <= Integer.MAX_VALUE"); - } - for (int i = 0; i < surfaceForm.length; i++) { - if (surfaceForm.bytes[i] == UNIT_SEPARATOR) { - throw new IllegalArgumentException( - "surface form cannot contain unit separator character U+001F; this character is reserved"); - } - } - ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); - OutputStreamDataOutput output = new OutputStreamDataOutput(byteArrayOutputStream); - output.writeVLong(weight + 1); - output.writeVInt(surfaceForm.length); - output.writeBytes(surfaceForm.bytes, surfaceForm.offset, surfaceForm.length); - output.writeVInt(payload.length); - output.writeBytes(payload.bytes, 0, payload.length); - - output.close(); - return new BytesRef(byteArrayOutputStream.toByteArray()); - } - - @Override - public void parsePayload(BytesRef payload, SuggestPayload ref) throws IOException { - ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(payload.bytes, payload.offset, payload.length); - InputStreamDataInput input = new InputStreamDataInput(byteArrayInputStream); - ref.weight = input.readVLong() - 1; - int len = input.readVInt(); - ref.surfaceForm.grow(len); - ref.surfaceForm.setLength(len); - input.readBytes(ref.surfaceForm.bytes(), 0, ref.surfaceForm.length()); - len = input.readVInt(); - ref.payload.grow(len); - ref.payload.setLength(len); - input.readBytes(ref.payload.bytes(), 0, ref.payload.length()); - input.close(); - } - } - - /** - * Returns total in-heap bytes used by all suggesters. This method has CPU cost O(numIndexedFields). - * - * @param fieldNamePatterns if non-null, any completion field name matching any of these patterns will break out its in-heap bytes - * separately in the returned {@link CompletionStats} - */ - public CompletionStats completionStats(IndexReader indexReader, String ... fieldNamePatterns) { - CompletionStats completionStats = new CompletionStats(); - for (LeafReaderContext atomicReaderContext : indexReader.leaves()) { - LeafReader atomicReader = atomicReaderContext.reader(); - try { - Fields fields = atomicReader.fields(); - for (String fieldName : fields) { - Terms terms = fields.terms(fieldName); - if (terms instanceof CompletionTerms) { - CompletionTerms completionTerms = (CompletionTerms) terms; - completionStats.add(completionTerms.stats(fieldNamePatterns)); - } - } - } catch (IOException ioe) { - logger.error("Could not get completion stats", ioe); - } - } - - return completionStats; - } - - public abstract static class LookupFactory implements Accountable { - public abstract Lookup getLookup(CompletionFieldMapper2x.CompletionFieldType fieldType, - CompletionSuggestionContext suggestionContext); - public abstract CompletionStats stats(String ... 
fields); - abstract AnalyzingCompletionLookupProvider.AnalyzingSuggestHolder getAnalyzingSuggestHolder(MappedFieldType fieldType); - } -} diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/CompletionSuggestion.java b/core/src/main/java/org/elasticsearch/search/suggest/completion2x/CompletionSuggestion.java deleted file mode 100644 index 50518ee0eff..00000000000 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/CompletionSuggestion.java +++ /dev/null @@ -1,141 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.search.suggest.completion2x; - -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.text.Text; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.search.suggest.Suggest; - -import java.io.IOException; -import java.util.Map; - -public class CompletionSuggestion extends Suggest.Suggestion { - - public static final int TYPE = 2; - - public CompletionSuggestion() { - } - - public CompletionSuggestion(String name, int size) { - super(name, size); - } - - @Override - public int getType() { - return TYPE; - } - - @Override - protected Entry newEntry() { - return new Entry(); - } - - public static class Entry extends org.elasticsearch.search.suggest.Suggest.Suggestion.Entry { - - public Entry(Text text, int offset, int length) { - super(text, offset, length); - } - - protected Entry() { - super(); - } - - @Override - protected Option newOption() { - return new Option(); - } - - public static class Option extends org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option { - private BytesReference payload; - - public Option(Text text, float score, BytesReference payload) { - super(text, score); - this.payload = payload; - } - - - protected Option() { - super(); - } - - public void setPayload(BytesReference payload) { - this.payload = payload; - } - - public BytesReference getPayload() { - return payload; - } - - public String getPayloadAsString() { - return payload.utf8ToString(); - } - - public long getPayloadAsLong() { - return Long.parseLong(payload.utf8ToString()); - } - - public double getPayloadAsDouble() { - return Double.parseDouble(payload.utf8ToString()); - } - - public Map getPayloadAsMap() { - return XContentHelper.convertToMap(payload, false).v2(); - } - - @Override - public void setScore(float score) { - super.setScore(score); - } - - @Override - protected XContentBuilder innerToXContent(XContentBuilder 
builder, Params params) throws IOException { - super.innerToXContent(builder, params); - if (payload != null && payload.length() > 0) { - XContentType contentType = XContentFactory.xContentType(payload); - if (contentType == null) { - // must be a string or number - builder.field("payload", payload.utf8ToString()); - } else { - builder.rawField("payload", payload); - } - } - return builder; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - payload = in.readBytesReference(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeBytesReference(payload); - } - } - } - -} diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/CompletionTokenStream.java b/core/src/main/java/org/elasticsearch/search/suggest/completion2x/CompletionTokenStream.java deleted file mode 100644 index de81caa7e5d..00000000000 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/CompletionTokenStream.java +++ /dev/null @@ -1,173 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.search.suggest.completion2x; - -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; -import org.apache.lucene.analysis.tokenattributes.PayloadAttribute; -import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; -import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute; -import org.apache.lucene.util.AttributeImpl; -import org.apache.lucene.util.AttributeReflector; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.CharsRefBuilder; -import org.apache.lucene.util.IntsRef; -import org.apache.lucene.util.fst.Util; - -import java.io.IOException; -import java.util.Iterator; -import java.util.Set; - -public final class CompletionTokenStream extends TokenStream { - - private final PayloadAttribute payloadAttr = addAttribute(PayloadAttribute.class); - private final PositionIncrementAttribute posAttr = addAttribute(PositionIncrementAttribute.class); - private final ByteTermAttribute bytesAtt = addAttribute(ByteTermAttribute.class);; - - - private final TokenStream input; - private BytesRef payload; - private Iterator finiteStrings; - private ToFiniteStrings toFiniteStrings; - private int posInc = -1; - private static final int MAX_PATHS = 256; - private CharTermAttribute charTermAttribute; - - public CompletionTokenStream(TokenStream input, BytesRef payload, ToFiniteStrings toFiniteStrings) { - // Don't call the super(input) ctor - this is a true delegate and has a new attribute source since we consume - // the input stream entirely in toFiniteStrings(input) - this.input = input; - this.payload = payload; - this.toFiniteStrings = toFiniteStrings; - } - - @Override - public boolean incrementToken() throws IOException { - clearAttributes(); - if (finiteStrings == null) { - Set strings = toFiniteStrings.toFiniteStrings(input); - - if (strings.size() > MAX_PATHS) { - throw new IllegalArgumentException("TokenStream expanded to " + strings.size() + " finite strings. Only <= " + MAX_PATHS - + " finite strings are supported"); - } - posInc = strings.size(); - finiteStrings = strings.iterator(); - } - if (finiteStrings.hasNext()) { - posAttr.setPositionIncrement(posInc); - /* - * this posInc encodes the number of paths that this surface form - * produced. Multi Fields have the same surface form and therefore sum up - */ - posInc = 0; - Util.toBytesRef(finiteStrings.next(), bytesAtt.builder()); // now we have UTF-8 - if (charTermAttribute != null) { - charTermAttribute.setLength(0); - charTermAttribute.append(bytesAtt.toUTF16()); - } - if (payload != null) { - payloadAttr.setPayload(this.payload); - } - return true; - } - - return false; - } - - @Override - public void end() throws IOException { - super.end(); - if (posInc == -1) { - input.end(); - } - } - - @Override - public void close() throws IOException { - input.close(); - } - - public interface ToFiniteStrings { - Set toFiniteStrings(TokenStream stream) throws IOException; - } - - @Override - public void reset() throws IOException { - super.reset(); - if (hasAttribute(CharTermAttribute.class)) { - // we only create this if we really need it to safe the UTF-8 to UTF-16 conversion - charTermAttribute = getAttribute(CharTermAttribute.class); - } - finiteStrings = null; - posInc = -1; - } - - public interface ByteTermAttribute extends TermToBytesRefAttribute { - // marker interface - - /** - * Return the builder from which the term is derived. 
- */ - BytesRefBuilder builder(); - - CharSequence toUTF16(); - } - - public static final class ByteTermAttributeImpl extends AttributeImpl implements ByteTermAttribute, TermToBytesRefAttribute { - private final BytesRefBuilder bytes = new BytesRefBuilder(); - private CharsRefBuilder charsRef; - - @Override - public BytesRefBuilder builder() { - return bytes; - } - - @Override - public BytesRef getBytesRef() { - return bytes.get(); - } - - @Override - public void clear() { - bytes.clear(); - } - - @Override - public void reflectWith(AttributeReflector reflector) { - - } - - @Override - public void copyTo(AttributeImpl target) { - ByteTermAttributeImpl other = (ByteTermAttributeImpl) target; - other.bytes.copyBytes(bytes); - } - - @Override - public CharSequence toUTF16() { - if (charsRef == null) { - charsRef = new CharsRefBuilder(); - } - charsRef.copyUTF8Bytes(getBytesRef()); - return charsRef.get(); - } - } -} diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/PayloadProcessor.java b/core/src/main/java/org/elasticsearch/search/suggest/completion2x/PayloadProcessor.java deleted file mode 100644 index eb857ce61e6..00000000000 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/PayloadProcessor.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.suggest.completion2x; - -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefBuilder; - -import java.io.IOException; - -interface PayloadProcessor { - - BytesRef buildPayload(BytesRef surfaceForm, long weight, BytesRef payload) throws IOException; - - void parsePayload(BytesRef payload, SuggestPayload ref) throws IOException; - - static class SuggestPayload { - final BytesRefBuilder payload = new BytesRefBuilder(); - long weight = 0; - final BytesRefBuilder surfaceForm = new BytesRefBuilder(); - } -} diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/CategoryContextMapping.java b/core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/CategoryContextMapping.java deleted file mode 100644 index 775d8b031ad..00000000000 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/CategoryContextMapping.java +++ /dev/null @@ -1,374 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
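CompletionTokenStream above delegates the expansion of the analyzed input into finite strings to a ToFiniteStrings callback, which AnalyzingCompletionLookupProvider implements through its prototype XAnalyzingSuggester. A minimal sketch of how those pieces fit together at index time; the analyzedStream argument, the example weight and the JSON payload are illustrative only:

    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.util.BytesRef;
    import org.elasticsearch.search.suggest.completion2x.AnalyzingCompletionLookupProvider;
    import org.elasticsearch.search.suggest.completion2x.CompletionTokenStream;

    import java.io.IOException;

    final class CompletionIndexingSketch {

        static TokenStream wrap(TokenStream analyzedStream) throws IOException {
            // preserveSep, preservePositionIncrements, hasPayloads
            AnalyzingCompletionLookupProvider provider = new AnalyzingCompletionLookupProvider(true, true, true);
            // The provider packs surface form, weight and payload into the bytes that
            // end up as the posting payload consumed by the completion postings format.
            BytesRef payload = provider.buildPayload(new BytesRef("surface form"), 42, new BytesRef("{\"id\":1}"));
            // The provider also serves as the ToFiniteStrings callback of the stream.
            return new CompletionTokenStream(analyzedStream, payload, provider);
        }
    }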
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.suggest.completion2x.context; - -import org.apache.lucene.analysis.PrefixAnalyzer; -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.index.IndexableField; -import org.apache.lucene.util.automaton.Automata; -import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.Operations; -import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentParser.Token; -import org.elasticsearch.index.mapper.ParseContext; -import org.elasticsearch.index.mapper.ParseContext.Document; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -/** - * The {@link CategoryContextMapping} is used to define a {@link ContextMapping} that - * references a field within a document. The value of the field in turn will be - * used to setup the suggestions made by the completion suggester. - */ -public class CategoryContextMapping extends ContextMapping { - - protected static final String TYPE = "category"; - - private static final String FIELD_FIELDNAME = "path"; - private static final String DEFAULT_FIELDNAME = "_type"; - - private static final Iterable EMPTY_VALUES = Collections.emptyList(); - - private final String fieldName; - private final Iterable defaultValues; - private final FieldConfig defaultConfig; - - /** - * Create a new {@link CategoryContextMapping} with the default field - * [_type] - */ - public CategoryContextMapping(String name) { - this(name, DEFAULT_FIELDNAME, EMPTY_VALUES); - } - - /** - * Create a new {@link CategoryContextMapping} with the default field - * [_type] - */ - public CategoryContextMapping(String name, String fieldName) { - this(name, fieldName, EMPTY_VALUES); - } - - /** - * Create a new {@link CategoryContextMapping} with the default field - * [_type] - */ - public CategoryContextMapping(String name, Iterable defaultValues) { - this(name, DEFAULT_FIELDNAME, defaultValues); - } - - /** - * Create a new {@link CategoryContextMapping} with the default field - * [_type] - */ - public CategoryContextMapping(String name, String fieldName, Iterable defaultValues) { - super(TYPE, name); - this.fieldName = fieldName; - this.defaultValues = defaultValues; - this.defaultConfig = new FieldConfig(fieldName, defaultValues, null); - } - - /** - * Name of the field used by this {@link CategoryContextMapping} - */ - public String getFieldName() { - return fieldName; - } - - public Iterable getDefaultValues() { - return defaultValues; - } - - @Override - public FieldConfig defaultConfig() { - return defaultConfig; - } - - /** - * Load the specification of a {@link CategoryContextMapping} - * - * @param name - * name of the field to use. 
If null default field - * will be used - * @return new {@link CategoryContextMapping} - */ - protected static CategoryContextMapping load(String name, Map config) throws ElasticsearchParseException { - CategoryContextMapping.Builder mapping = new CategoryContextMapping.Builder(name); - - Object fieldName = config.get(FIELD_FIELDNAME); - Object defaultValues = config.get(FIELD_MISSING); - - if (fieldName != null) { - mapping.fieldName(fieldName.toString()); - config.remove(FIELD_FIELDNAME); - } - - if (defaultValues != null) { - if (defaultValues instanceof Iterable) { - for (Object value : (Iterable) defaultValues) { - mapping.addDefaultValue(value.toString()); - } - } else { - mapping.addDefaultValue(defaultValues.toString()); - } - config.remove(FIELD_MISSING); - } - - return mapping.build(); - } - - @Override - protected XContentBuilder toInnerXContent(XContentBuilder builder, Params params) throws IOException { - if (fieldName != null) { - builder.field(FIELD_FIELDNAME, fieldName); - } - builder.startArray(FIELD_MISSING); - for (CharSequence value : defaultValues) { - builder.value(value); - } - builder.endArray(); - return builder; - } - - @Override - public ContextConfig parseContext(ParseContext parseContext, XContentParser parser) throws IOException, ElasticsearchParseException { - Token token = parser.currentToken(); - if (token == Token.VALUE_NULL) { - return new FieldConfig(fieldName, defaultValues, null); - } else if (token == Token.VALUE_STRING) { - return new FieldConfig(fieldName, null, Collections.singleton(parser.text())); - } else if (token == Token.VALUE_NUMBER) { - return new FieldConfig(fieldName, null, Collections.singleton(parser.text())); - } else if (token == Token.VALUE_BOOLEAN) { - return new FieldConfig(fieldName, null, Collections.singleton(parser.text())); - } else if (token == Token.START_ARRAY) { - ArrayList values = new ArrayList<>(); - while((token = parser.nextToken()) != Token.END_ARRAY) { - values.add(parser.text()); - } - if(values.isEmpty()) { - throw new ElasticsearchParseException("FieldConfig must contain a least one category"); - } - return new FieldConfig(fieldName, null, values); - } else { - throw new ElasticsearchParseException("FieldConfig must be either [null], a string or a list of strings"); - } - } - - @Override - public FieldQuery parseQuery(String name, XContentParser parser) throws IOException, ElasticsearchParseException { - Iterable values; - Token token = parser.currentToken(); - if (token == Token.START_ARRAY) { - ArrayList list = new ArrayList<>(); - while ((token = parser.nextToken()) != Token.END_ARRAY) { - list.add(parser.text()); - } - values = list; - } else if (token == Token.VALUE_NULL) { - values = defaultValues; - } else { - values = Collections.singleton(parser.text()); - } - - return new FieldQuery(name, values); - } - - public static FieldQuery query(String name, CharSequence... 
fieldvalues) { - return query(name, Arrays.asList(fieldvalues)); - } - - public static FieldQuery query(String name, Iterable fieldvalues) { - return new FieldQuery(name, fieldvalues); - } - - @Override - public boolean equals(Object obj) { - if (obj instanceof CategoryContextMapping) { - CategoryContextMapping other = (CategoryContextMapping) obj; - if (this.fieldName.equals(other.fieldName)) { - return Objects.deepEquals(this.defaultValues, other.defaultValues); - } - } - return false; - } - - @Override - public int hashCode() { - int hashCode = fieldName.hashCode(); - for (CharSequence seq : defaultValues) { - hashCode = 31 * hashCode + seq.hashCode(); - } - return hashCode; - } - - private static class FieldConfig extends ContextConfig { - - private final String fieldname; - private final Iterable defaultValues; - private final Iterable values; - - public FieldConfig(String fieldname, Iterable defaultValues, Iterable values) { - this.fieldname = fieldname; - this.defaultValues = defaultValues; - this.values = values; - } - - @Override - protected TokenStream wrapTokenStream(Document doc, TokenStream stream) { - if (values != null) { - return new PrefixAnalyzer.PrefixTokenFilter(stream, ContextMapping.SEPARATOR, values); - // if fieldname is default, BUT our default values are set, we take that one - } else if ((doc.getFields(fieldname).length == 0 - || fieldname.equals(DEFAULT_FIELDNAME)) && defaultValues.iterator().hasNext()) { - return new PrefixAnalyzer.PrefixTokenFilter(stream, ContextMapping.SEPARATOR, defaultValues); - } else { - IndexableField[] fields = doc.getFields(fieldname); - ArrayList values = new ArrayList<>(fields.length); - for (int i = 0; i < fields.length; i++) { - values.add(fields[i].stringValue()); - } - - return new PrefixAnalyzer.PrefixTokenFilter(stream, ContextMapping.SEPARATOR, values); - } - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder("FieldConfig(" + fieldname + " = ["); - if (this.values != null && this.values.iterator().hasNext()) { - final Iterator valuesIterator = this.values.iterator(); - sb.append("("); - while (valuesIterator.hasNext()) { - sb.append(valuesIterator.next()); - if (valuesIterator.hasNext()) { - sb.append(", "); - } - } - sb.append(")"); - } - if (this.defaultValues != null && this.defaultValues.iterator().hasNext()) { - final Iterator defaultValuesIterator = this.defaultValues.iterator(); - sb.append(" default("); - while (defaultValuesIterator.hasNext()) { - sb.append(defaultValuesIterator.next()); - if (defaultValuesIterator.hasNext()) { - sb.append(", "); - } - } - sb.append(")"); - } - return sb.append("])").toString(); - } - - } - - private static class FieldQuery extends ContextQuery { - - private final Iterable values; - - public FieldQuery(String name, Iterable values) { - super(name); - this.values = values; - } - - @Override - public Automaton toAutomaton() { - List automatons = new ArrayList<>(); - for (CharSequence value : values) { - automatons.add(Automata.makeString(value.toString())); - } - return Operations.union(automatons); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startArray(name); - for (CharSequence value : values) { - builder.value(value); - } - builder.endArray(); - return builder; - } - } - - public static class Builder extends ContextBuilder { - - private String fieldname; - private List defaultValues = new ArrayList<>(); - - public Builder(String name) { - this(name, DEFAULT_FIELDNAME); - 
} - - public Builder(String name, String fieldname) { - super(name); - this.fieldname = fieldname; - } - - /** - * Set the name of the field to use - */ - public Builder fieldName(String fieldname) { - this.fieldname = fieldname; - return this; - } - - /** - * Add value to the default values of the mapping - */ - public Builder addDefaultValue(String defaultValue) { - this.defaultValues.add(defaultValue); - return this; - } - - /** - * Add set of default values to the mapping - */ - public Builder addDefaultValues(String... defaultValues) { - Collections.addAll(this.defaultValues, defaultValues); - return this; - } - - /** - * Add set of default values to the mapping - */ - public Builder addDefaultValues(Iterable defaultValues) { - for (String defaultValue : defaultValues) { - this.defaultValues.add(defaultValue); - } - return this; - } - - @Override - public CategoryContextMapping build() { - return new CategoryContextMapping(name, fieldname, defaultValues); - } - } -} diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/ContextBuilder.java b/core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/ContextBuilder.java deleted file mode 100644 index 08917637f69..00000000000 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/ContextBuilder.java +++ /dev/null @@ -1,136 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
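The Builder above is the public way to assemble a CategoryContextMapping with a custom field and default values. A short usage sketch; the context name, field name and default value are illustrative:

    // Maps the context "colour" onto the document field "colour_field",
    // falling back to "red" when a document does not provide the field.
    CategoryContextMapping colourContext = new CategoryContextMapping.Builder("colour", "colour_field")
            .addDefaultValue("red")
            .build();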
- */ - -package org.elasticsearch.search.suggest.completion2x.context; - -import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.Version; -import org.elasticsearch.index.mapper.DocumentMapperParser; -import org.elasticsearch.index.mapper.MapperParsingException; - -import java.util.Map; -import java.util.Map.Entry; -import java.util.SortedMap; -import java.util.TreeMap; - -public abstract class ContextBuilder { - - protected String name; - - public ContextBuilder(String name) { - this.name = name; - } - - public abstract E build(); - - /** - * Create a new {@link GeolocationContextMapping} - */ - public static GeolocationContextMapping.Builder location(String name) { - return new GeolocationContextMapping.Builder(name); - } - - /** - * Create a new {@link GeolocationContextMapping} with given precision and - * neighborhood usage - * - * @param precision geohash length - * @param neighbors use neighbor cells - */ - public static GeolocationContextMapping.Builder location(String name, int precision, boolean neighbors) { - return new GeolocationContextMapping.Builder(name, neighbors, precision); - } - - /** - * Create a new {@link CategoryContextMapping} - */ - public static CategoryContextMapping.Builder category(String name) { - return new CategoryContextMapping.Builder(name, null); - } - - /** - * Create a new {@link CategoryContextMapping} with default category - * - * @param defaultCategory category to use, if it is not provided - */ - public static CategoryContextMapping.Builder category(String name, String defaultCategory) { - return new CategoryContextMapping.Builder(name, null).addDefaultValue(defaultCategory); - } - - /** - * Create a new {@link CategoryContextMapping} - * - * @param fieldname - * name of the field to use - */ - public static CategoryContextMapping.Builder reference(String name, String fieldname) { - return new CategoryContextMapping.Builder(name, fieldname); - } - - /** - * Create a new {@link CategoryContextMapping} - * - * @param fieldname name of the field to use - * @param defaultValues values to use, if the document not provides - * a field with the given name - */ - public static CategoryContextMapping.Builder reference(String name, String fieldname, Iterable defaultValues) { - return new CategoryContextMapping.Builder(name, fieldname).addDefaultValues(defaultValues); - } - - public static SortedMap loadMappings(Object configuration, Version indexVersionCreated) - throws ElasticsearchParseException { - if (configuration instanceof Map) { - Map configurations = (Map)configuration; - SortedMap mappings = new TreeMap<>(); - for (Entry config : configurations.entrySet()) { - String name = config.getKey(); - mappings.put(name, loadMapping(name, (Map) config.getValue(), indexVersionCreated)); - } - return mappings; - } else if (configuration == null) { - return ContextMapping.EMPTY_MAPPING; - } else { - throw new ElasticsearchParseException("no valid context configuration"); - } - } - - protected static ContextMapping loadMapping(String name, Map config, Version indexVersionCreated) - throws ElasticsearchParseException { - final Object argType = config.get(ContextMapping.FIELD_TYPE); - - if (argType == null) { - throw new ElasticsearchParseException("missing [{}] in context mapping", ContextMapping.FIELD_TYPE); - } - - final String type = argType.toString(); - ContextMapping contextMapping; - if (GeolocationContextMapping.TYPE.equals(type)) { - contextMapping = GeolocationContextMapping.load(name, config); - } else if 
(CategoryContextMapping.TYPE.equals(type)) { - contextMapping = CategoryContextMapping.load(name, config); - } else { - throw new ElasticsearchParseException("unknown context type [{}]", type); - } - config.remove(ContextMapping.FIELD_TYPE); - DocumentMapperParser.checkNoRemainingFields(name, config, indexVersionCreated); - - return contextMapping; - } -} diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/ContextMapping.java b/core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/ContextMapping.java deleted file mode 100644 index b92d2e1b614..00000000000 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/ContextMapping.java +++ /dev/null @@ -1,319 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.suggest.completion2x.context; - -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.search.suggest.analyzing.XAnalyzingSuggester; -import org.apache.lucene.util.automaton.Automata; -import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.Operations; -import org.apache.lucene.util.fst.FST; -import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentParser.Token; -import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.index.mapper.ParseContext; -import org.elasticsearch.index.mapper.ParseContext.Document; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.SortedMap; - -/** - * A {@link ContextMapping} is used t define a context that may used - * in conjunction with a suggester. To define a suggester that depends on a - * specific context derived class of {@link ContextMapping} will be - * used to specify the kind of additional information required in order to make - * suggestions. 
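ContextBuilder.loadMappings above is what turns the raw context section of a completion field mapping into ContextMapping instances, dispatching on the "type" key and rejecting unknown keys. A minimal sketch of feeding it a single category context; the context and field names are illustrative, and the configuration map stands in for the parsed mapping JSON:

    import org.elasticsearch.Version;
    import org.elasticsearch.search.suggest.completion2x.context.ContextBuilder;
    import org.elasticsearch.search.suggest.completion2x.context.ContextMapping;

    import java.util.HashMap;
    import java.util.Map;
    import java.util.SortedMap;

    final class ContextMappingConfigSketch {

        static SortedMap<String, ContextMapping> load() {
            // Equivalent of a mapping fragment such as
            // "context": { "colour": { "type": "category", "path": "colour_field" } }
            Map<String, Object> colour = new HashMap<>();
            colour.put("type", "category");
            colour.put("path", "colour_field");
            Map<String, Object> configuration = new HashMap<>();
            configuration.put("colour", colour);
            return ContextBuilder.loadMappings(configuration, Version.CURRENT);
        }
    }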
- */ -public abstract class ContextMapping implements ToXContent { - - /** Character used to separate several contexts */ - public static final char SEPARATOR = '\u001D'; - - /** Dummy Context Mapping that should be used if no context is used*/ - public static final SortedMap EMPTY_MAPPING = Collections.emptySortedMap(); - - /** Dummy Context Config matching the Dummy Mapping by providing an empty context*/ - public static final SortedMap EMPTY_CONFIG = Collections.emptySortedMap(); - - /** Dummy Context matching the Dummy Mapping by not wrapping a {@link TokenStream} */ - public static final Context EMPTY_CONTEXT = new Context(EMPTY_CONFIG, null); - - public static final String FIELD_VALUE = "value"; - public static final String FIELD_MISSING = "default"; - public static final String FIELD_TYPE = "type"; - - protected final String type; // Type of the Contextmapping - protected final String name; - - /** - * Define a new context mapping of a specific type - * - * @param type - * name of the new context mapping - */ - protected ContextMapping(String type, String name) { - super(); - this.type = type; - this.name = name; - } - - /** - * @return the type name of the context - */ - protected String type() { - return type; - } - - /** - * @return the name/id of the context - */ - public String name() { - return name; - } - - @Override - public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(name); - builder.field(FIELD_TYPE, type); - toInnerXContent(builder, params); - builder.endObject(); - return builder; - } - - /** - * A {@link ContextMapping} combined with the information provided by a document - * form a {@link ContextConfig} which is used to build the underlying FST. - * - * @param parseContext context of parsing phase - * @param parser {@link XContentParser} used to read and setup the configuration - * @return A {@link ContextConfig} related to this mapping - * - */ - public abstract ContextConfig parseContext(ParseContext parseContext, XContentParser parser) - throws IOException, ElasticsearchParseException; - - public abstract ContextConfig defaultConfig(); - - /** - * Parse a query according to the context. 
Parsing starts at parsers current position - * - * @param name name of the context - * @param parser {@link XContentParser} providing the data of the query - * - * @return {@link ContextQuery} according to this mapping - * - */ - public abstract ContextQuery parseQuery(String name, XContentParser parser) throws IOException, ElasticsearchParseException; - - /** - * Since every context mapping is assumed to have a name given by the field name of an context object, this - * method is used to build the value used to serialize the mapping - * - * @param builder builder to append the mapping to - * @param params parameters passed to the builder - * - * @return the builder used - * - */ - protected abstract XContentBuilder toInnerXContent(XContentBuilder builder, Params params) throws IOException; - - /** - * Test equality of two mapping - * - * @param thisMappings first mapping - * @param otherMappings second mapping - * - * @return true if both arguments are equal - */ - public static boolean mappingsAreEqual(SortedMap thisMappings, - SortedMap otherMappings) { - return Objects.equals(thisMappings, otherMappings); - } - - @Override - public String toString() { - try { - return toXContent(JsonXContent.contentBuilder(), ToXContent.EMPTY_PARAMS).string(); - } catch (IOException e) { - return super.toString(); - } - } - - /** - * A collection of {@link ContextMapping}s, their {@link ContextConfig}uration and a - * Document form a complete {@link Context}. Since this Object provides all information used - * to setup a suggestion, it can be used to wrap the entire {@link TokenStream} used to build a - * path within the {@link FST}. - */ - public static class Context { - - final SortedMap contexts; - final Document doc; - - public Context(SortedMap contexts, Document doc) { - super(); - this.contexts = contexts; - this.doc = doc; - } - - /** - * Wrap the {@link TokenStream} according to the provided informations of {@link ContextConfig} - * and a related {@link Document}. - * - * @param tokenStream {@link TokenStream} to wrap - * - * @return wrapped token stream - */ - public TokenStream wrapTokenStream(TokenStream tokenStream) { - for (ContextConfig context : contexts.values()) { - tokenStream = context.wrapTokenStream(doc, tokenStream); - } - return tokenStream; - } - } - - /** - * A {@link ContextMapping} combined with the information provided by a document - * form a {@link ContextConfig} which is used to build the underlying {@link FST}. This class hold - * a simple method wrapping a {@link TokenStream} by provided document informations. - */ - public abstract static class ContextConfig { - - /** - * Wrap a {@link TokenStream} for building suggestions to use context informations - * provided by a document or a {@link ContextMapping} - * - * @param doc document related to the stream - * @param stream original stream used to build the underlying {@link FST} - * - * @return A new {@link TokenStream} providing additional context information - */ - protected abstract TokenStream wrapTokenStream(Document doc, TokenStream stream); - - } - - /** - * A {@link ContextQuery} defines the context information for a specific {@link ContextMapping} - * defined within a suggestion request. 
According to the parameters set in the request and the - * {@link ContextMapping} such a query is used to wrap the {@link TokenStream} of the actual - * suggestion request into a {@link TokenStream} with the context settings - */ - public abstract static class ContextQuery implements ToXContent { - - protected final String name; - - protected ContextQuery(String name) { - this.name = name; - } - - public String name() { - return name; - } - - /** - * Create a automaton for a given context query this automaton will be used - * to find the matching paths with the fst - * - * @param preserveSep set an additional char (XAnalyzingSuggester.SEP_LABEL) between each context query - * @param queries list of {@link ContextQuery} defining the lookup context - * - * @return Automaton matching the given Query - */ - public static Automaton toAutomaton(boolean preserveSep, Iterable queries) { - Automaton a = Automata.makeEmptyString(); - - Automaton gap = Automata.makeChar(ContextMapping.SEPARATOR); - if (preserveSep) { - // if separators are preserved the fst contains a SEP_LABEL - // behind each gap. To have a matching automaton, we need to - // include the SEP_LABEL in the query as well - gap = Operations.concatenate(gap, Automata.makeChar(XAnalyzingSuggester.SEP_LABEL)); - } - - for (ContextQuery query : queries) { - a = Operations.concatenate(Arrays.asList(query.toAutomaton(), gap, a)); - } - - // TODO: should we limit this? Do any of our ContextQuery impls really create exponential regexps? - // GeoQuery looks safe (union of strings). - return Operations.determinize(a, Integer.MAX_VALUE); - } - - /** - * Build a LookUp Automaton for this context. - * @return LookUp Automaton - */ - protected abstract Automaton toAutomaton(); - - /** - * Parse a set of {@link ContextQuery} according to a given mapping - * @param mappings List of mapping defined y the suggest field - * @param parser parser holding the settings of the queries. The parsers - * current token is assumed hold an array. The number of elements - * in this array must match the number of elements in the mappings. 
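The ContextQuery.toAutomaton loop above is the core of the removed 2.x context lookup. A minimal standalone sketch, using only Lucene's automaton utilities, mirrors that loop so the concatenation order is easier to see; the class name and the context strings "u33" and "2016" are invented for illustration, and the preserveSep/SEP_LABEL branch is omitted:

    import org.apache.lucene.util.automaton.Automata;
    import org.apache.lucene.util.automaton.Automaton;
    import org.apache.lucene.util.automaton.Operations;

    import java.util.Arrays;

    class ContextAutomatonSketch {
        static final char SEPARATOR = '\u001D'; // same value as ContextMapping.SEPARATOR

        public static void main(String[] args) {
            Automaton geo = Automata.makeString("u33");       // stand-in for a geo context query
            Automaton category = Automata.makeString("2016"); // stand-in for a second context query
            Automaton gap = Automata.makeChar(SEPARATOR);

            // Mirrors the loop above: a = concat(query, gap, a), so the last query processed
            // ends up first in the accepted string.
            Automaton a = Automata.makeEmptyString();
            for (Automaton query : Arrays.asList(category, geo)) {
                a = Operations.concatenate(Arrays.asList(query, gap, a));
            }
            a = Operations.determinize(a, Integer.MAX_VALUE);

            // The suggester intersects this automaton with the FST paths; here we only check
            // that the encoded prefix [geo][SEP][category][SEP] is accepted.
            System.out.println(Operations.run(a, "u33" + SEPARATOR + "2016" + SEPARATOR)); // true
        }
    }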
- * @return List of context queries - * - * @throws IOException if something unexpected happened on the underlying stream - * @throws ElasticsearchParseException if the list of queries could not be parsed - */ - public static List parseQueries(Map mappings, XContentParser parser) - throws IOException, ElasticsearchParseException { - - Map querySet = new HashMap<>(); - Token token = parser.currentToken(); - if(token == Token.START_OBJECT) { - while ((token = parser.nextToken()) != Token.END_OBJECT) { - String name = parser.currentName(); - ContextMapping mapping = mappings.get(name); - if (mapping == null) { - throw new ElasticsearchParseException("no mapping defined for [{}]", name); - } - parser.nextToken(); - querySet.put(name, mapping.parseQuery(name, parser)); - } - } - - List queries = new ArrayList<>(mappings.size()); - for (ContextMapping mapping : mappings.values()) { - queries.add(querySet.get(mapping.name)); - } - return queries; - } - - @Override - public String toString() { - try { - return toXContent(JsonXContent.contentBuilder(), ToXContent.EMPTY_PARAMS).string(); - } catch (IOException e) { - return super.toString(); - } - } - } -} diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/GeolocationContextMapping.java b/core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/GeolocationContextMapping.java deleted file mode 100644 index 5eed19ca00a..00000000000 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/GeolocationContextMapping.java +++ /dev/null @@ -1,750 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.search.suggest.completion2x.context; - -import com.carrotsearch.hppc.IntHashSet; -import org.apache.lucene.analysis.PrefixAnalyzer.PrefixTokenFilter; -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.document.StringField; -import org.apache.lucene.index.DocValuesType; -import org.apache.lucene.index.IndexableField; -import org.apache.lucene.spatial.geopoint.document.GeoPointField; -import org.apache.lucene.util.automaton.Automata; -import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.Operations; -import org.apache.lucene.util.fst.FST; -import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.geo.GeoHashUtils; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.geo.GeoUtils; -import org.elasticsearch.common.unit.DistanceUnit; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentParser.Token; -import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.GeoPointFieldMapper; -import org.elasticsearch.index.mapper.ParseContext; -import org.elasticsearch.index.mapper.ParseContext.Document; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; -import java.util.Iterator; -import java.util.Map; - -/** - * The {@link GeolocationContextMapping} allows to take GeoInfomation into account - * during building suggestions. The mapping itself works with geohashes - * explicitly and is configured by three parameters: - *
 - * <ul>
 - *   <li>precision: length of the geohash indexed as prefix of the
 - *   completion field</li>
 - *   <li>neighbors: Should the neighbor cells of the deepest geohash
 - *   level also be indexed as alternatives to the actual geohash</li>
 - *   <li>location: (optional) location assumed if it is not provided</li>
 - * </ul>
- * Internally this mapping wraps the suggestions into a form - * [geohash][suggestion]. If the neighbor option is set the cells - * next to the cell on the deepest geohash level ( precision) will - * be indexed as well. The {@link TokenStream} used to build the {@link FST} for - * suggestion will be wrapped into a {@link PrefixTokenFilter} managing these - * geohases as prefixes. - */ -public class GeolocationContextMapping extends ContextMapping { - - public static final String TYPE = "geo"; - - public static final String FIELD_PRECISION = "precision"; - public static final String FIELD_NEIGHBORS = "neighbors"; - public static final String FIELD_FIELDNAME = "path"; - - private final Collection defaultLocations; - private final int[] precision; - private final boolean neighbors; - private final String fieldName; - private final GeoConfig defaultConfig; - - /** - * Create a new {@link GeolocationContextMapping} with a given precision - * - * @param precision - * length of the geohashes - * @param neighbors - * should neighbors be indexed - * @param defaultLocations - * location to use, if it is not provided by the document - */ - protected GeolocationContextMapping(String name, int[] precision, boolean neighbors, - Collection defaultLocations, String fieldName) { - super(TYPE, name); - this.precision = precision; - this.neighbors = neighbors; - this.defaultLocations = defaultLocations; - this.fieldName = fieldName; - this.defaultConfig = new GeoConfig(this, defaultLocations); - } - - /** - * load a {@link GeolocationContextMapping} by configuration. Such a configuration - * can set the parameters - *
 - * <ul>
 - *   <li>precision [String, Double,
 - *   Float or Integer] defines the length of the
 - *   underlying geohash</li>
 - *   <li>defaultLocation [String] defines the location to use if
 - *   it is not provided by the document</li>
 - *   <li>neighbors [Boolean] defines if the last level of the
 - *   geohash should be extended by neighbor cells</li>
 - * </ul>
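A rough sketch of the kind of configuration map that load(name, config) consumes, using the parameter names from the list above; every concrete value, and the document field name "pin", is invented for illustration:

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.Map;

    // Illustrative only: the shape of a 2.x geo context configuration, as the
    // package-internal GeolocationContextMapping.load(name, config) expects it.
    class GeoContextConfigSketch {
        static Map<String, Object> exampleConfig() {
            Map<String, Object> config = new HashMap<>();
            config.put("precision", Arrays.asList(1, 3, 5)); // geohash lengths indexed as prefixes
            config.put("neighbors", true);                   // also index neighbor cells at the deepest level
            config.put("default", "u33");                    // fallback geohash if the document provides none
            config.put("path", "pin");                       // read the location from the document field "pin"
            // GeolocationContextMapping.load("location", config) would turn this map into a mapping.
            return config;
        }
    }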
- * - * @param config - * Configuration for {@link GeolocationContextMapping} - * @return new {@link GeolocationContextMapping} configured by the parameters of - * config - */ - protected static GeolocationContextMapping load(String name, Map config) { - if (!config.containsKey(FIELD_PRECISION)) { - throw new ElasticsearchParseException("field [precision] is missing"); - } - - final GeolocationContextMapping.Builder builder = new GeolocationContextMapping.Builder(name); - - if (config != null) { - final Object configPrecision = config.get(FIELD_PRECISION); - if (configPrecision == null) { - // ignore precision - } else if (configPrecision instanceof Integer) { - builder.precision((Integer) configPrecision); - config.remove(FIELD_PRECISION); - } else if (configPrecision instanceof Long) { - builder.precision((Long) configPrecision); - config.remove(FIELD_PRECISION); - } else if (configPrecision instanceof Double) { - builder.precision((Double) configPrecision); - config.remove(FIELD_PRECISION); - } else if (configPrecision instanceof Float) { - builder.precision((Float) configPrecision); - config.remove(FIELD_PRECISION); - } else if (configPrecision instanceof Iterable) { - for (Object precision : (Iterable)configPrecision) { - if (precision instanceof Integer) { - builder.precision((Integer) precision); - } else if (precision instanceof Long) { - builder.precision((Long) precision); - } else if (precision instanceof Double) { - builder.precision((Double) precision); - } else if (precision instanceof Float) { - builder.precision((Float) precision); - } else { - builder.precision(precision.toString()); - } - } - config.remove(FIELD_PRECISION); - } else { - builder.precision(configPrecision.toString()); - config.remove(FIELD_PRECISION); - } - - final Object configNeighbors = config.get(FIELD_NEIGHBORS); - if (configNeighbors != null) { - builder.neighbors((Boolean) configNeighbors); - config.remove(FIELD_NEIGHBORS); - } - - final Object def = config.get(FIELD_MISSING); - if (def != null) { - if (def instanceof Iterable) { - for (Object location : (Iterable)def) { - builder.addDefaultLocation(location.toString()); - } - } else if (def instanceof String) { - builder.addDefaultLocation(def.toString()); - } else if (def instanceof Map) { - Map latlonMap = (Map) def; - if (!latlonMap.containsKey("lat") || !(latlonMap.get("lat") instanceof Double)) { - throw new ElasticsearchParseException( - "field [{}] map must have field lat and a valid latitude", FIELD_MISSING); - } - if (!latlonMap.containsKey("lon") || !(latlonMap.get("lon") instanceof Double)) { - throw new ElasticsearchParseException( - "field [{}] map must have field lon and a valid longitude", FIELD_MISSING); - } - builder.addDefaultLocation( - Double.valueOf(latlonMap.get("lat").toString()), Double.valueOf(latlonMap.get("lon").toString())); - } else { - throw new ElasticsearchParseException("field [{}] must be of type string or list", FIELD_MISSING); - } - config.remove(FIELD_MISSING); - } - - final Object fieldName = config.get(FIELD_FIELDNAME); - if (fieldName != null) { - builder.field(fieldName.toString()); - config.remove(FIELD_FIELDNAME); - } - } - return builder.build(); - } - - @Override - protected XContentBuilder toInnerXContent(XContentBuilder builder, Params params) throws IOException { - builder.array(FIELD_PRECISION, precision); - builder.field(FIELD_NEIGHBORS, neighbors); - if (defaultLocations != null) { - builder.startArray(FIELD_MISSING); - for (String defaultLocation : defaultLocations) { - 
builder.value(defaultLocation); - } - builder.endArray(); - } - if (fieldName != null) { - builder.field(FIELD_FIELDNAME, fieldName); - } - return builder; - } - - protected static Collection parseSinglePointOrList(XContentParser parser) throws IOException { - Token token = parser.currentToken(); - if(token == Token.START_ARRAY) { - token = parser.nextToken(); - // Test if value is a single point in [lon, lat] format - if(token == Token.VALUE_NUMBER) { - double lon = parser.doubleValue(); - if(parser.nextToken() == Token.VALUE_NUMBER) { - double lat = parser.doubleValue(); - if(parser.nextToken() == Token.END_ARRAY) { - return Collections.singleton(GeoHashUtils.stringEncode(lon, lat)); - } else { - throw new ElasticsearchParseException("only two values expected"); - } - } else { - throw new ElasticsearchParseException("latitue must be a numeric value"); - } - } else { - // otherwise it's a list of locations - ArrayList result = new ArrayList<>(); - while (token != Token.END_ARRAY) { - result.add(GeoUtils.parseGeoPoint(parser).geohash()); - token = parser.nextToken(); //infinite loop without this line - } - return result; - } - } else { - // or a single location - return Collections.singleton(GeoUtils.parseGeoPoint(parser).geohash()); - } - } - - @Override - public ContextConfig defaultConfig() { - return defaultConfig; - } - - @Override - public ContextConfig parseContext(ParseContext parseContext, XContentParser parser) - throws IOException, ElasticsearchParseException { - - if(fieldName != null) { - FieldMapper mapper = parseContext.docMapper().mappers().getMapper(fieldName); - if(!(mapper instanceof GeoPointFieldMapper)) { - throw new ElasticsearchParseException("referenced field must be mapped to geo_point"); - } - } - - Collection locations; - if(parser.currentToken() == Token.VALUE_NULL) { - locations = null; - } else { - locations = parseSinglePointOrList(parser); - } - return new GeoConfig(this, locations); - } - - /** - * Create a new geolocation query from a given GeoPoint - * - * @param point - * query location - * @return new geolocation query - */ - public static GeoQuery query(String name, GeoPoint point) { - return query(name, point.getGeohash()); - } - - /** - * Create a new geolocation query from a given geocoordinate - * - * @param lat - * latitude of the location - * @param lon - * longitude of the location - * @return new geolocation query - */ - public static GeoQuery query(String name, double lat, double lon, int ... precisions) { - return query(name, GeoHashUtils.stringEncode(lon, lat), precisions); - } - - public static GeoQuery query(String name, double lat, double lon, String ... precisions) { - int precisionInts[] = new int[precisions.length]; - for (int i = 0 ; i < precisions.length; i++) { - precisionInts[i] = GeoUtils.geoHashLevelsForPrecision(precisions[i]); - } - return query(name, GeoHashUtils.stringEncode(lon, lat), precisionInts); - } - - /** - * Create a new geolocation query from a given geohash - * - * @param geohash - * geohash of the location - * @return new geolocation query - */ - public static GeoQuery query(String name, String geohash, int ... 
precisions) { - return new GeoQuery(name, geohash, precisions); - } - - private static int parsePrecision(XContentParser parser) throws IOException, ElasticsearchParseException { - switch (parser.currentToken()) { - case VALUE_STRING: - return GeoUtils.geoHashLevelsForPrecision(parser.text()); - case VALUE_NUMBER: - switch (parser.numberType()) { - case INT: - case LONG: - return parser.intValue(); - default: - return GeoUtils.geoHashLevelsForPrecision(parser.doubleValue()); - } - default: - throw new ElasticsearchParseException("invalid precision value"); - } - } - - @Override - public GeoQuery parseQuery(String name, XContentParser parser) throws IOException, ElasticsearchParseException { - if (parser.currentToken() == Token.START_OBJECT) { - double lat = Double.NaN; - double lon = Double.NaN; - GeoPoint point = null; - int[] precision = null; - - while (parser.nextToken() != Token.END_OBJECT) { - final String fieldName = parser.currentName(); - if("lat".equals(fieldName)) { - if(point == null) { - parser.nextToken(); - switch (parser.currentToken()) { - case VALUE_NUMBER: - case VALUE_STRING: - lat = parser.doubleValue(true); - break; - default: - throw new ElasticsearchParseException("latitude must be a number"); - } - } else { - throw new ElasticsearchParseException("only lat/lon or [{}] is allowed", FIELD_VALUE); - } - } else if ("lon".equals(fieldName)) { - if(point == null) { - parser.nextToken(); - switch (parser.currentToken()) { - case VALUE_NUMBER: - case VALUE_STRING: - lon = parser.doubleValue(true); - break; - default: - throw new ElasticsearchParseException("longitude must be a number"); - } - } else { - throw new ElasticsearchParseException("only lat/lon or [{}] is allowed", FIELD_VALUE); - } - } else if (FIELD_PRECISION.equals(fieldName)) { - if(parser.nextToken() == Token.START_ARRAY) { - IntHashSet precisions = new IntHashSet(); - while(parser.nextToken() != Token.END_ARRAY) { - precisions.add(parsePrecision(parser)); - } - precision = precisions.toArray(); - } else { - precision = new int[] { parsePrecision(parser) }; - } - } else if (FIELD_VALUE.equals(fieldName)) { - if(Double.isNaN(lon) && Double.isNaN(lat)) { - parser.nextToken(); - point = GeoUtils.parseGeoPoint(parser); - } else { - throw new ElasticsearchParseException("only lat/lon or [{}] is allowed", FIELD_VALUE); - } - } else { - throw new ElasticsearchParseException("unexpected fieldname [{}]", fieldName); - } - } - - if (point == null) { - if (Double.isNaN(lat) || Double.isNaN(lon)) { - throw new ElasticsearchParseException("location is missing"); - } else { - point = new GeoPoint(lat, lon); - } - } - - if (precision == null || precision.length == 0) { - precision = this.precision; - } - - return new GeoQuery(name, point.geohash(), precision); - } else { - return new GeoQuery(name, GeoUtils.parseGeoPoint(parser).getGeohash(), precision); - } - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((defaultLocations == null) ? 0 : defaultLocations.hashCode()); - result = prime * result + ((fieldName == null) ? 0 : fieldName.hashCode()); - result = prime * result + (neighbors ? 
1231 : 1237); - result = prime * result + Arrays.hashCode(precision); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - GeolocationContextMapping other = (GeolocationContextMapping) obj; - if (defaultLocations == null) { - if (other.defaultLocations != null) - return false; - } else if (!defaultLocations.equals(other.defaultLocations)) - return false; - if (fieldName == null) { - if (other.fieldName != null) - return false; - } else if (!fieldName.equals(other.fieldName)) - return false; - if (neighbors != other.neighbors) - return false; - if (!Arrays.equals(precision, other.precision)) - return false; - return true; - } - - - - - public static class Builder extends ContextBuilder { - - private IntHashSet precisions = new IntHashSet(); - private boolean neighbors; // take neighbor cell on the lowest level into account - private HashSet defaultLocations = new HashSet<>(); - private String fieldName = null; - - protected Builder(String name) { - this(name, true, null); - } - - protected Builder(String name, boolean neighbors, int...levels) { - super(name); - neighbors(neighbors); - if (levels != null) { - for (int level : levels) { - precision(level); - } - } - } - - /** - * Set the precision use o make suggestions - * - * @param precision - * precision as distance with {@link DistanceUnit}. Default: - * meters - * @return this - */ - public Builder precision(String precision) { - return precision(DistanceUnit.parse(precision, DistanceUnit.METERS, DistanceUnit.METERS)); - } - - /** - * Set the precision use o make suggestions - * - * @param precision - * precision value - * @param unit - * {@link DistanceUnit} to use - * @return this - */ - public Builder precision(double precision, DistanceUnit unit) { - return precision(unit.toMeters(precision)); - } - - /** - * Set the precision use o make suggestions - * - * @param meters - * precision as distance in meters - * @return this - */ - public Builder precision(double meters) { - int level = GeoUtils.geoHashLevelsForPrecision(meters); - // Ceiling precision: we might return more results - if (GeoUtils.geoHashCellSize(level) < meters) { - level = Math.max(1, level - 1); - } - return precision(level); - } - - /** - * Set the precision use o make suggestions - * - * @param level - * maximum length of geohashes - * @return this - */ - public Builder precision(int level) { - this.precisions.add(level); - return this; - } - - /** - * Set neighborhood usage - * - * @param neighbors - * should neighbor cells also be valid - * @return this - */ - public Builder neighbors(boolean neighbors) { - this.neighbors = neighbors; - return this; - } - - /** - * Set a default location that should be used, if no location is - * provided by the query - * - * @param geohash - * geohash of the default location - * @return this - */ - public Builder addDefaultLocation(String geohash) { - this.defaultLocations.add(geohash); - return this; - } - - /** - * Set a default location that should be used, if no location is - * provided by the query - * - * @param geohashes - * geohash of the default location - * @return this - */ - public Builder addDefaultLocations(Collection geohashes) { - this.defaultLocations.addAll(geohashes); - return this; - } - - /** - * Set a default location that should be used, if no location is - * provided by the query - * - * @param lat - * latitude of the default location - * @param lon - * longitude 
of the default location - * @return this - */ - public Builder addDefaultLocation(double lat, double lon) { - this.defaultLocations.add(GeoHashUtils.stringEncode(lon, lat)); - return this; - } - - /** - * Set a default location that should be used, if no location is - * provided by the query - * - * @param point - * location - * @return this - */ - public Builder defaultLocation(GeoPoint point) { - this.defaultLocations.add(point.geohash()); - return this; - } - - /** - * Set the name of the field containing a geolocation to use - * @param fieldName name of the field - * @return this - */ - public Builder field(String fieldName) { - this.fieldName = fieldName; - return this; - } - - @Override - public GeolocationContextMapping build() { - if(precisions.isEmpty()) { - precisions.add(GeoHashUtils.PRECISION); - } - int[] precisionArray = precisions.toArray(); - Arrays.sort(precisionArray); - return new GeolocationContextMapping(name, precisionArray, neighbors, defaultLocations, fieldName); - } - - } - - private static class GeoConfig extends ContextConfig { - - private final GeolocationContextMapping mapping; - private final Collection locations; - - public GeoConfig(GeolocationContextMapping mapping, Collection locations) { - this.locations = locations; - this.mapping = mapping; - } - - @Override - protected TokenStream wrapTokenStream(Document doc, TokenStream stream) { - Collection geohashes; - - if (locations == null || locations.size() == 0) { - if(mapping.fieldName != null) { - IndexableField[] fields = doc.getFields(mapping.fieldName); - if(fields.length == 0) { - IndexableField[] lonFields = doc.getFields(mapping.fieldName + ".lon"); - IndexableField[] latFields = doc.getFields(mapping.fieldName + ".lat"); - if (lonFields.length > 0 && latFields.length > 0) { - geohashes = new ArrayList<>(fields.length); - GeoPoint spare = new GeoPoint(); - for (int i = 0 ; i < lonFields.length ; i++) { - IndexableField lonField = lonFields[i]; - IndexableField latField = latFields[i]; - assert lonField.fieldType().docValuesType() == latField.fieldType().docValuesType(); - // we write doc values fields differently: one field for all values, - // so we need to only care about indexed fields - if (lonField.fieldType().docValuesType() == DocValuesType.NONE) { - spare.reset(latField.numericValue().doubleValue(), lonField.numericValue().doubleValue()); - geohashes.add(spare.geohash()); - } - } - } else { - geohashes = mapping.defaultLocations; - } - } else { - geohashes = new ArrayList<>(fields.length); - GeoPoint spare = new GeoPoint(); - for (IndexableField field : fields) { - if (field instanceof StringField) { - spare.resetFromString(field.stringValue()); - } else if (field instanceof GeoPointField) { - GeoPointField geoPointField = (GeoPointField) field; - spare.reset(geoPointField.getLat(), geoPointField.getLon()); - } else { - spare.resetFromString(field.stringValue()); - } - geohashes.add(spare.geohash()); - } - } - } else { - geohashes = mapping.defaultLocations; - } - } else { - geohashes = locations; - } - - Collection locations = new HashSet<>(); - for (String geohash : geohashes) { - for (int p : mapping.precision) { - int precision = Math.min(p, geohash.length()); - String truncatedGeohash = geohash.substring(0, precision); - if(mapping.neighbors) { - GeoHashUtils.addNeighbors(truncatedGeohash, precision, locations); - } - locations.add(truncatedGeohash); - } - } - - return new PrefixTokenFilter(stream, ContextMapping.SEPARATOR, locations); - } - - @Override - public String toString() { - 
StringBuilder sb = new StringBuilder("GeoConfig(location = ["); - Iterator location = this.locations.iterator(); - if (location.hasNext()) { - sb.append(location.next()); - while (location.hasNext()) { - sb.append(", ").append(location.next()); - } - } - return sb.append("])").toString(); - } - } - - private static class GeoQuery extends ContextQuery { - private final String location; - private final int[] precisions; - - public GeoQuery(String name, String location, int...precisions) { - super(name); - this.location = location; - this.precisions = precisions; - } - - @Override - public Automaton toAutomaton() { - Automaton automaton; - if(precisions == null || precisions.length == 0) { - automaton = Automata.makeString(location); - } else { - automaton = Automata.makeString( - location.substring(0, Math.max(1, Math.min(location.length(), precisions[0])))); - for (int i = 1; i < precisions.length; i++) { - final String cell = location.substring(0, Math.max(1, Math.min(location.length(), precisions[i]))); - automaton = Operations.union(automaton, Automata.makeString(cell)); - } - } - return automaton; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - if(precisions == null || precisions.length == 0) { - builder.field(name, location); - } else { - builder.startObject(name); - builder.field(FIELD_VALUE, location); - builder.array(FIELD_PRECISION, precisions); - builder.endObject(); - } - return builder; - } - } -} diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/package-info.java b/core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/package-info.java deleted file mode 100644 index 0d9a9e71963..00000000000 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/context/package-info.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/** - * Support for completion suggesters with contexts built on 2.x indices. - */ -package org.elasticsearch.search.suggest.completion2x.context; diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/package-info.java b/core/src/main/java/org/elasticsearch/search/suggest/completion2x/package-info.java deleted file mode 100644 index b8b14aa7c3f..00000000000 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion2x/package-info.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/** - * Support for completion suggesters built on 2.x indices. - */ -package org.elasticsearch.search.suggest.completion2x; diff --git a/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java index ed6317fcb30..6ce6ee61938 100644 --- a/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java +++ b/core/src/main/java/org/elasticsearch/snapshots/RestoreService.java @@ -83,6 +83,7 @@ import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; +import static java.util.Collections.min; import static java.util.Collections.unmodifiableSet; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_CREATION_DATE; @@ -225,6 +226,8 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis if (!renamedIndices.isEmpty()) { // We have some indices to restore ImmutableOpenMap.Builder shardsBuilder = ImmutableOpenMap.builder(); + final Version minIndexCompatibilityVersion = currentState.getNodes().getMaxNodeVersion() + .minimumIndexCompatibilityVersion(); for (Map.Entry indexEntry : renamedIndices.entrySet()) { String index = indexEntry.getValue(); boolean partial = checkPartial(index); @@ -233,7 +236,8 @@ public class RestoreService extends AbstractComponent implements ClusterStateLis IndexMetaData snapshotIndexMetaData = metaData.index(index); snapshotIndexMetaData = updateIndexSettings(snapshotIndexMetaData, request.indexSettings, request.ignoreIndexSettings); try { - snapshotIndexMetaData = metaDataIndexUpgradeService.upgradeIndexMetaData(snapshotIndexMetaData); + snapshotIndexMetaData = metaDataIndexUpgradeService.upgradeIndexMetaData(snapshotIndexMetaData, + minIndexCompatibilityVersion); } catch (Exception ex) { throw new SnapshotRestoreException(snapshot, "cannot restore index [" + index + "] because it cannot be upgraded", ex); } diff --git a/core/src/main/java/org/elasticsearch/transport/ConnectionProfile.java b/core/src/main/java/org/elasticsearch/transport/ConnectionProfile.java new file mode 100644 index 00000000000..f6aa1e8445b --- /dev/null +++ b/core/src/main/java/org/elasticsearch/transport/ConnectionProfile.java @@ -0,0 +1,184 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.transport; + +import org.elasticsearch.common.unit.TimeValue; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.EnumSet; +import java.util.List; +import java.util.Set; +import java.util.concurrent.atomic.AtomicInteger; + +/** + * A connection profile describes how many connection are established to specific node for each of the available request types. + * ({@link org.elasticsearch.transport.TransportRequestOptions.Type}). This allows to tailor a connection towards a specific usage. + */ +public final class ConnectionProfile { + + /** + * A pre-built light connection profile that shares a single connection across all + * types. + */ + public static final ConnectionProfile LIGHT_PROFILE = new ConnectionProfile( + Collections.singletonList(new ConnectionTypeHandle(0, 1, EnumSet.of( + TransportRequestOptions.Type.BULK, + TransportRequestOptions.Type.PING, + TransportRequestOptions.Type.RECOVERY, + TransportRequestOptions.Type.REG, + TransportRequestOptions.Type.STATE))), 1, null); + + private final List handles; + private final int numConnections; + private final TimeValue connectTimeout; + + private ConnectionProfile(List handles, int numConnections, TimeValue connectTimeout) { + this.handles = handles; + this.numConnections = numConnections; + this.connectTimeout = connectTimeout; + } + + /** + * A builder to build a new {@link ConnectionProfile} + */ + public static class Builder { + private final List handles = new ArrayList<>(); + private final Set addedTypes = EnumSet.noneOf(TransportRequestOptions.Type.class); + private int offset = 0; + private TimeValue connectTimeout; + + /** + * Sets a connect connectTimeout for this connection profile + */ + public void setConnectTimeout(TimeValue connectTimeout) { + if (connectTimeout.millis() < 0) { + throw new IllegalArgumentException("connectTimeout must be non-negative but was: " + connectTimeout); + } + this.connectTimeout = connectTimeout; + } + + /** + * Adds a number of connections for one or more types. Each type can only be added once. + * @param numConnections the number of connections to use in the pool for the given connection types + * @param types a set of types that should share the given number of connections + */ + public void addConnections(int numConnections, TransportRequestOptions.Type... types) { + if (types == null || types.length == 0) { + throw new IllegalArgumentException("types must not be null"); + } + for (TransportRequestOptions.Type type : types) { + if (addedTypes.contains(type)) { + throw new IllegalArgumentException("type [" + type + "] is already registered"); + } + } + addedTypes.addAll(Arrays.asList(types)); + handles.add(new ConnectionTypeHandle(offset, numConnections, EnumSet.copyOf(Arrays.asList(types)))); + offset += numConnections; + } + + /** + * Creates a new {@link ConnectionProfile} based on the added connections. 
+ * @throws IllegalStateException if any of the {@link org.elasticsearch.transport.TransportRequestOptions.Type} enum is missing + */ + public ConnectionProfile build() { + EnumSet types = EnumSet.allOf(TransportRequestOptions.Type.class); + types.removeAll(addedTypes); + if (types.isEmpty() == false) { + throw new IllegalStateException("not all types are added for this connection profile - missing types: " + types); + } + return new ConnectionProfile(Collections.unmodifiableList(handles), offset, connectTimeout); + } + + } + + /** + * Returns the connect timeout or null if no explicit timeout is set on this profile. + */ + public TimeValue getConnectTimeout() { + return connectTimeout; + } + + /** + * Returns the total number of connections for this profile + */ + public int getNumConnections() { + return numConnections; + } + + /** + * Returns the number of connections per type for this profile. This might return a count that is shared with other types such + * that the sum of all connections per type might be higher than {@link #getNumConnections()}. For instance if + * {@link org.elasticsearch.transport.TransportRequestOptions.Type#BULK} shares connections with + * {@link org.elasticsearch.transport.TransportRequestOptions.Type#REG} they will return both the same number of connections from + * this method but the connections are not distinct. + */ + public int getNumConnectionsPerType(TransportRequestOptions.Type type) { + for (ConnectionTypeHandle handle : handles) { + if (handle.getTypes().contains(type)) { + return handle.length; + } + } + throw new AssertionError("no handle found for type: " + type); + } + + /** + * Returns the type handles for this connection profile + */ + List getHandles() { + return Collections.unmodifiableList(handles); + } + + /** + * Connection type handle encapsulates the logic which connection + */ + static final class ConnectionTypeHandle { + public final int length; + public final int offset; + private final Set types; + private final AtomicInteger counter = new AtomicInteger(); + + private ConnectionTypeHandle(int offset, int length, Set types) { + this.length = length; + this.offset = offset; + this.types = types; + } + + /** + * Returns one of the channels out configured for this handle. The channel is selected in a round-robin + * fashion. 
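Since ConnectionProfile and its Builder are new in this change, a short usage sketch may help; the connection counts and the timeout below are arbitrary example values, and the asserts only restate the getNumConnections/getNumConnectionsPerType contract described above:

    import org.elasticsearch.common.unit.TimeValue;
    import org.elasticsearch.transport.ConnectionProfile;
    import org.elasticsearch.transport.TransportRequestOptions;

    class ConnectionProfileSketch {
        static ConnectionProfile example() {
            // Every TransportRequestOptions.Type must be covered, or build() throws IllegalStateException.
            ConnectionProfile.Builder builder = new ConnectionProfile.Builder();
            builder.setConnectTimeout(TimeValue.timeValueSeconds(30));      // arbitrary example timeout
            builder.addConnections(3, TransportRequestOptions.Type.BULK);
            builder.addConnections(1, TransportRequestOptions.Type.PING,    // PING and STATE share a
                                      TransportRequestOptions.Type.STATE);  // single connection
            builder.addConnections(2, TransportRequestOptions.Type.RECOVERY);
            builder.addConnections(6, TransportRequestOptions.Type.REG);
            ConnectionProfile profile = builder.build();
            assert profile.getNumConnections() == 12;                        // 3 + 1 + 2 + 6
            assert profile.getNumConnectionsPerType(TransportRequestOptions.Type.STATE) == 1;
            return profile;
        }
    }

The default profile that TcpTransport builds further down follows the same pattern, sizing each type from the per-type connection settings and giving non-master and non-data nodes zero dedicated STATE and RECOVERY channels.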
+ */ + T getChannel(T[] channels) { + if (length == 0) { + throw new IllegalStateException("can't select channel size is 0"); + } + assert channels.length >= offset + length : "illegal size: " + channels.length + " expected >= " + (offset + length); + return channels[offset + Math.floorMod(counter.incrementAndGet(), length)]; + } + + /** + * Returns all types for this handle + */ + Set getTypes() { + return types; + } + } + +} diff --git a/core/src/main/java/org/elasticsearch/transport/TcpTransport.java b/core/src/main/java/org/elasticsearch/transport/TcpTransport.java index a68863d0e52..6749d782853 100644 --- a/core/src/main/java/org/elasticsearch/transport/TcpTransport.java +++ b/core/src/main/java/org/elasticsearch/transport/TcpTransport.java @@ -67,7 +67,6 @@ import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.support.TransportStatus; import java.io.Closeable; import java.io.IOException; @@ -81,6 +80,8 @@ import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.EnumMap; +import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; @@ -91,12 +92,13 @@ import java.util.Set; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.regex.Matcher; import java.util.regex.Pattern; +import java.util.stream.Collectors; import static java.util.Collections.unmodifiableMap; import static org.elasticsearch.common.settings.Setting.boolSetting; @@ -148,12 +150,6 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i private static final long NINETY_PER_HEAP_SIZE = (long) (JvmInfo.jvmInfo().getMem().getHeapMax().getBytes() * 0.9); private static final int PING_DATA_SIZE = -1; - protected final int connectionsPerNodeRecovery; - protected final int connectionsPerNodeBulk; - protected final int connectionsPerNodeReg; - protected final int connectionsPerNodeState; - protected final int connectionsPerNodePing; - protected final TimeValue connectTimeout; protected final boolean blockingClient; private final CircuitBreakerService circuitBreakerService; // package visibility for tests @@ -178,6 +174,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i protected final boolean compress; protected volatile BoundTransportAddress boundAddress; private final String transportName; + protected final ConnectionProfile defaultConnectionProfile; public TcpTransport(String transportName, Settings settings, ThreadPool threadPool, BigArrays bigArrays, CircuitBreakerService circuitBreakerService, NamedWriteableRegistry namedWriteableRegistry, @@ -192,14 +189,26 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i this.compress = Transport.TRANSPORT_TCP_COMPRESS.get(settings); this.networkService = networkService; this.transportName = transportName; - - this.connectionsPerNodeRecovery = CONNECTIONS_PER_NODE_RECOVERY.get(settings); - this.connectionsPerNodeBulk = CONNECTIONS_PER_NODE_BULK.get(settings); - 
this.connectionsPerNodeReg = CONNECTIONS_PER_NODE_REG.get(settings); - this.connectionsPerNodeState = CONNECTIONS_PER_NODE_STATE.get(settings); - this.connectionsPerNodePing = CONNECTIONS_PER_NODE_PING.get(settings); - this.connectTimeout = TCP_CONNECT_TIMEOUT.get(settings); this.blockingClient = TCP_BLOCKING_CLIENT.get(settings); + defaultConnectionProfile = buildDefaultConnectionProfile(settings); + } + + static ConnectionProfile buildDefaultConnectionProfile(Settings settings) { + int connectionsPerNodeRecovery = CONNECTIONS_PER_NODE_RECOVERY.get(settings); + int connectionsPerNodeBulk = CONNECTIONS_PER_NODE_BULK.get(settings); + int connectionsPerNodeReg = CONNECTIONS_PER_NODE_REG.get(settings); + int connectionsPerNodeState = CONNECTIONS_PER_NODE_STATE.get(settings); + int connectionsPerNodePing = CONNECTIONS_PER_NODE_PING.get(settings); + ConnectionProfile.Builder builder = new ConnectionProfile.Builder(); + builder.setConnectTimeout(TCP_CONNECT_TIMEOUT.get(settings)); + builder.addConnections(connectionsPerNodeBulk, TransportRequestOptions.Type.BULK); + builder.addConnections(connectionsPerNodePing, TransportRequestOptions.Type.PING); + // if we are not master eligible we don't need a dedicated channel to publish the state + builder.addConnections(DiscoveryNode.isMasterNode(settings) ? connectionsPerNodeState : 0, TransportRequestOptions.Type.STATE); + // if we are not a data-node we don't need any dedicated channels for recovery + builder.addConnections(DiscoveryNode.isDataNode(settings) ? connectionsPerNodeRecovery : 0, TransportRequestOptions.Type.RECOVERY); + builder.addConnections(connectionsPerNodeReg, TransportRequestOptions.Type.REG); + return builder.build(); } @Override @@ -255,7 +264,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i for (Map.Entry entry : connectedNodes.entrySet()) { DiscoveryNode node = entry.getKey(); NodeChannels channels = entry.getValue(); - for (Channel channel : channels.allChannels) { + for (Channel channel : channels.getChannels()) { try { sendMessage(channel, pingHeader, successfulPings::inc); } catch (Exception e) { @@ -304,40 +313,31 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i } } - public class NodeChannels implements Closeable { + public final class NodeChannels implements Closeable { + private final Map typeMapping + = new EnumMap<>(TransportRequestOptions.Type.class); + private final Channel[] channels; + private final AtomicBoolean establishedAllConnections = new AtomicBoolean(false); - public List allChannels = Collections.emptyList(); - public Channel[] recovery; - public final AtomicInteger recoveryCounter = new AtomicInteger(); - public Channel[] bulk; - public final AtomicInteger bulkCounter = new AtomicInteger(); - public Channel[] reg; - public final AtomicInteger regCounter = new AtomicInteger(); - public Channel[] state; - public final AtomicInteger stateCounter = new AtomicInteger(); - public Channel[] ping; - public final AtomicInteger pingCounter = new AtomicInteger(); - - public NodeChannels(Channel[] recovery, Channel[] bulk, Channel[] reg, Channel[] state, Channel[] ping) { - this.recovery = recovery; - this.bulk = bulk; - this.reg = reg; - this.state = state; - this.ping = ping; + public NodeChannels(Channel[] channels, ConnectionProfile connectionProfile) { + this.channels = channels; + assert channels.length == connectionProfile.getNumConnections() : "expected channels size to be == " + + connectionProfile.getNumConnections() + " but was: [" + channels.length + 
"]"; + for (ConnectionProfile.ConnectionTypeHandle handle : connectionProfile.getHandles()) { + for (TransportRequestOptions.Type type : handle.getTypes()) + typeMapping.put(type, handle); + } } - public void start() { - List newAllChannels = new ArrayList<>(); - newAllChannels.addAll(Arrays.asList(recovery)); - newAllChannels.addAll(Arrays.asList(bulk)); - newAllChannels.addAll(Arrays.asList(reg)); - newAllChannels.addAll(Arrays.asList(state)); - newAllChannels.addAll(Arrays.asList(ping)); - this.allChannels = Collections.unmodifiableList(newAllChannels); + public void connectionsEstablished() { + if (establishedAllConnections.compareAndSet(false, true) == false) { + throw new AssertionError("connected more than once"); + } + } public boolean hasChannel(Channel channel) { - for (Channel channel1 : allChannels) { + for (Channel channel1 : channels) { if (channel.equals(channel1)) { return true; } @@ -345,29 +345,26 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i return false; } - public Channel channel(TransportRequestOptions.Type type) { - if (type == TransportRequestOptions.Type.REG) { - return reg[Math.floorMod(regCounter.incrementAndGet(), reg.length)]; - } else if (type == TransportRequestOptions.Type.STATE) { - return state[Math.floorMod(stateCounter.incrementAndGet(), state.length)]; - } else if (type == TransportRequestOptions.Type.PING) { - return ping[Math.floorMod(pingCounter.incrementAndGet(), ping.length)]; - } else if (type == TransportRequestOptions.Type.BULK) { - return bulk[Math.floorMod(bulkCounter.incrementAndGet(), bulk.length)]; - } else if (type == TransportRequestOptions.Type.RECOVERY) { - return recovery[Math.floorMod(recoveryCounter.incrementAndGet(), recovery.length)]; + public List getChannels() { + if (establishedAllConnections.get()) { // don't expose the channels until we are connected + return Arrays.asList(channels); } else { - throw new IllegalArgumentException("no type channel for [" + type + "]"); + return Collections.emptyList(); } } - public List getChannelArrays() { - return Arrays.asList(recovery, bulk, reg, state, ping); + public Channel channel(TransportRequestOptions.Type type) { + assert establishedAllConnections.get(); + ConnectionProfile.ConnectionTypeHandle connectionTypeHandle = typeMapping.get(type); + if (connectionTypeHandle == null) { + throw new IllegalArgumentException("no type channel for [" + type + "]"); + } + return connectionTypeHandle.getChannel(channels); } @Override public synchronized void close() throws IOException { - closeChannels(allChannels); + closeChannels(Arrays.asList(channels).stream().filter(Objects::nonNull).collect(Collectors.toList())); } } @@ -377,16 +374,8 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i } @Override - public void connectToNodeLight(DiscoveryNode node) throws ConnectTransportException { - connectToNode(node, true); - } - - @Override - public void connectToNode(DiscoveryNode node) { - connectToNode(node, false); - } - - public void connectToNode(DiscoveryNode node, boolean light) { + public void connectToNode(DiscoveryNode node, ConnectionProfile connectionProfile) { + connectionProfile = connectionProfile == null ? 
defaultConnectionProfile : connectionProfile; if (!lifecycle.started()) { throw new IllegalStateException("can't add nodes to a stopped transport"); } @@ -405,20 +394,16 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i return; } try { - if (light) { - nodeChannels = connectToChannelsLight(node); - } else { - try { - nodeChannels = connectToChannels(node); - } catch (Exception e) { - logger.trace( - (Supplier) () -> new ParameterizedMessage( - "failed to connect to [{}], cleaning dangling connections", node), e); - throw e; - } + try { + nodeChannels = connectToChannels(node, connectionProfile); + } catch (Exception e) { + logger.trace( + (Supplier) () -> new ParameterizedMessage( + "failed to connect to [{}], cleaning dangling connections", node), e); + throw e; } // we acquire a connection lock, so no way there is an existing connection - nodeChannels.start(); + nodeChannels.connectionsEstablished(); connectedNodes.put(node, nodeChannels); if (logger.isDebugEnabled()) { logger.debug("connected to node [{}]", node); @@ -513,11 +498,6 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i return Version.CURRENT; } - @Override - public boolean addressSupported(Class address) { - return TransportAddress.class.equals(address); - } - @Override public BoundTransportAddress boundAddress() { return this.boundAddress; @@ -889,21 +869,10 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i */ protected abstract void closeChannels(List channel) throws IOException; - /** - * Connects to the given node in a light way. This means we are not creating multiple connections like we do - * for production connections. This connection is for pings or handshakes - */ - protected abstract NodeChannels connectToChannelsLight(DiscoveryNode node) throws IOException; - protected abstract void sendMessage(Channel channel, BytesReference reference, Runnable sendListener) throws IOException; - /** - * Connects to the node in a heavy way. - * - * @see #connectToChannelsLight(DiscoveryNode) - */ - protected abstract NodeChannels connectToChannels(DiscoveryNode node) throws IOException; + protected abstract NodeChannels connectToChannels(DiscoveryNode node, ConnectionProfile connectionProfile) throws IOException; /** * Called to tear down internal resources @@ -940,7 +909,7 @@ public abstract class TcpTransport extends AbstractLifecycleComponent i // we pick the smallest of the 2, to support both backward and forward compatibility // note, this is the only place we need to do this, since from here on, we use the serialized version // as the version to use also when the node receiving this request will send the response with - Version version = Version.smallest(getCurrentVersion(), node.getVersion()); + Version version = Version.min(getCurrentVersion(), node.getVersion()); stream.setVersion(version); threadPool.getThreadContext().writeTo(stream); diff --git a/core/src/main/java/org/elasticsearch/transport/Transport.java b/core/src/main/java/org/elasticsearch/transport/Transport.java index c3c178a2c84..96dcd61483d 100644 --- a/core/src/main/java/org/elasticsearch/transport/Transport.java +++ b/core/src/main/java/org/elasticsearch/transport/Transport.java @@ -56,26 +56,16 @@ public interface Transport extends LifecycleComponent { */ TransportAddress[] addressesFromString(String address, int perAddressLimit) throws UnknownHostException; - /** - * Is the address type supported. 
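At the call-site level, the old connectToNode/connectToNodeLight pair collapses into a single method parameterized by a profile. A hedged sketch, assuming an already-constructed Transport implementation and a resolved DiscoveryNode:

    import org.elasticsearch.cluster.node.DiscoveryNode;
    import org.elasticsearch.transport.ConnectionProfile;
    import org.elasticsearch.transport.Transport;

    class ConnectToNodeSketch {
        // Full connection: passing null selects the transport's default profile,
        // as the null check above shows.
        static void fullConnect(Transport transport, DiscoveryNode node) {
            transport.connectToNode(node, null);
        }

        // What used to be connectToNodeLight: one channel shared by all request types,
        // e.g. for a ping or handshake before the node actually joins the cluster.
        static void lightConnect(Transport transport, DiscoveryNode node) {
            transport.connectToNode(node, ConnectionProfile.LIGHT_PROFILE);
        }
    }

TransportService.connectToNodeAndHandshake further down takes exactly the LIGHT_PROFILE path before performing the handshake.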
- */ - boolean addressSupported(Class address); - /** * Returns true if the node is connected. */ boolean nodeConnected(DiscoveryNode node); /** - * Connects to the given node, if already connected, does nothing. + * Connects to a node with the given connection profile. Use {@link ConnectionProfile#LIGHT_PROFILE} when just connecting for ping + * and then disconnecting. If the node is already connected this method has no effect */ - void connectToNode(DiscoveryNode node) throws ConnectTransportException; - - /** - * Connects to a node in a light manner. Used when just connecting for ping and then - * disconnecting. - */ - void connectToNodeLight(DiscoveryNode node) throws ConnectTransportException; + void connectToNode(DiscoveryNode node, ConnectionProfile connectionProfile) throws ConnectTransportException; /** * Disconnected from the given node, if not connected, will do nothing. @@ -99,5 +89,4 @@ public interface Transport extends LifecycleComponent { default CircuitBreaker getInFlightRequestBreaker() { return new NoopCircuitBreaker("in-flight-noop"); } - } diff --git a/core/src/main/java/org/elasticsearch/transport/TransportService.java b/core/src/main/java/org/elasticsearch/transport/TransportService.java index 60cdaf7e978..511be6fb84c 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportService.java @@ -255,10 +255,6 @@ public class TransportService extends AbstractLifecycleComponent { blockIncomingRequestsLatch.countDown(); } - public final boolean addressSupported(Class address) { - return transport.addressSupported(address); - } - public TransportInfo info() { BoundTransportAddress boundTransportAddress = boundAddress(); if (boundTransportAddress == null) { @@ -285,22 +281,20 @@ public class TransportService extends AbstractLifecycleComponent { } public void connectToNode(DiscoveryNode node) throws ConnectTransportException { - if (node.equals(localNode)) { - return; - } - transport.connectToNode(node); + connectToNode(node, null); } /** - * Lightly connect to the specified node + * Connect to the specified node with the given connection profile * * @param node the node to connect to + * @param connectionProfile the connection profile to use when connecting to this node */ - public void connectToNodeLight(final DiscoveryNode node) { + public void connectToNode(final DiscoveryNode node, ConnectionProfile connectionProfile) { if (node.equals(localNode)) { return; } - transport.connectToNodeLight(node); + transport.connectToNode(node, connectionProfile); } /** @@ -313,10 +307,10 @@ public class TransportService extends AbstractLifecycleComponent { * @throws ConnectTransportException if the connection or the * handshake failed */ - public DiscoveryNode connectToNodeLightAndHandshake( + public DiscoveryNode connectToNodeAndHandshake( final DiscoveryNode node, final long handshakeTimeout) throws ConnectTransportException { - return connectToNodeLightAndHandshake(node, handshakeTimeout, true); + return connectToNodeAndHandshake(node, handshakeTimeout, true); } /** @@ -333,14 +327,14 @@ public class TransportService extends AbstractLifecycleComponent { * @throws ConnectTransportException if the connection failed * @throws IllegalStateException if the handshake failed */ - public DiscoveryNode connectToNodeLightAndHandshake( + public DiscoveryNode connectToNodeAndHandshake( final DiscoveryNode node, final long handshakeTimeout, final boolean checkClusterName) { if (node.equals(localNode)) { return 
localNode; } - transport.connectToNodeLight(node); + transport.connectToNode(node, ConnectionProfile.LIGHT_PROFILE); try { return handshake(node, handshakeTimeout, checkClusterName); } catch (ConnectTransportException | IllegalStateException e) { diff --git a/core/src/main/java/org/elasticsearch/transport/support/TransportStatus.java b/core/src/main/java/org/elasticsearch/transport/TransportStatus.java similarity index 95% rename from core/src/main/java/org/elasticsearch/transport/support/TransportStatus.java rename to core/src/main/java/org/elasticsearch/transport/TransportStatus.java index 29c2bfb2781..42e0e962a62 100644 --- a/core/src/main/java/org/elasticsearch/transport/support/TransportStatus.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportStatus.java @@ -17,9 +17,9 @@ * under the License. */ -package org.elasticsearch.transport.support; +package org.elasticsearch.transport; -public class TransportStatus { +final class TransportStatus { private static final byte STATUS_REQRES = 1 << 0; private static final byte STATUS_ERROR = 1 << 1; diff --git a/core/src/main/resources/META-INF/services/org.apache.lucene.codecs.PostingsFormat b/core/src/main/resources/META-INF/services/org.apache.lucene.codecs.PostingsFormat index 92203175727..2c92f0ecd3f 100644 --- a/core/src/main/resources/META-INF/services/org.apache.lucene.codecs.PostingsFormat +++ b/core/src/main/resources/META-INF/services/org.apache.lucene.codecs.PostingsFormat @@ -1,2 +1 @@ org.apache.lucene.search.suggest.document.Completion50PostingsFormat -org.elasticsearch.search.suggest.completion2x.Completion090PostingsFormat \ No newline at end of file diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy index 1fa2043d547..623f883f492 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy @@ -31,7 +31,7 @@ grant codeBase "${codebase.securesm-1.1.jar}" { //// Very special jar permissions: //// These are dangerous permissions that we don't want to grant to everything. -grant codeBase "${codebase.lucene-core-6.3.0.jar}" { +grant codeBase "${codebase.lucene-core-6.4.0-snapshot-ec38570.jar}" { // needed to allow MMapDirectory's "unmap hack" (die unmap hack, die) // java 8 package permission java.lang.RuntimePermission "accessClassInPackage.sun.misc"; @@ -42,7 +42,7 @@ grant codeBase "${codebase.lucene-core-6.3.0.jar}" { permission java.lang.RuntimePermission "accessDeclaredMembers"; }; -grant codeBase "${codebase.lucene-misc-6.3.0.jar}" { +grant codeBase "${codebase.lucene-misc-6.4.0-snapshot-ec38570.jar}" { // needed to allow shard shrinking to use hard-links if possible via lucenes HardlinkCopyDirectoryWrapper permission java.nio.file.LinkPermission "hard"; }; @@ -92,10 +92,10 @@ grant { permission java.lang.RuntimePermission "modifyThreadGroup"; // needed by ExceptionSerializationTests and RestTestCase for - // some hackish things they do. otherwise only needed by groovy + // some hackish things they do. otherwise only needed by groovy // (TODO: clean this up?) 
permission java.lang.RuntimePermission "getProtectionDomain"; - + // needed by HotThreads and potentially more // otherwise can be provided only to test libraries permission java.lang.RuntimePermission "getStackTrace"; diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy index 9492b72d030..6f44c37d233 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ - + //// additional test framework permissions. //// These are mock objects and test management that we allow test framework libs //// to provide on our behalf. But tests themselves cannot do this stuff! @@ -33,7 +33,7 @@ grant codeBase "${codebase.securemock-1.2.jar}" { permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; }; -grant codeBase "${codebase.lucene-test-framework-6.3.0.jar}" { +grant codeBase "${codebase.lucene-test-framework-6.4.0-snapshot-ec38570.jar}" { // needed by RamUsageTester permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; // needed for testing hardlinks in StoreRecoveryTests since we install MockFS diff --git a/core/src/main/resources/org/elasticsearch/tasks/task-index-mapping.json b/core/src/main/resources/org/elasticsearch/tasks/task-index-mapping.json index 4863cdb539b..0f1a32e1bef 100644 --- a/core/src/main/resources/org/elasticsearch/tasks/task-index-mapping.json +++ b/core/src/main/resources/org/elasticsearch/tasks/task-index-mapping.json @@ -36,7 +36,7 @@ "enabled" : false }, "description": { - "type": "string" + "type": "text" } } }, diff --git a/core/src/test/java/org/apache/lucene/analysis/synonym/SynonymGraphFilterTests.java b/core/src/test/java/org/apache/lucene/analysis/synonym/SynonymGraphFilterTests.java new file mode 100644 index 00000000000..fafe8a954c8 --- /dev/null +++ b/core/src/test/java/org/apache/lucene/analysis/synonym/SynonymGraphFilterTests.java @@ -0,0 +1,1074 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.lucene.analysis.synonym; + +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.BaseTokenStreamTestCase; +import org.apache.lucene.analysis.CannedTokenStream; +import org.apache.lucene.analysis.MockAnalyzer; +import org.apache.lucene.analysis.MockGraphTokenFilter; +import org.apache.lucene.analysis.MockTokenizer; +import org.apache.lucene.analysis.Token; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.TokenStreamToAutomaton; +import org.apache.lucene.analysis.Tokenizer; +import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; +import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; +import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute; +import org.apache.lucene.document.Document; +import org.apache.lucene.document.Field; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.TokenStreamToTermAutomatonQuery; +import org.apache.lucene.store.Directory; +import org.apache.lucene.util.BytesRefBuilder; +import org.apache.lucene.util.CharsRefBuilder; +import org.apache.lucene.util.IOUtils; +import org.apache.lucene.util.IntsRef; +import org.apache.lucene.util.TestUtil; +import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.AutomatonTestUtil; +import org.apache.lucene.util.automaton.Operations; +import org.apache.lucene.util.automaton.TooComplexToDeterminizeException; +import org.apache.lucene.util.automaton.Transition; +import org.apache.lucene.util.fst.Util; + +import java.io.IOException; +import java.io.StringReader; +import java.text.ParseException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +public class SynonymGraphFilterTests extends BaseTokenStreamTestCase { + + /** + * Set as a side effect by {@link #getAnalyzer}.
+ */ + private SynonymGraphFilter synFilter; + + // LUCENE-6664 + public static void assertAnalyzesToPositions(Analyzer a, String input, String[] output, String[] types, int[] posIncrements, int[] + posLengths) throws IOException { + assertAnalyzesTo(a, input, output, null, null, types, posIncrements, posLengths); + } + + public void testBasicKeepOrigOneOutput() throws Exception { + SynonymMap.Builder b = new SynonymMap.Builder(true); + add(b, "a b", "x", true); + + Analyzer a = getAnalyzer(b, true); + assertAnalyzesTo(a, "c a b", new String[]{"c", "x", "a", "b"}, new int[]{0, 2, 2, 4}, new int[]{1, 5, 3, 5}, new String[]{"word", + "SYNONYM", "word", "word"}, new int[]{1, 1, 0, 1}, new int[]{1, 2, 1, 1}); + a.close(); + } + + public void testMixedKeepOrig() throws Exception { + SynonymMap.Builder b = new SynonymMap.Builder(true); + add(b, "a b", "x", true); + add(b, "e f", "y", false); + + Analyzer a = getAnalyzer(b, true); + assertAnalyzesTo(a, "c a b c e f g", new String[]{"c", "x", "a", "b", "c", "y", "g"}, new int[]{0, 2, 2, 4, 6, 8, 12}, new + int[]{1, 5, 3, 5, 7, 11, 13}, new String[]{"word", "SYNONYM", "word", "word", "word", "SYNONYM", "word"}, new + int[]{1, 1, 0, + 1, 1, 1, 1}, new int[]{1, 2, 1, 1, 1, 1, 1}); + a.close(); + } + + public void testNoParseAfterBuffer() throws Exception { + SynonymMap.Builder b = new SynonymMap.Builder(true); + add(b, "b a", "x", true); + + Analyzer a = getAnalyzer(b, true); + assertAnalyzesTo(a, "b b b", new String[]{"b", "b", "b"}, new int[]{0, 2, 4}, new int[]{1, 3, 5}, new String[]{"word", "word", + "word"}, new int[]{1, 1, 1}, new int[]{1, 1, 1}); + a.close(); + } + + public void testOneInputMultipleOutputKeepOrig() throws Exception { + SynonymMap.Builder b = new SynonymMap.Builder(true); + add(b, "a b", "x", true); + add(b, "a b", "y", true); + + Analyzer a = getAnalyzer(b, true); + assertAnalyzesTo(a, "c a b c", new String[]{"c", "x", "y", "a", "b", "c"}, new int[]{0, 2, 2, 2, 4, 6}, new int[]{1, 5, 5, 3, 5, + 7}, new String[]{"word", "SYNONYM", "SYNONYM", "word", "word", "word"}, new int[]{1, 1, 0, 0, 1, 1, 1, 1}, new + int[]{1, 2, 2, + 1, 1, 1, 1, 1}); + a.close(); + } + + /** + * parse a syn file with bad syntax + */ + public void testInvalidAnalyzesToNothingOutput() throws Exception { + String testFile = "a => 1"; + Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, false); + SolrSynonymParser parser = new SolrSynonymParser(true, true, analyzer); + try { + parser.parse(new StringReader(testFile)); + fail("didn't get expected exception"); + } catch (ParseException expected) { + // expected exc + } + analyzer.close(); + } + + /** + * parse a syn file with bad syntax + */ + public void testInvalidDoubleMap() throws Exception { + String testFile = "a => b => c"; + Analyzer analyzer = new MockAnalyzer(random()); + SolrSynonymParser parser = new SolrSynonymParser(true, true, analyzer); + try { + parser.parse(new StringReader(testFile)); + fail("didn't get expected exception"); + } catch (ParseException expected) { + // expected exc + } + analyzer.close(); + } + + public void testMoreThanOneLookAhead() throws Exception { + SynonymMap.Builder b = new SynonymMap.Builder(true); + add(b, "a b c d", "x", true); + + Analyzer a = getAnalyzer(b, true); + assertAnalyzesTo(a, "a b c e", new String[]{"a", "b", "c", "e"}, new int[]{0, 2, 4, 6}, new int[]{1, 3, 5, 7}, new + String[]{"word", "word", "word", "word"}, new int[]{1, 1, 1, 1}, new int[]{1, 1, 1, 1}); + a.close(); + } + + public void testLookaheadAfterParse() throws Exception { + 
SynonymMap.Builder b = new SynonymMap.Builder(true); + add(b, "b b", "x", true); + add(b, "b", "y", true); + + Analyzer a = getAnalyzer(b, true); + + assertAnalyzesTo(a, "b a b b", new String[]{"y", "b", "a", "x", "b", "b"}, new int[]{0, 0, 2, 4, 4, 6}, new int[]{1, 1, 3, 7, 5, + 7}, null, new int[]{1, 0, 1, 1, 0, 1}, new int[]{1, 1, 1, 2, 1, 1}, true); + } + + public void testLookaheadSecondParse() throws Exception { + SynonymMap.Builder b = new SynonymMap.Builder(true); + add(b, "b b b", "x", true); + add(b, "b", "y", true); + + Analyzer a = getAnalyzer(b, true); + + assertAnalyzesTo(a, "b b", new String[]{"y", "b", "y", "b"}, new int[]{0, 0, 2, 2}, new int[]{1, 1, 3, 3}, null, new int[]{1, 0, + 1, 0}, new int[]{1, 1, 1, 1}, true); + } + + public void testOneInputMultipleOutputNoKeepOrig() throws Exception { + SynonymMap.Builder b = new SynonymMap.Builder(true); + add(b, "a b", "x", false); + add(b, "a b", "y", false); + + Analyzer a = getAnalyzer(b, true); + assertAnalyzesTo(a, "c a b c", new String[]{"c", "x", "y", "c"}, new int[]{0, 2, 2, 6}, new int[]{1, 5, 5, 7}, new + String[]{"word", "SYNONYM", "SYNONYM", "word"}, new int[]{1, 1, 0, 1}, new int[]{1, 1, 1, 1}); + a.close(); + } + + public void testOneInputMultipleOutputMixedKeepOrig() throws Exception { + SynonymMap.Builder b = new SynonymMap.Builder(true); + add(b, "a b", "x", true); + add(b, "a b", "y", false); + + Analyzer a = getAnalyzer(b, true); + assertAnalyzesTo(a, "c a b c", new String[]{"c", "x", "y", "a", "b", "c"}, new int[]{0, 2, 2, 2, 4, 6}, new int[]{1, 5, 5, 3, 5, + 7}, new String[]{"word", "SYNONYM", "SYNONYM", "word", "word", "word"}, new int[]{1, 1, 0, 0, 1, 1, 1, 1}, new + int[]{1, 2, 2, + 1, 1, 1, 1, 1}); + a.close(); + } + + public void testSynAtEnd() throws Exception { + SynonymMap.Builder b = new SynonymMap.Builder(true); + add(b, "a b", "x", true); + + Analyzer a = getAnalyzer(b, true); + assertAnalyzesTo(a, "c d e a b", new String[]{"c", "d", "e", "x", "a", "b"}, new int[]{0, 2, 4, 6, 6, 8}, new int[]{1, 3, 5, 9, + 7, 9}, new String[]{"word", "word", "word", "SYNONYM", "word", "word"}, new int[]{1, 1, 1, 1, 0, 1}, new int[]{1, 1, 1, + 2, 1, + 1}); + a.close(); + } + + public void testTwoSynsInARow() throws Exception { + SynonymMap.Builder b = new SynonymMap.Builder(true); + add(b, "a", "x", false); + + Analyzer a = getAnalyzer(b, true); + assertAnalyzesTo(a, "c a a b", new String[]{"c", "x", "x", "b"}, new int[]{0, 2, 4, 6}, new int[]{1, 3, 5, 7}, new + String[]{"word", "SYNONYM", "SYNONYM", "word"}, new int[]{1, 1, 1, 1}, new int[]{1, 1, 1, 1}); + a.close(); + } + + public void testBasicKeepOrigTwoOutputs() throws Exception { + SynonymMap.Builder b = new SynonymMap.Builder(true); + add(b, "a b", "x y", true); + add(b, "a b", "m n o", true); + + Analyzer a = getAnalyzer(b, true); + assertAnalyzesTo(a, "c a b d", new String[]{"c", "x", "m", "a", "y", "n", "o", "b", "d"}, new int[]{0, 2, 2, 2, 2, 2, 2, 4, 6}, + new int[]{1, 5, 5, 3, 5, 5, 5, 5, 7}, new String[]{"word", "SYNONYM", "SYNONYM", "word", "SYNONYM", + "SYNONYM", "SYNONYM", + "word", "word"}, new int[]{1, 1, 0, 0, 1, 1, 1, 1, 1}, new int[]{1, 1, 2, 4, 4, 1, 2, 1, 1}); + a.close(); + } + + public void testNoCaptureIfNoMatch() throws Exception { + SynonymMap.Builder b = new SynonymMap.Builder(true); + add(b, "a b", "x y", true); + + Analyzer a = getAnalyzer(b, true); + + assertAnalyzesTo(a, "c d d", new String[]{"c", "d", "d"}, new int[]{0, 2, 4}, new int[]{1, 3, 5}, new String[]{"word", "word", + "word"}, new int[]{1, 1, 1}, new int[]{1, 1, 1}); + 
assertEquals(0, synFilter.getCaptureCount()); + a.close(); + } + + public void testBasicNotKeepOrigOneOutput() throws Exception { + SynonymMap.Builder b = new SynonymMap.Builder(true); + add(b, "a b", "x", false); + + Analyzer a = getAnalyzer(b, true); + assertAnalyzesTo(a, "c a b", new String[]{"c", "x"}, new int[]{0, 2}, new int[]{1, 5}, new String[]{"word", "SYNONYM"}, new + int[]{1, 1}, new int[]{1, 1}); + a.close(); + } + + public void testBasicNoKeepOrigTwoOutputs() throws Exception { + SynonymMap.Builder b = new SynonymMap.Builder(true); + add(b, "a b", "x y", false); + add(b, "a b", "m n o", false); + + Analyzer a = getAnalyzer(b, true); + assertAnalyzesTo(a, "c a b d", new String[]{"c", "x", "m", "y", "n", "o", "d"}, new int[]{0, 2, 2, 2, 2, 2, 6}, new int[]{1, 5, + 5, 5, 5, 5, 7}, new String[]{"word", "SYNONYM", "SYNONYM", "SYNONYM", "SYNONYM", "SYNONYM", + "word"}, new int[]{1, 1, 0, 1, 1, + 1, 1}, new int[]{1, 1, 2, 3, 1, 1, 1}); + a.close(); + } + + public void testIgnoreCase() throws Exception { + SynonymMap.Builder b = new SynonymMap.Builder(true); + add(b, "a b", "x y", false); + add(b, "a b", "m n o", false); + + Analyzer a = getAnalyzer(b, true); + assertAnalyzesTo(a, "c A B D", new String[]{"c", "x", "m", "y", "n", "o", "D"}, new int[]{0, 2, 2, 2, 2, 2, 6}, new int[]{1, 5, + 5, 5, 5, 5, 7}, new String[]{"word", "SYNONYM", "SYNONYM", "SYNONYM", "SYNONYM", "SYNONYM", + "word"}, new int[]{1, 1, 0, 1, 1, + 1, 1}, new int[]{1, 1, 2, 3, 1, 1, 1}); + a.close(); + } + + public void testDoNotIgnoreCase() throws Exception { + SynonymMap.Builder b = new SynonymMap.Builder(true); + add(b, "a b", "x y", false); + add(b, "a b", "m n o", false); + + Analyzer a = getAnalyzer(b, false); + assertAnalyzesTo(a, "c A B D", new String[]{"c", "A", "B", "D"}, new int[]{0, 2, 4, 6}, new int[]{1, 3, 5, 7}, new + String[]{"word", "word", "word", "word"}, new int[]{1, 1, 1, 1}, new int[]{1, 1, 1, 1}); + a.close(); + } + + public void testBufferedFinish1() throws Exception { + SynonymMap.Builder b = new SynonymMap.Builder(true); + add(b, "a b c", "m n o", false); + + Analyzer a = getAnalyzer(b, true); + assertAnalyzesTo(a, "c a b", new String[]{"c", "a", "b"}, new int[]{0, 2, 4}, new int[]{1, 3, 5}, new String[]{"word", "word", + "word"}, new int[]{1, 1, 1}, new int[]{1, 1, 1}); + a.close(); + } + + public void testBufferedFinish2() throws Exception { + SynonymMap.Builder b = new SynonymMap.Builder(true); + add(b, "a b", "m n o", false); + add(b, "d e", "m n o", false); + + Analyzer a = getAnalyzer(b, true); + assertAnalyzesTo(a, "c a d", new String[]{"c", "a", "d"}, new int[]{0, 2, 4}, new int[]{1, 3, 5}, new String[]{"word", "word", + "word"}, new int[]{1, 1, 1}, new int[]{1, 1, 1}); + a.close(); + } + + public void testCanReuse() throws Exception { + SynonymMap.Builder b = new SynonymMap.Builder(true); + add(b, "a b", "x", true); + Analyzer a = getAnalyzer(b, true); + for (int i = 0; i < 10; i++) { + assertAnalyzesTo(a, "c a b", new String[]{"c", "x", "a", "b"}, new int[]{0, 2, 2, 4}, new int[]{1, 5, 3, 5}, new + String[]{"word", "SYNONYM", "word", "word"}, new int[]{1, 1, 0, 1}, new int[]{1, 2, 1, 1}); + } + a.close(); + } + + /** + * Multiple input tokens map to a single output token + */ + public void testManyToOne() throws Exception { + SynonymMap.Builder b = new SynonymMap.Builder(true); + add(b, "a b c", "z", true); + + Analyzer a = getAnalyzer(b, true); + assertAnalyzesTo(a, "a b c d", new String[]{"z", "a", "b", "c", "d"}, new int[]{0, 0, 2, 4, 6}, new int[]{5, 1, 3, 5, 7}, new + 
String[]{"SYNONYM", "word", "word", "word", "word"}, new int[]{1, 0, 1, 1, 1}, new int[]{3, 1, 1, 1, 1}); + a.close(); + } + + public void testBufferAfterMatch() throws Exception { + SynonymMap.Builder b = new SynonymMap.Builder(true); + add(b, "a b c d", "x", true); + add(b, "a b", "y", false); + + // The 'c' token has to be buffered because SynGraphFilter + // needs to know whether a b c d -> x matches: + Analyzer a = getAnalyzer(b, true); + assertAnalyzesTo(a, "f a b c e", new String[]{"f", "y", "c", "e"}, new int[]{0, 2, 6, 8}, new int[]{1, 5, 7, 9}, new + String[]{"word", "SYNONYM", "word", "word"}, new int[]{1, 1, 1, 1}, new int[]{1, 1, 1, 1}); + a.close(); + } + + public void testZeroSyns() throws Exception { + Tokenizer tokenizer = new MockTokenizer(); + tokenizer.setReader(new StringReader("aa bb")); + try { + new SynonymGraphFilter(tokenizer, new SynonymMap.Builder(true).build(), true); + fail("did not hit expected exception"); + } catch (IllegalArgumentException iae) { + // expected + assertEquals("fst must be non-null", iae.getMessage()); + } + } + + // Needs TermAutomatonQuery, which is in sandbox still: + public void testAccurateGraphQuery1() throws Exception { + Directory dir = newDirectory(); + RandomIndexWriter w = new RandomIndexWriter(random(), dir); + Document doc = new Document(); + doc.add(newTextField("field", "wtf happened", Field.Store.NO)); + w.addDocument(doc); + IndexReader r = w.getReader(); + w.close(); + + IndexSearcher s = newSearcher(r); + + SynonymMap.Builder b = new SynonymMap.Builder(true); + add(b, "what the fudge", "wtf", true); + + SynonymMap map = b.build(); + + TokenStreamToTermAutomatonQuery ts2q = new TokenStreamToTermAutomatonQuery(); + + + TokenStream in = new CannedTokenStream(0, 23, token("what", 1, 1, 0, 4), token("the", 1, 1, 5, 8), token("fudge", 1, 1, 9, 14), + token("happened", 1, 1, 15, 23)); + + assertEquals(1, s.count(ts2q.toQuery("field", new SynonymGraphFilter(in, map, true)))); + + in = new CannedTokenStream(0, 12, token("wtf", 1, 1, 0, 3), token("happened", 1, 1, 4, 12)); + + assertEquals(1, s.count(ts2q.toQuery("field", new SynonymGraphFilter(in, map, true)))); + + // "what happened" should NOT match: + in = new CannedTokenStream(0, 13, token("what", 1, 1, 0, 4), token("happened", 1, 1, 5, 13)); + assertEquals(0, s.count(ts2q.toQuery("field", new SynonymGraphFilter(in, map, true)))); + + IOUtils.close(r, dir); + } + + + /** + * If we expand synonyms at search time, the results are correct. 
+ */ + // Needs TermAutomatonQuery, which is in sandbox still: + public void testAccurateGraphQuery2() throws Exception { + Directory dir = newDirectory(); + RandomIndexWriter w = new RandomIndexWriter(random(), dir); + Document doc = new Document(); + doc.add(newTextField("field", "say wtf happened", Field.Store.NO)); + w.addDocument(doc); + IndexReader r = w.getReader(); + w.close(); + + IndexSearcher s = newSearcher(r); + + SynonymMap.Builder b = new SynonymMap.Builder(true); + add(b, "fudge", "chocolate", true); + add(b, "what the fudge", "wtf", true); + add(b, "what the", "wut", true); + add(b, "say", "say what", true); + + SynonymMap map = b.build(); + + TokenStream in = new CannedTokenStream(0, 26, token("say", 1, 1, 0, 3), token("what", 1, 1, 3, 7), token("the", 1, 1, 8, 11), + token("fudge", 1, 1, 12, 17), token("happened", 1, 1, 18, 26)); + + TokenStreamToTermAutomatonQuery ts2q = new TokenStreamToTermAutomatonQuery(); + + assertEquals(1, s.count(ts2q.toQuery("field", new SynonymGraphFilter(in, map, true)))); + + // "what happened" should NOT match: + in = new CannedTokenStream(0, 13, token("what", 1, 1, 0, 4), token("happened", 1, 1, 5, 13)); + assertEquals(0, s.count(ts2q.toQuery("field", new SynonymGraphFilter(in, map, true)))); + + IOUtils.close(r, dir); + } + + + // Needs TermAutomatonQuery, which is in sandbox still: + public void testAccurateGraphQuery3() throws Exception { + Directory dir = newDirectory(); + RandomIndexWriter w = new RandomIndexWriter(random(), dir); + Document doc = new Document(); + doc.add(newTextField("field", "say what the fudge happened", Field.Store.NO)); + w.addDocument(doc); + IndexReader r = w.getReader(); + w.close(); + + IndexSearcher s = newSearcher(r); + + SynonymMap.Builder b = new SynonymMap.Builder(true); + add(b, "wtf", "what the fudge", true); + + SynonymMap map = b.build(); + + TokenStream in = new CannedTokenStream(0, 15, token("say", 1, 1, 0, 3), token("wtf", 1, 1, 3, 6), token("happened", 1, 1, 7, 15)); + + TokenStreamToTermAutomatonQuery ts2q = new TokenStreamToTermAutomatonQuery(); + + assertEquals(1, s.count(ts2q.toQuery("field", new SynonymGraphFilter(in, map, true)))); + + // "what happened" should NOT match: + in = new CannedTokenStream(0, 13, token("what", 1, 1, 0, 4), token("happened", 1, 1, 5, 13)); + assertEquals(0, s.count(ts2q.toQuery("field", new SynonymGraphFilter(in, map, true)))); + + IOUtils.close(r, dir); + } + + private static Token token(String term, int posInc, int posLength, int startOffset, int endOffset) { + final Token t = new Token(term, startOffset, endOffset); + t.setPositionIncrement(posInc); + t.setPositionLength(posLength); + return t; + } + + private String randomNonEmptyString() { + while (true) { + String s = TestUtil.randomUnicodeString(random()).trim(); + //String s = TestUtil.randomSimpleString(random()).trim(); + if (s.length() != 0 && s.indexOf('\u0000') == -1) { + return s; + } + } + } + + // Adds MockGraphTokenFilter after SynFilter: + public void testRandomGraphAfter() throws Exception { + final int numIters = atLeast(3); + for (int i = 0; i < numIters; i++) { + SynonymMap.Builder b = new SynonymMap.Builder(random().nextBoolean()); + final int numEntries = atLeast(10); + for (int j = 0; j < numEntries; j++) { + add(b, randomNonEmptyString(), randomNonEmptyString(), random().nextBoolean()); + } + final SynonymMap map = b.build(); + final boolean ignoreCase = random().nextBoolean(); + + final Analyzer analyzer = new Analyzer() { + @Override + protected TokenStreamComponents 
createComponents(String fieldName) { + Tokenizer tokenizer = new MockTokenizer(MockTokenizer.SIMPLE, true); + TokenStream syns = new SynonymGraphFilter(tokenizer, map, ignoreCase); + TokenStream graph = new MockGraphTokenFilter(random(), syns); + return new TokenStreamComponents(tokenizer, graph); + } + }; + + checkRandomData(random(), analyzer, 100); + analyzer.close(); + } + } + + public void testEmptyStringInput() throws IOException { + final int numIters = atLeast(10); + for (int i = 0; i < numIters; i++) { + SynonymMap.Builder b = new SynonymMap.Builder(random().nextBoolean()); + final int numEntries = atLeast(10); + for (int j = 0; j < numEntries; j++) { + add(b, randomNonEmptyString(), randomNonEmptyString(), random().nextBoolean()); + } + final boolean ignoreCase = random().nextBoolean(); + + Analyzer analyzer = getAnalyzer(b, ignoreCase); + + checkAnalysisConsistency(random(), analyzer, random().nextBoolean(), ""); + analyzer.close(); + } + } + + /** + * simple random test, doesn't verify correctness. + * does verify it doesnt throw exceptions, or that the stream doesn't misbehave + */ + public void testRandom2() throws Exception { + final int numIters = atLeast(3); + for (int i = 0; i < numIters; i++) { + SynonymMap.Builder b = new SynonymMap.Builder(random().nextBoolean()); + final int numEntries = atLeast(10); + for (int j = 0; j < numEntries; j++) { + add(b, randomNonEmptyString(), randomNonEmptyString(), random().nextBoolean()); + } + final boolean ignoreCase = random().nextBoolean(); + + Analyzer analyzer = getAnalyzer(b, ignoreCase); + checkRandomData(random(), analyzer, 100); + analyzer.close(); + } + } + + /** + * simple random test like testRandom2, but for larger docs + */ + public void testRandomHuge() throws Exception { + final int numIters = atLeast(3); + for (int i = 0; i < numIters; i++) { + SynonymMap.Builder b = new SynonymMap.Builder(random().nextBoolean()); + final int numEntries = atLeast(10); + //if (VERBOSE) { + //System.out.println("TEST: iter=" + i + " numEntries=" + numEntries); + //} + for (int j = 0; j < numEntries; j++) { + add(b, randomNonEmptyString(), randomNonEmptyString(), random().nextBoolean()); + } + final boolean ignoreCase = random().nextBoolean(); + + Analyzer analyzer = getAnalyzer(b, ignoreCase); + checkRandomData(random(), analyzer, 100, 1024); + analyzer.close(); + } + } + + public void testEmptyTerm() throws IOException { + final int numIters = atLeast(10); + for (int i = 0; i < numIters; i++) { + SynonymMap.Builder b = new SynonymMap.Builder(random().nextBoolean()); + final int numEntries = atLeast(10); + for (int j = 0; j < numEntries; j++) { + add(b, randomNonEmptyString(), randomNonEmptyString(), random().nextBoolean()); + } + final boolean ignoreCase = random().nextBoolean(); + + final Analyzer analyzer = getAnalyzer(b, ignoreCase); + + checkAnalysisConsistency(random(), analyzer, random().nextBoolean(), ""); + analyzer.close(); + } + } + + public void testBuilderDedup() throws Exception { + SynonymMap.Builder b = new SynonymMap.Builder(true); + final boolean keepOrig = false; + add(b, "a b", "ab", keepOrig); + add(b, "a b", "ab", keepOrig); + add(b, "a b", "ab", keepOrig); + Analyzer a = getAnalyzer(b, true); + + assertAnalyzesTo(a, "a b", new String[]{"ab"}, new int[]{1}); + a.close(); + } + + public void testBuilderNoDedup() throws Exception { + SynonymMap.Builder b = new SynonymMap.Builder(false); + final boolean keepOrig = false; + add(b, "a b", "ab", keepOrig); + add(b, "a b", "ab", keepOrig); + add(b, "a b", "ab", keepOrig); 
+ Analyzer a = getAnalyzer(b, true); + + assertAnalyzesTo(a, "a b", new String[]{"ab", "ab", "ab"}, new int[]{1, 0, 0}); + a.close(); + } + + public void testRecursion1() throws Exception { + SynonymMap.Builder b = new SynonymMap.Builder(true); + final boolean keepOrig = false; + add(b, "zoo", "zoo", keepOrig); + Analyzer a = getAnalyzer(b, true); + + assertAnalyzesTo(a, "zoo zoo $ zoo", new String[]{"zoo", "zoo", "$", "zoo"}, new int[]{1, 1, 1, 1}); + a.close(); + } + + public void testRecursion2() throws Exception { + SynonymMap.Builder b = new SynonymMap.Builder(true); + final boolean keepOrig = false; + add(b, "zoo", "zoo", keepOrig); + add(b, "zoo", "zoo zoo", keepOrig); + Analyzer a = getAnalyzer(b, true); + + // verify("zoo zoo $ zoo", "zoo/zoo zoo/zoo/zoo $/zoo zoo/zoo zoo"); + assertAnalyzesTo(a, "zoo zoo $ zoo", new String[]{"zoo", "zoo", "zoo", "zoo", "zoo", "zoo", "$", "zoo", "zoo", "zoo"}, new + int[]{1, 0, 1, 1, 0, 1, 1, 1, 0, 1}); + a.close(); + } + + public void testKeepOrig() throws Exception { + SynonymMap.Builder b = new SynonymMap.Builder(true); + final boolean keepOrig = true; + add(b, "a b", "ab", keepOrig); + add(b, "a c", "ac", keepOrig); + add(b, "a", "aa", keepOrig); + add(b, "b", "bb", keepOrig); + add(b, "z x c v", "zxcv", keepOrig); + add(b, "x c", "xc", keepOrig); + Analyzer a = getAnalyzer(b, true); + + assertAnalyzesTo(a, "$", new String[]{"$"}, new int[]{1}); + assertAnalyzesTo(a, "a", new String[]{"aa", "a"}, new int[]{1, 0}); + assertAnalyzesTo(a, "a", new String[]{"aa", "a"}, new int[]{1, 0}); + assertAnalyzesTo(a, "$ a", new String[]{"$", "aa", "a"}, new int[]{1, 1, 0}); + assertAnalyzesTo(a, "a $", new String[]{"aa", "a", "$"}, new int[]{1, 0, 1}); + assertAnalyzesTo(a, "$ a !", new String[]{"$", "aa", "a", "!"}, new int[]{1, 1, 0, 1}); + assertAnalyzesTo(a, "a a", new String[]{"aa", "a", "aa", "a"}, new int[]{1, 0, 1, 0}); + assertAnalyzesTo(a, "b", new String[]{"bb", "b"}, new int[]{1, 0}); + assertAnalyzesTo(a, "z x c v", new String[]{"zxcv", "z", "x", "c", "v"}, new int[]{1, 0, 1, 1, 1}); + assertAnalyzesTo(a, "z x c $", new String[]{"z", "xc", "x", "c", "$"}, new int[]{1, 1, 0, 1, 1}); + a.close(); + } + + private Analyzer getAnalyzer(SynonymMap.Builder b, final boolean ignoreCase) throws IOException { + final SynonymMap map = b.build(); + return new Analyzer() { + @Override + protected TokenStreamComponents createComponents(String fieldName) { + Tokenizer tokenizer = new MockTokenizer(MockTokenizer.WHITESPACE, false); + // Make a local variable so testRandomHuge doesn't share it across threads! 
+ SynonymGraphFilter synFilter = new SynonymGraphFilter(tokenizer, map, ignoreCase); + SynonymGraphFilterTests.this.synFilter = synFilter; + return new TokenStreamComponents(tokenizer, synFilter); + } + }; + } + + private void add(SynonymMap.Builder b, String input, String output, boolean keepOrig) { + if (VERBOSE) { + //System.out.println(" add input=" + input + " output=" + output + " keepOrig=" + keepOrig); + } + CharsRefBuilder inputCharsRef = new CharsRefBuilder(); + SynonymMap.Builder.join(input.split(" +"), inputCharsRef); + + CharsRefBuilder outputCharsRef = new CharsRefBuilder(); + SynonymMap.Builder.join(output.split(" +"), outputCharsRef); + + b.add(inputCharsRef.get(), outputCharsRef.get(), keepOrig); + } + + private char[] randomBinaryChars(int minLen, int maxLen, double bias, char base) { + int len = TestUtil.nextInt(random(), minLen, maxLen); + char[] chars = new char[len]; + for (int i = 0; i < len; i++) { + char ch; + if (random().nextDouble() < bias) { + ch = base; + } else { + ch = (char) (base + 1); + } + chars[i] = ch; + } + + return chars; + } + + private static String toTokenString(char[] chars) { + StringBuilder b = new StringBuilder(); + for (char c : chars) { + if (b.length() > 0) { + b.append(' '); + } + b.append(c); + } + return b.toString(); + } + + private static class OneSyn { + char[] in; + char[] out; + boolean keepOrig; + + @Override + public String toString() { + return toTokenString(in) + " --> " + toTokenString(out) + " (keepOrig=" + keepOrig + ")"; + } + } + + public void testRandomSyns() throws Exception { + int synCount = atLeast(10); + double bias = random().nextDouble(); + boolean dedup = random().nextBoolean(); + + SynonymMap.Builder b = new SynonymMap.Builder(dedup); + List syns = new ArrayList<>(); + // Makes random syns from random a / b tokens, mapping to random x / y tokens + //if (VERBOSE) { + // System.out.println("TEST: make " + synCount + " syns"); + // System.out.println(" bias for a over b=" + bias); + // System.out.println(" dedup=" + dedup); + // System.out.println(" sausage=" + sausage); + //} + + int maxSynLength = 0; + + for (int i = 0; i < synCount; i++) { + OneSyn syn = new OneSyn(); + syn.in = randomBinaryChars(1, 5, bias, 'a'); + syn.out = randomBinaryChars(1, 5, 0.5, 'x'); + syn.keepOrig = random().nextBoolean(); + syns.add(syn); + + maxSynLength = Math.max(maxSynLength, syn.in.length); + + //if (VERBOSE) { + // System.out.println(" " + syn); + //} + add(b, toTokenString(syn.in), toTokenString(syn.out), syn.keepOrig); + } + + // Only used w/ VERBOSE: + Analyzer aNoSausageed; + if (VERBOSE) { + aNoSausageed = getAnalyzer(b, true); + } else { + aNoSausageed = null; + } + + Analyzer a = getAnalyzer(b, true); + int iters = atLeast(20); + for (int iter = 0; iter < iters; iter++) { + + String doc = toTokenString(randomBinaryChars(50, 100, bias, 'a')); + //String doc = toTokenString(randomBinaryChars(10, 50, bias, 'a')); + + //if (VERBOSE) { + // System.out.println("TEST: iter=" + iter + " doc=" + doc); + //} + Automaton expected = slowSynFilter(doc, syns); + if (VERBOSE) { + //System.out.println(" expected:\n" + expected.toDot()); + } + Automaton actual = toAutomaton(a.tokenStream("field", new StringReader(doc))); + //if (VERBOSE) { + // System.out.println(" actual:\n" + actual.toDot()); + //} + + assertTrue("maxLookaheadUsed=" + synFilter.getMaxLookaheadUsed() + " maxSynLength=" + maxSynLength, synFilter + .getMaxLookaheadUsed() <= maxSynLength); + + checkAnalysisConsistency(random(), a, random().nextBoolean(), doc); + // We can 
easily have a non-deterministic automaton at this point, e.g. if + // more than one syn matched at given point, or if the syn mapped to an + // output token that also happens to be in the input: + try { + actual = Operations.determinize(actual, 50000); + } catch (TooComplexToDeterminizeException tctde) { + // Unfortunately the syns can easily create difficult-to-determinize graphs: + assertTrue(approxEquals(actual, expected)); + continue; + } + + try { + expected = Operations.determinize(expected, 50000); + } catch (TooComplexToDeterminizeException tctde) { + // Unfortunately the syns can easily create difficult-to-determinize graphs: + assertTrue(approxEquals(actual, expected)); + continue; + } + + assertTrue(approxEquals(actual, expected)); + assertTrue(Operations.sameLanguage(actual, expected)); + } + + a.close(); + } + + /** + * Only used when true equality is too costly to check! + */ + private boolean approxEquals(Automaton actual, Automaton expected) { + // Don't collapse these into one line else the thread stack won't say which direction failed!: + boolean b1 = approxSubsetOf(actual, expected); + boolean b2 = approxSubsetOf(expected, actual); + return b1 && b2; + } + + private boolean approxSubsetOf(Automaton a1, Automaton a2) { + AutomatonTestUtil.RandomAcceptedStrings ras = new AutomatonTestUtil.RandomAcceptedStrings(a1); + for (int i = 0; i < 2000; i++) { + int[] ints = ras.getRandomAcceptedString(random()); + IntsRef path = new IntsRef(ints, 0, ints.length); + if (accepts(a2, path) == false) { + throw new RuntimeException("a2 does not accept " + path); + } + } + + // Presumed true + return true; + } + + /** + * Like {@link Operations#run} except the incoming automaton is allowed to be non-deterministic. + */ + private static boolean accepts(Automaton a, IntsRef path) { + Set states = new HashSet<>(); + states.add(0); + Transition t = new Transition(); + for (int i = 0; i < path.length; i++) { + int digit = path.ints[path.offset + i]; + Set nextStates = new HashSet<>(); + for (int state : states) { + int count = a.initTransition(state, t); + for (int j = 0; j < count; j++) { + a.getNextTransition(t); + if (digit >= t.min && digit <= t.max) { + nextStates.add(t.dest); + } + } + } + states = nextStates; + if (states.isEmpty()) { + return false; + } + } + + for (int state : states) { + if (a.isAccept(state)) { + return true; + } + } + + return false; + } + + /** + * Stupid, slow brute-force, yet hopefully bug-free, synonym filter. 
+ */ + private Automaton slowSynFilter(String doc, List syns) { + String[] tokens = doc.split(" +"); + //if (VERBOSE) { + // System.out.println(" doc has " + tokens.length + " tokens"); + //} + int i = 0; + Automaton.Builder a = new Automaton.Builder(); + int lastState = a.createState(); + while (i < tokens.length) { + // Consider all possible syn matches starting at this point: + assert tokens[i].length() == 1; + //if (VERBOSE) { + // System.out.println(" i=" + i); + //} + + List matches = new ArrayList<>(); + for (OneSyn syn : syns) { + if (i + syn.in.length <= tokens.length) { + boolean match = true; + for (int j = 0; j < syn.in.length; j++) { + if (tokens[i + j].charAt(0) != syn.in[j]) { + match = false; + break; + } + } + + if (match) { + if (matches.isEmpty() == false) { + if (syn.in.length < matches.get(0).in.length) { + // Greedy matching: we already found longer syns matching here + continue; + } else if (syn.in.length > matches.get(0).in.length) { + // Greedy matching: all previous matches were shorter, so we drop them + matches.clear(); + } else { + // Keep the current matches: we allow multiple synonyms matching the same input string + } + } + + matches.add(syn); + } + } + } + + int nextState = a.createState(); + + if (matches.isEmpty() == false) { + // We have match(es) starting at this token + //if (VERBOSE) { + // System.out.println(" matches @ i=" + i + ": " + matches); + //} + // We keepOrig if any of the matches said to: + boolean keepOrig = false; + for (OneSyn syn : matches) { + keepOrig |= syn.keepOrig; + } + + if (keepOrig) { + // Add path for the original tokens + addSidePath(a, lastState, nextState, matches.get(0).in); + } + + for (OneSyn syn : matches) { + addSidePath(a, lastState, nextState, syn.out); + } + + i += matches.get(0).in.length; + } else { + a.addTransition(lastState, nextState, tokens[i].charAt(0)); + i++; + } + + lastState = nextState; + } + + a.setAccept(lastState, true); + + return topoSort(a.finish()); + } + + /** + * Just creates a side path from startState to endState with the provided tokens. 
+ */ + private static void addSidePath(Automaton.Builder a, int startState, int endState, char[] tokens) { + int lastState = startState; + for (int i = 0; i < tokens.length; i++) { + int nextState; + if (i == tokens.length - 1) { + nextState = endState; + } else { + nextState = a.createState(); + } + + a.addTransition(lastState, nextState, tokens[i]); + + lastState = nextState; + } + } + + private Automaton toAutomaton(TokenStream ts) throws IOException { + PositionIncrementAttribute posIncAtt = ts.addAttribute(PositionIncrementAttribute.class); + PositionLengthAttribute posLenAtt = ts.addAttribute(PositionLengthAttribute.class); + CharTermAttribute termAtt = ts.addAttribute(CharTermAttribute.class); + ts.reset(); + Automaton a = new Automaton(); + int srcNode = -1; + int destNode = -1; + int state = a.createState(); + while (ts.incrementToken()) { + assert termAtt.length() == 1; + char c = termAtt.charAt(0); + int posInc = posIncAtt.getPositionIncrement(); + if (posInc != 0) { + srcNode += posInc; + while (state < srcNode) { + state = a.createState(); + } + } + destNode = srcNode + posLenAtt.getPositionLength(); + while (state < destNode) { + state = a.createState(); + } + a.addTransition(srcNode, destNode, c); + } + ts.end(); + ts.close(); + a.finishState(); + a.setAccept(destNode, true); + return a; + } + + /** + * Renumbers nodes according to their topo sort + */ + private Automaton topoSort(Automaton in) { + int[] newToOld = Operations.topoSortStates(in); + int[] oldToNew = new int[newToOld.length]; + + Automaton.Builder a = new Automaton.Builder(); + //System.out.println("remap:"); + for (int i = 0; i < newToOld.length; i++) { + a.createState(); + oldToNew[newToOld[i]] = i; + //System.out.println(" " + newToOld[i] + " -> " + i); + if (in.isAccept(newToOld[i])) { + a.setAccept(i, true); + //System.out.println(" **"); + } + } + + Transition t = new Transition(); + for (int i = 0; i < newToOld.length; i++) { + int count = in.initTransition(newToOld[i], t); + for (int j = 0; j < count; j++) { + in.getNextTransition(t); + a.addTransition(i, oldToNew[t.dest], t.min, t.max); + } + } + + return a.finish(); + } + + /** + * Helper method to validate all strings that can be generated from a token stream. Uses {@link + * TokenStreamToAutomaton} to create an automaton. Asserts the finite strings of the automaton + * are all and only the given valid strings. + * + * @param analyzer analyzer containing the SynonymFilter under test. + * @param text text to be analyzed. + * @param expectedStrings all expected finite strings. + */ + public void assertAllStrings(Analyzer analyzer, String text, String[] expectedStrings) throws IOException { + TokenStream tokenStream = analyzer.tokenStream("dummy", text); + try { + Automaton automaton = new TokenStreamToAutomaton().toAutomaton(tokenStream); + Set finiteStrings = AutomatonTestUtil.getFiniteStringsRecursive(automaton, -1); + + assertEquals("Invalid resulting strings count. 
Expected " + expectedStrings.length + " was " + finiteStrings.size(), + expectedStrings.length, finiteStrings.size()); + + Set expectedStringsSet = new HashSet<>(Arrays.asList(expectedStrings)); + + BytesRefBuilder scratchBytesRefBuilder = new BytesRefBuilder(); + for (IntsRef ir : finiteStrings) { + String s = Util.toBytesRef(ir, scratchBytesRefBuilder).utf8ToString().replace((char) TokenStreamToAutomaton.POS_SEP, ' '); + assertTrue("Unexpected string found: " + s, expectedStringsSet.contains(s)); + } + } finally { + tokenStream.close(); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java index 88827f9a644..202224872b4 100644 --- a/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java +++ b/core/src/test/java/org/elasticsearch/ExceptionSerializationTests.java @@ -840,11 +840,9 @@ public class ExceptionSerializationTests extends ESTestCase { public void testShardLockObtainFailedException() throws IOException { ShardId shardId = new ShardId("foo", "_na_", 1); ShardLockObtainFailedException orig = new ShardLockObtainFailedException(shardId, "boom"); - Version version = VersionUtils.randomVersionBetween(random(), - Version.V_5_0_0, Version.CURRENT); - if (version.before(Version.V_5_0_2_UNRELEASED)) { - // remove this once 5_0_2 is released randomVersionBetween asserts that this version is in the constant table.. - version = Version.V_5_0_2_UNRELEASED; + Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, Version.CURRENT); + if (version.before(Version.V_5_0_2)) { + version = Version.V_5_0_2; } ShardLockObtainFailedException ex = serialize(orig, version); assertEquals(orig.getMessage(), ex.getMessage()); diff --git a/core/src/test/java/org/elasticsearch/VersionTests.java b/core/src/test/java/org/elasticsearch/VersionTests.java index 3bccee941b3..51f7dd8f857 100644 --- a/core/src/test/java/org/elasticsearch/VersionTests.java +++ b/core/src/test/java/org/elasticsearch/VersionTests.java @@ -64,7 +64,37 @@ public class VersionTests extends ESTestCase { assertTrue(Version.fromString("5.0.0").onOrAfter(Version.fromString("5.0.0-beta2"))); assertTrue(Version.fromString("5.0.0-rc1").onOrAfter(Version.fromString("5.0.0-beta24"))); assertTrue(Version.fromString("5.0.0-alpha24").before(Version.fromString("5.0.0-beta0"))); + } + public void testMin() { + assertEquals(VersionUtils.getPreviousVersion(), Version.min(Version.CURRENT, VersionUtils.getPreviousVersion())); + assertEquals(Version.fromString("1.0.1"), Version.min(Version.fromString("1.0.1"), Version.CURRENT)); + Version version = VersionUtils.randomVersion(random()); + Version version1 = VersionUtils.randomVersion(random()); + if (version.id <= version1.id) { + assertEquals(version, Version.min(version1, version)); + } else { + assertEquals(version1, Version.min(version1, version)); + } + } + + public void testMax() { + assertEquals(Version.CURRENT, Version.max(Version.CURRENT, VersionUtils.getPreviousVersion())); + assertEquals(Version.CURRENT, Version.max(Version.fromString("1.0.1"), Version.CURRENT)); + Version version = VersionUtils.randomVersion(random()); + Version version1 = VersionUtils.randomVersion(random()); + if (version.id >= version1.id) { + assertEquals(version, Version.max(version1, version)); + } else { + assertEquals(version1, Version.max(version1, version)); + } + } + + public void testMinimumIndexCompatibilityVersion() { + assertEquals(Version.V_5_0_0, 
Version.V_6_0_0_alpha1_UNRELEASED.minimumIndexCompatibilityVersion()); + assertEquals(Version.V_2_0_0, Version.V_5_0_0.minimumIndexCompatibilityVersion()); + assertEquals(Version.V_2_0_0, Version.V_5_1_1_UNRELEASED.minimumIndexCompatibilityVersion()); + assertEquals(Version.V_2_0_0, Version.V_5_0_0_alpha1.minimumIndexCompatibilityVersion()); } public void testVersionConstantPresent() { diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java index 23fdf3499b2..102e16691d9 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainIT.java @@ -135,20 +135,20 @@ public final class ClusterAllocationExplainIT extends ESIntegTestCase { assertEquals(d.type(), Decision.Type.NO); if (noAttrNode.equals(nodeName)) { - assertThat(d.toString(), containsString("node does not match index include filters [foo:\"bar\"]")); + assertThat(d.toString(), containsString("node does not match [index.routing.allocation.include] filters [foo:\"bar\"]")); assertNull(storeStatus); assertEquals("the shard cannot be assigned because one or more allocation decider returns a 'NO' decision", explanation.getFinalExplanation()); assertEquals(ClusterAllocationExplanation.FinalDecision.NO, finalDecision); } else if (barAttrNode.equals(nodeName)) { - assertThat(d.toString(), containsString("node does not match index include filters [foo:\"bar\"]")); + assertThat(d.toString(), containsString("node does not match [index.routing.allocation.include] filters [foo:\"bar\"]")); barAttrWeight = weight; assertNull(storeStatus); assertEquals("the shard cannot be assigned because one or more allocation decider returns a 'NO' decision", explanation.getFinalExplanation()); assertEquals(ClusterAllocationExplanation.FinalDecision.NO, finalDecision); } else if (fooBarAttrNode.equals(nodeName)) { - assertThat(d.toString(), containsString("the shard cannot be allocated on the same node id")); + assertThat(d.toString(), containsString("the shard cannot be allocated to the same node")); fooBarAttrWeight = weight; assertEquals(storeStatus.getAllocationStatus(), IndicesShardStoresResponse.StoreStatus.AllocationStatus.PRIMARY); diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainTests.java index 895450e6d5b..329cc3805ab 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.admin.cluster.allocation; -import org.elasticsearch.client.Requests; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -51,12 +50,12 @@ public final class ClusterAllocationExplainTests extends ESSingleNodeTestCase { assertEquals(Decision.Type.NO, d.type()); assertEquals(ClusterAllocationExplanation.FinalDecision.NO, fd); assertEquals(ClusterAllocationExplanation.StoreCopy.AVAILABLE, storeCopy); - assertTrue(d.toString(), d.toString().contains("NO(the shard cannot be allocated on the same node id")); + 
assertTrue(d.toString(), d.toString().contains("NO(the shard cannot be allocated to the same node")); assertTrue(d instanceof Decision.Multi); Decision.Multi md = (Decision.Multi) d; Decision ssd = md.getDecisions().get(0); assertEquals(Decision.Type.NO, ssd.type()); - assertTrue(ssd.toString(), ssd.toString().contains("NO(the shard cannot be allocated on the same node id")); + assertTrue(ssd.toString(), ssd.toString().contains("NO(the shard cannot be allocated to the same node")); Float weight = explanation.getWeight(); assertNotNull("should have a weight", weight); @@ -78,12 +77,14 @@ public final class ClusterAllocationExplainTests extends ESSingleNodeTestCase { assertEquals(Decision.Type.NO, d.type()); assertEquals(ClusterAllocationExplanation.FinalDecision.ALREADY_ASSIGNED, fd); assertEquals(ClusterAllocationExplanation.StoreCopy.AVAILABLE, storeCopy); - assertTrue(d.toString(), d.toString().contains("NO(the shard cannot be allocated on the same node id")); + assertTrue(d.toString(), d.toString().contains( + "NO(the shard cannot be allocated to the node on which it already exists [[test][0]")); assertTrue(d instanceof Decision.Multi); md = (Decision.Multi) d; ssd = md.getDecisions().get(0); assertEquals(Decision.Type.NO, ssd.type()); - assertTrue(ssd.toString(), ssd.toString().contains("NO(the shard cannot be allocated on the same node id")); + assertTrue(ssd.toString(), ssd.toString().contains( + "NO(the shard cannot be allocated to the node on which it already exists [[test][0]")); weight = explanation.getWeight(); assertNotNull("should have a weight", weight); diff --git a/core/src/test/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsResponseTests.java b/core/src/test/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsResponseTests.java index 69259f03bc9..9c5b1ae8944 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsResponseTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsResponseTests.java @@ -93,7 +93,7 @@ public class ClusterSearchShardsResponseTests extends ESTestCase { assertEquals(clusterSearchShardsGroup.getShardId(), deserializedGroup.getShardId()); assertArrayEquals(clusterSearchShardsGroup.getShards(), deserializedGroup.getShards()); } - if (version.onOrAfter(Version.V_5_1_0_UNRELEASED)) { + if (version.onOrAfter(Version.V_5_1_1_UNRELEASED)) { assertEquals(clusterSearchShardsResponse.getIndicesAndFilters(), deserialized.getIndicesAndFilters()); } else { assertNull(deserialized.getIndicesAndFilters()); diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java index 7d36ae14739..9779ce83a65 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/template/put/MetaDataIndexTemplateServiceTests.java @@ -111,7 +111,7 @@ public class MetaDataIndexTemplateServiceTests extends ESSingleNodeTestCase { PutRequest request = new PutRequest("api", "validate_template"); request.patterns(Collections.singletonList("te*")); request.putMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("field2").field("type", "string").field("analyzer", "custom_1").endObject() + 
.startObject("field2").field("type", "text").field("analyzer", "custom_1").endObject() .endObject().endObject().endObject().string()); List errors = putTemplateDetail(request); diff --git a/core/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java b/core/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java index 76447268c7a..1d1532c4919 100644 --- a/core/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/index/IndexRequestTests.java @@ -22,7 +22,6 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.replication.ReplicationResponse; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.seqno.SequenceNumbersService; import org.elasticsearch.index.shard.ShardId; @@ -101,45 +100,6 @@ public class IndexRequestTests extends ESTestCase { assertThat(validate, notNullValue()); assertThat(validate.getMessage(), containsString("id is too long, must be no longer than 512 bytes but was: 513")); -} - - public void testSetTTLAsTimeValue() { - IndexRequest indexRequest = new IndexRequest(); - TimeValue ttl = TimeValue.parseTimeValue(randomTimeValue(), null, "ttl"); - indexRequest.ttl(ttl); - assertThat(indexRequest.ttl(), equalTo(ttl)); - } - - public void testSetTTLAsString() { - IndexRequest indexRequest = new IndexRequest(); - String ttlAsString = randomTimeValue(); - TimeValue ttl = TimeValue.parseTimeValue(ttlAsString, null, "ttl"); - indexRequest.ttl(ttlAsString); - assertThat(indexRequest.ttl(), equalTo(ttl)); - } - - public void testSetTTLAsLong() { - IndexRequest indexRequest = new IndexRequest(); - String ttlAsString = randomTimeValue(); - TimeValue ttl = TimeValue.parseTimeValue(ttlAsString, null, "ttl"); - indexRequest.ttl(ttl.millis()); - assertThat(indexRequest.ttl(), equalTo(ttl)); - } - - public void testValidateTTL() { - IndexRequest indexRequest = new IndexRequest("index", "type"); - if (randomBoolean()) { - indexRequest.ttl(randomIntBetween(Integer.MIN_VALUE, -1)); - } else { - if (randomBoolean()) { - indexRequest.ttl(new TimeValue(randomIntBetween(Integer.MIN_VALUE, -1))); - } else { - indexRequest.ttl(randomIntBetween(Integer.MIN_VALUE, -1) + "ms"); - } - } - ActionRequestValidationException validate = indexRequest.validate(); - assertThat(validate, notNullValue()); - assertThat(validate.getMessage(), containsString("ttl must not be negative")); } public void testWaitForActiveShards() { diff --git a/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java b/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java index 3b27bbff9ce..cdf6a225620 100644 --- a/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java @@ -181,48 +181,6 @@ public class UpdateRequestTests extends ESTestCase { assertThat(((Map) doc.get("compound")).get("field2").toString(), equalTo("value2")); } - // Related to issue 3256 - public void testUpdateRequestWithTTL() throws Exception { - TimeValue providedTTLValue = TimeValue.parseTimeValue(randomTimeValue(), null, "ttl"); - Settings settings = settings(Version.CURRENT).build(); - - UpdateHelper updateHelper = new UpdateHelper(settings, null); - - // We just upsert one document with ttl - IndexRequest 
indexRequest = new IndexRequest("test", "type1", "1") - .source(jsonBuilder().startObject().field("foo", "bar").endObject()) - .ttl(providedTTLValue); - UpdateRequest updateRequest = new UpdateRequest("test", "type1", "1") - .doc(jsonBuilder().startObject().field("fooz", "baz").endObject()) - .upsert(indexRequest); - - long nowInMillis = randomPositiveLong(); - // We simulate that the document is not existing yet - GetResult getResult = new GetResult("test", "type1", "1", 0, false, null, null); - UpdateHelper.Result result = updateHelper.prepare(new ShardId("test", "_na_", 0),updateRequest, getResult, () -> nowInMillis); - Streamable action = result.action(); - assertThat(action, instanceOf(IndexRequest.class)); - IndexRequest indexAction = (IndexRequest) action; - assertThat(indexAction.ttl(), is(providedTTLValue)); - - // We just upsert one document with ttl using a script - indexRequest = new IndexRequest("test", "type1", "2") - .source(jsonBuilder().startObject().field("foo", "bar").endObject()) - .ttl(providedTTLValue); - updateRequest = new UpdateRequest("test", "type1", "2") - .upsert(indexRequest) - .script(new Script(";")) - .scriptedUpsert(true); - - // We simulate that the document is not existing yet - getResult = new GetResult("test", "type1", "2", 0, false, null, null); - result = updateHelper.prepare(new ShardId("test", "_na_", 0), updateRequest, getResult, () -> nowInMillis); - action = result.action(); - assertThat(action, instanceOf(IndexRequest.class)); - indexAction = (IndexRequest) action; - assertThat(indexAction.ttl(), is(providedTTLValue)); - } - // Related to issue #15822 public void testInvalidBodyThrowsParseException() throws Exception { UpdateRequest request = new UpdateRequest("test", "type", "1"); @@ -312,15 +270,13 @@ public class UpdateRequestTests extends ESTestCase { ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry); ScriptService scriptService = new ScriptService(baseSettings, environment, new ResourceWatcherService(baseSettings, null), scriptEngineRegistry, scriptContextRegistry, scriptSettings); - TimeValue providedTTLValue = TimeValue.parseTimeValue(randomTimeValue(), null, "ttl"); Settings settings = settings(Version.CURRENT).build(); UpdateHelper updateHelper = new UpdateHelper(settings, scriptService); // We just upsert one document with now() using a script IndexRequest indexRequest = new IndexRequest("test", "type1", "2") - .source(jsonBuilder().startObject().field("foo", "bar").endObject()) - .ttl(providedTTLValue); + .source(jsonBuilder().startObject().field("foo", "bar").endObject()); { UpdateRequest updateRequest = new UpdateRequest("test", "type1", "2") @@ -341,14 +297,11 @@ public class UpdateRequestTests extends ESTestCase { .upsert(indexRequest) .script(new Script(ScriptType.INLINE, "mock", "ctx._timestamp = ctx._now", Collections.emptyMap())) .scriptedUpsert(true); - long nowInMillis = randomPositiveLong(); // We simulate that the document is not existing yet GetResult getResult = new GetResult("test", "type1", "2", 0, true, new BytesArray("{}"), null); - UpdateHelper.Result result = updateHelper.prepare(new ShardId("test", "_na_", 0), updateRequest, getResult, () -> nowInMillis); + UpdateHelper.Result result = updateHelper.prepare(new ShardId("test", "_na_", 0), updateRequest, getResult, () -> 42L); Streamable action = result.action(); assertThat(action, instanceOf(IndexRequest.class)); - IndexRequest indexAction = (IndexRequest) action; - assertEquals(indexAction.timestamp(), 
Long.toString(nowInMillis)); } } } diff --git a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java index cea041d7777..1fd71c7ae51 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/OldIndexBackwardsCompatibilityIT.java @@ -34,6 +34,7 @@ import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.routing.RecoverySource; @@ -49,7 +50,6 @@ import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.gateway.MetaDataStateFormat; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.engine.Segment; -import org.elasticsearch.index.mapper.StringFieldMapperPositionIncrementGapTests; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.node.Node; @@ -82,6 +82,7 @@ import java.util.Map; import java.util.SortedSet; import java.util.TreeSet; +import static org.elasticsearch.index.query.QueryBuilders.matchPhraseQuery; import static org.elasticsearch.test.OldIndexUtils.assertUpgradeWorks; import static org.elasticsearch.test.OldIndexUtils.getIndexDir; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -188,7 +189,7 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { for (Version v : VersionUtils.allReleasedVersions()) { if (VersionUtils.isSnapshot(v)) continue; // snapshots are unreleased, so there is no backcompat yet if (v.isRelease() == false) continue; // no guarantees for prereleases - if (v.onOrBefore(Version.V_2_0_0_beta1)) continue; // we can only test back one major lucene version + if (v.before(Version.CURRENT.minimumIndexCompatibilityVersion())) continue; // we can only support one major version backward if (v.equals(Version.CURRENT)) continue; // the current version is always compatible with itself expectedVersions.add("index-" + v.toString() + ".zip"); } @@ -425,11 +426,31 @@ public class OldIndexBackwardsCompatibilityIT extends ESIntegTestCase { } void assertPositionIncrementGapDefaults(String indexName, Version version) throws Exception { - if (version.before(Version.V_2_0_0_beta1)) { - StringFieldMapperPositionIncrementGapTests.assertGapIsZero(client(), indexName, "doc"); - } else { - StringFieldMapperPositionIncrementGapTests.assertGapIsOneHundred(client(), indexName, "doc"); - } + client().prepareIndex(indexName, "doc", "position_gap_test").setSource("string", Arrays.asList("one", "two three")) + .setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); + + // Baseline - phrase query finds matches in the same field value + assertHitCount(client().prepareSearch(indexName).setQuery(matchPhraseQuery("string", "two three")).get(), 1); + + // No match across gaps when slop < position gap + assertHitCount( + client().prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(99)).get(), + 0); + + // Match across gaps when slop >= position gap + 
assertHitCount(client().prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(100)).get(), 1); + assertHitCount(client().prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(101)).get(), + 1); + + // No match across gap using default slop with default positionIncrementGap + assertHitCount(client().prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two")).get(), 0); + + // Nor with small-ish values + assertHitCount(client().prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(5)).get(), 0); + assertHitCount(client().prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(50)).get(), 0); + + // But huge-ish values still match + assertHitCount(client().prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(500)).get(), 1); } private static final Version VERSION_5_1_0_UNRELEASED = Version.fromString("5.1.0"); diff --git a/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java b/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java index 16321dbd9fe..5a46f50382d 100644 --- a/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java +++ b/core/src/test/java/org/elasticsearch/bwcompat/RestoreBackwardsCompatIT.java @@ -45,7 +45,6 @@ import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Locale; @@ -98,7 +97,7 @@ public class RestoreBackwardsCompatIT extends AbstractSnapshotIntegTestCase { for (Version v : VersionUtils.allReleasedVersions()) { if (VersionUtils.isSnapshot(v)) continue; // snapshots are unreleased, so there is no backcompat yet if (v.isRelease() == false) continue; // no guarantees for prereleases - if (v.onOrBefore(Version.V_2_0_0_beta1)) continue; // we can only test back one major lucene version + if (v.before(Version.CURRENT.minimumIndexCompatibilityVersion())) continue; // we only support versions N and N-1 if (v.equals(Version.CURRENT)) continue; // the current version is always compatible with itself expectedVersions.add(v.toString()); } @@ -128,44 +127,6 @@ public class RestoreBackwardsCompatIT extends AbstractSnapshotIntegTestCase { } } - public void testRestoreSnapshotWithMissingChecksum() throws Exception { - final String repo = "test_repo"; - final String snapshot = "test_1"; - final String indexName = "index-2.3.4"; - final String repoFileId = "missing-checksum-repo-2.3.4"; - Path repoFile = getBwcIndicesPath().resolve(repoFileId + ".zip"); - URI repoFileUri = repoFile.toUri(); - URI repoJarUri = new URI("jar:" + repoFileUri.toString() + "!/repo/"); - logger.info("--> creating repository [{}] for repo file [{}]", repo, repoFileId); - assertAcked(client().admin().cluster().preparePutRepository(repo) - .setType("url") - .setSettings(Settings.builder().put("url", repoJarUri.toString()))); - - logger.info("--> get snapshot and check its indices"); - GetSnapshotsResponse getSnapshotsResponse = client().admin().cluster().prepareGetSnapshots(repo).setSnapshots(snapshot).get(); - assertThat(getSnapshotsResponse.getSnapshots().size(), equalTo(1)); - SnapshotInfo snapshotInfo = getSnapshotsResponse.getSnapshots().get(0); - assertThat(snapshotInfo.indices(), equalTo(Arrays.asList(indexName))); - - logger.info("--> restoring snapshot"); - RestoreSnapshotResponse response = client().admin().cluster().prepareRestoreSnapshot(repo, 
snapshot).setRestoreGlobalState(true).setWaitForCompletion(true).get(); - assertThat(response.status(), equalTo(RestStatus.OK)); - RestoreInfo restoreInfo = response.getRestoreInfo(); - assertThat(restoreInfo.successfulShards(), greaterThan(0)); - assertThat(restoreInfo.successfulShards(), equalTo(restoreInfo.totalShards())); - assertThat(restoreInfo.failedShards(), equalTo(0)); - String index = restoreInfo.indices().get(0); - assertThat(index, equalTo(indexName)); - - logger.info("--> check search"); - SearchResponse searchResponse = client().prepareSearch(index).get(); - assertThat(searchResponse.getHits().totalHits(), greaterThan(0L)); - - logger.info("--> cleanup"); - cluster().wipeIndices(restoreInfo.indices().toArray(new String[restoreInfo.indices().size()])); - cluster().wipeTemplates(); - } - private List repoVersions() throws Exception { return listRepoVersions("repo"); } @@ -245,7 +206,7 @@ public class RestoreBackwardsCompatIT extends AbstractSnapshotIntegTestCase { logger.info("--> restoring unsupported snapshot"); try { client().admin().cluster().prepareRestoreSnapshot(repo, snapshot).setRestoreGlobalState(true).setWaitForCompletion(true).get(); - fail("should have failed to restore"); + fail("should have failed to restore - " + repo); } catch (SnapshotRestoreException ex) { assertThat(ex.getMessage(), containsString("cannot restore index")); assertThat(ex.getMessage(), containsString("because it cannot be upgraded")); diff --git a/core/src/test/java/org/elasticsearch/client/transport/FailAndRetryMockTransport.java b/core/src/test/java/org/elasticsearch/client/transport/FailAndRetryMockTransport.java index 908d2eb6d1e..0416f6aff41 100644 --- a/core/src/test/java/org/elasticsearch/client/transport/FailAndRetryMockTransport.java +++ b/core/src/test/java/org/elasticsearch/client/transport/FailAndRetryMockTransport.java @@ -26,13 +26,13 @@ import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.component.Lifecycle; import org.elasticsearch.common.component.LifecycleListener; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.transport.ConnectTransportException; +import org.elasticsearch.transport.ConnectionProfile; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportRequest; @@ -153,23 +153,13 @@ abstract class FailAndRetryMockTransport imp throw new UnsupportedOperationException(); } - @Override - public boolean addressSupported(Class address) { - throw new UnsupportedOperationException(); - } - @Override public boolean nodeConnected(DiscoveryNode node) { return false; } @Override - public void connectToNode(DiscoveryNode node) throws ConnectTransportException { - - } - - @Override - public void connectToNodeLight(DiscoveryNode node) throws ConnectTransportException { + public void connectToNode(DiscoveryNode node, ConnectionProfile connectionProfile) throws ConnectTransportException { } diff --git a/core/src/test/java/org/elasticsearch/cluster/NodeConnectionsServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/NodeConnectionsServiceTests.java index 5dcbefbe034..7372218a9ed 100644 
--- a/core/src/test/java/org/elasticsearch/cluster/NodeConnectionsServiceTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/NodeConnectionsServiceTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ConnectTransportException; +import org.elasticsearch.transport.ConnectionProfile; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportRequest; @@ -175,7 +176,6 @@ public class NodeConnectionsServiceTests extends ESTestCase { @Override public void transportServiceAdapter(TransportServiceAdapter service) { - } @Override @@ -193,27 +193,19 @@ public class NodeConnectionsServiceTests extends ESTestCase { return new TransportAddress[0]; } - @Override - public boolean addressSupported(Class address) { - return false; - } - @Override public boolean nodeConnected(DiscoveryNode node) { return connectedNodes.contains(node); } @Override - public void connectToNode(DiscoveryNode node) throws ConnectTransportException { - if (connectedNodes.contains(node) == false && randomConnectionExceptions && randomBoolean()) { - throw new ConnectTransportException(node, "simulated"); + public void connectToNode(DiscoveryNode node, ConnectionProfile connectionProfile) throws ConnectTransportException { + if (connectionProfile == null) { + if (connectedNodes.contains(node) == false && randomConnectionExceptions && randomBoolean()) { + throw new ConnectTransportException(node, "simulated"); + } + connectedNodes.add(node); } - connectedNodes.add(node); - } - - @Override - public void connectToNodeLight(DiscoveryNode node) throws ConnectTransportException { - } @Override diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java index 376feb305a1..a1dfdbc74f9 100644 --- a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java @@ -60,11 +60,11 @@ public class MetaDataIndexUpgradeServiceTests extends ESTestCase { Collections.emptyMap()), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS); IndexMetaData src = newIndexMeta("foo", Settings.builder().put("index.refresh_interval", "-200").build()); assertFalse(service.isUpgraded(src)); - src = service.upgradeIndexMetaData(src); + src = service.upgradeIndexMetaData(src, Version.CURRENT.minimumIndexCompatibilityVersion()); assertTrue(service.isUpgraded(src)); assertEquals("-200", src.getSettings().get("archived.index.refresh_interval")); assertNull(src.getSettings().get("index.refresh_interval")); - assertSame(src, service.upgradeIndexMetaData(src)); // no double upgrade + assertSame(src, service.upgradeIndexMetaData(src, Version.CURRENT.minimumIndexCompatibilityVersion())); // no double upgrade } public void testIsUpgraded() { @@ -83,20 +83,21 @@ public class MetaDataIndexUpgradeServiceTests extends ESTestCase { MetaDataIndexUpgradeService service = new MetaDataIndexUpgradeService(Settings.EMPTY, new MapperRegistry(Collections.emptyMap(), Collections.emptyMap()), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS); final IndexMetaData metaData = newIndexMeta("foo", Settings.builder() - .put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.V_2_0_0_beta1) - 
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.fromString("1.7.0")) + .put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.V_5_0_0_beta1) + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.fromString("2.4.0")) .put(IndexMetaData.SETTING_VERSION_MINIMUM_COMPATIBLE, Version.CURRENT.luceneVersion.toString()).build()); - String message = expectThrows(IllegalStateException.class, () -> service.upgradeIndexMetaData(metaData)).getMessage(); - assertEquals(message, "The index [[foo/BOOM]] was created before v2.0.0.beta1. It should be reindexed in Elasticsearch 2.x " + - "before upgrading to " + Version.CURRENT.toString() + "."); + String message = expectThrows(IllegalStateException.class, () -> service.upgradeIndexMetaData(metaData, + Version.CURRENT.minimumIndexCompatibilityVersion())).getMessage(); + assertEquals(message, "The index [[foo/BOOM]] was created with version [2.4.0] but the minimum compatible version is [5.0.0]." + + " It should be re-indexed in Elasticsearch 5.x before upgrading to " + Version.CURRENT.toString() + "."); IndexMetaData goodMeta = newIndexMeta("foo", Settings.builder() - .put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.V_2_0_0_beta1) - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.fromString("2.1.0")) + .put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.V_5_0_0_beta1) + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.fromString("5.1.0")) .put(IndexMetaData.SETTING_VERSION_MINIMUM_COMPATIBLE, Version.CURRENT.luceneVersion.toString()).build()); - service.upgradeIndexMetaData(goodMeta); + service.upgradeIndexMetaData(goodMeta, Version.V_5_0_0.minimumIndexCompatibilityVersion()); } public static IndexMetaData newIndexMeta(String name, Settings indexSettings) { @@ -105,7 +106,7 @@ public class MetaDataIndexUpgradeServiceTests extends ESTestCase { .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_CREATION_DATE, 1) .put(IndexMetaData.SETTING_INDEX_UUID, "BOOM") - .put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.V_2_0_0_beta1) + .put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.V_5_0_0_beta1) .put(indexSettings) .build(); IndexMetaData metaData = IndexMetaData.builder(name).settings(build).build(); diff --git a/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeTests.java b/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeTests.java new file mode 100644 index 00000000000..5178c5f3fc8 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodeTests.java @@ -0,0 +1,80 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.cluster.node; + +import org.elasticsearch.Version; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.test.ESTestCase; + +import java.net.InetAddress; + +import static java.util.Collections.emptyMap; +import static java.util.Collections.emptySet; + +public class DiscoveryNodeTests extends ESTestCase { + + public void testDiscoveryNodeIsCreatedWithHostFromInetAddress() throws Exception { + InetAddress inetAddress = randomBoolean() ? InetAddress.getByName("192.0.2.1") : + InetAddress.getByAddress("name1", new byte[] { (byte) 192, (byte) 168, (byte) 0, (byte) 1}); + TransportAddress transportAddress = new TransportAddress(inetAddress, randomIntBetween(0, 65535)); + DiscoveryNode node = new DiscoveryNode("name1", "id1", transportAddress, emptyMap(), emptySet(), Version.CURRENT); + assertEquals(transportAddress.address().getHostString(), node.getHostName()); + assertEquals(transportAddress.getAddress(), node.getHostAddress()); + } + + public void testDiscoveryNodeSerializationKeepsHost() throws Exception { + InetAddress inetAddress = InetAddress.getByAddress("name1", new byte[] { (byte) 192, (byte) 168, (byte) 0, (byte) 1}); + TransportAddress transportAddress = new TransportAddress(inetAddress, randomIntBetween(0, 65535)); + DiscoveryNode node = new DiscoveryNode("name1", "id1", transportAddress, emptyMap(), emptySet(), Version.CURRENT); + + BytesStreamOutput streamOutput = new BytesStreamOutput(); + streamOutput.setVersion(Version.CURRENT); + node.writeTo(streamOutput); + + StreamInput in = StreamInput.wrap(streamOutput.bytes().toBytesRef().bytes); + DiscoveryNode serialized = new DiscoveryNode(in); + assertEquals(transportAddress.address().getHostString(), serialized.getHostName()); + assertEquals(transportAddress.address().getHostString(), serialized.getAddress().address().getHostString()); + assertEquals(transportAddress.getAddress(), serialized.getHostAddress()); + assertEquals(transportAddress.getAddress(), serialized.getAddress().getAddress()); + assertEquals(transportAddress.getPort(), serialized.getAddress().getPort()); + } + + public void testDiscoveryNodeSerializationToOldVersion() throws Exception { + InetAddress inetAddress = InetAddress.getByAddress("name1", new byte[] { (byte) 192, (byte) 168, (byte) 0, (byte) 1}); + TransportAddress transportAddress = new TransportAddress(inetAddress, randomIntBetween(0, 65535)); + DiscoveryNode node = new DiscoveryNode("name1", "id1", transportAddress, emptyMap(), emptySet(), Version.CURRENT); + + BytesStreamOutput streamOutput = new BytesStreamOutput(); + streamOutput.setVersion(Version.V_5_0_0); + node.writeTo(streamOutput); + + StreamInput in = StreamInput.wrap(streamOutput.bytes().toBytesRef().bytes); + in.setVersion(Version.V_5_0_0); + DiscoveryNode serialized = new DiscoveryNode(in); + assertEquals(transportAddress.address().getHostString(), serialized.getHostName()); + assertNotEquals(transportAddress.address().getHostString(), serialized.getAddress().address().getHostString()); + assertEquals(transportAddress.getAddress(), serialized.getHostAddress()); + assertEquals(transportAddress.getAddress(), serialized.getAddress().getAddress()); + assertEquals(transportAddress.getPort(), serialized.getAddress().getPort()); + } +} diff --git a/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodesTests.java 
b/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodesTests.java index 342919fb881..4ad1c5fdd08 100644 --- a/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodesTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/node/DiscoveryNodesTests.java @@ -22,6 +22,7 @@ package org.elasticsearch.cluster.node; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import org.elasticsearch.Version; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.VersionUtils; import java.util.ArrayList; import java.util.Arrays; @@ -250,4 +251,22 @@ public class DiscoveryNodesTests extends ESTestCase { abstract Set matchingNodeIds(DiscoveryNodes nodes); } + + public void testMaxMinNodeVersion() { + DiscoveryNodes.Builder discoBuilder = DiscoveryNodes.builder(); + discoBuilder.add(new DiscoveryNode("name_" + 1, "node_" + 1, buildNewFakeTransportAddress(), Collections.emptyMap(), + new HashSet<>(randomSubsetOf(Arrays.asList(DiscoveryNode.Role.values()))), + Version.fromString("5.1.0"))); + discoBuilder.add(new DiscoveryNode("name_" + 2, "node_" + 2, buildNewFakeTransportAddress(), Collections.emptyMap(), + new HashSet<>(randomSubsetOf(Arrays.asList(DiscoveryNode.Role.values()))), + Version.fromString("6.3.0"))); + discoBuilder.add(new DiscoveryNode("name_" + 3, "node_" + 3, buildNewFakeTransportAddress(), Collections.emptyMap(), + new HashSet<>(randomSubsetOf(Arrays.asList(DiscoveryNode.Role.values()))), + Version.fromString("1.1.0"))); + discoBuilder.localNodeId("name_1"); + discoBuilder.masterNodeId("name_2"); + DiscoveryNodes build = discoBuilder.build(); + assertEquals( Version.fromString("6.3.0"), build.getMaxNodeVersion()); + assertEquals( Version.fromString("1.1.0"), build.getMinNodeVersion()); + } } diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FilterAllocationDeciderTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FilterAllocationDeciderTests.java index 3a792ae991c..5ec162eb719 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FilterAllocationDeciderTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/FilterAllocationDeciderTests.java @@ -28,9 +28,11 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.allocator.BalancedShardsAllocator; import org.elasticsearch.cluster.routing.allocation.decider.AllocationDeciders; import org.elasticsearch.cluster.routing.allocation.decider.Decision; +import org.elasticsearch.cluster.routing.allocation.decider.Decision.Type; import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.ReplicaAfterPrimaryActiveAllocationDecider; import org.elasticsearch.cluster.routing.allocation.decider.SameShardAllocationDecider; @@ -74,12 +76,23 @@ public class FilterAllocationDeciderTests extends ESAllocationTestCase { // after failing the shard we are unassigned since the node is blacklisted and we can't initialize on the other node RoutingAllocation allocation = new RoutingAllocation(allocationDeciders, state.getRoutingNodes(), state, null, 0, false); - assertEquals(filterAllocationDecider.canAllocate(routingTable.index("idx").shard(0).primaryShard(), - 
state.getRoutingNodes().node("node2") - , allocation), Decision.YES); - assertEquals(filterAllocationDecider.canAllocate(routingTable.index("idx").shard(0).primaryShard(), - state.getRoutingNodes().node("node1") - , allocation), Decision.NO); + allocation.debugDecision(true); + Decision.Single decision = (Decision.Single) filterAllocationDecider.canAllocate( + routingTable.index("idx").shard(0).primaryShard(), + state.getRoutingNodes().node("node2"), allocation); + assertEquals(Type.YES, decision.type()); + assertEquals("node passes include/exclude/require filters", decision.getExplanation()); + ShardRouting primaryShard = routingTable.index("idx").shard(0).primaryShard(); + decision = (Decision.Single) filterAllocationDecider.canAllocate( + routingTable.index("idx").shard(0).primaryShard(), + state.getRoutingNodes().node("node1"), allocation); + assertEquals(Type.NO, decision.type()); + if (primaryShard.recoverySource().getType() == RecoverySource.Type.LOCAL_SHARDS) { + assertEquals("initial allocation of the shrunken index is only allowed on nodes [_id:\"node2\"] that " + + "hold a copy of every shard in the index", decision.getExplanation()); + } else { + assertEquals("initial allocation of the index is only allowed on nodes [_id:\"node2\"]", decision.getExplanation()); + } state = service.reroute(state, "try allocate again"); routingTable = state.routingTable(); @@ -114,12 +127,17 @@ public class FilterAllocationDeciderTests extends ESAllocationTestCase { allocation = new RoutingAllocation(allocationDeciders, state.getRoutingNodes(), state, null, 0, false); - assertEquals(filterAllocationDecider.canAllocate(routingTable.index("idx").shard(0).shards().get(0), - state.getRoutingNodes().node("node2") - , allocation), Decision.YES); - assertEquals(filterAllocationDecider.canAllocate(routingTable.index("idx").shard(0).shards().get(0), - state.getRoutingNodes().node("node1") - , allocation), Decision.YES); + allocation.debugDecision(true); + decision = (Decision.Single) filterAllocationDecider.canAllocate( + routingTable.index("idx").shard(0).shards().get(0), + state.getRoutingNodes().node("node2"), allocation); + assertEquals(Type.YES, decision.type()); + assertEquals("node passes include/exclude/require filters", decision.getExplanation()); + decision = (Decision.Single) filterAllocationDecider.canAllocate( + routingTable.index("idx").shard(0).shards().get(0), + state.getRoutingNodes().node("node1"), allocation); + assertEquals(Type.YES, decision.type()); + assertEquals("node passes include/exclude/require filters", decision.getExplanation()); } private ClusterState createInitialClusterState(AllocationService service, Settings settings) { diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java index c80cc9a26b9..3aba43f366a 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderTests.java @@ -33,6 +33,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.RoutingNode; import 
org.elasticsearch.cluster.routing.RoutingNodes; import org.elasticsearch.cluster.routing.RoutingTable; @@ -783,9 +784,10 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { strategy.reroute(clusterState, cmds, false, false); fail("should not have been able to reroute the shard"); } catch (IllegalArgumentException e) { - assertThat("can't allocated because there isn't enough room: " + e.getMessage(), + assertThat("can't be allocated because there isn't enough room: " + e.getMessage(), e.getMessage(), - containsString("the node is above the low watermark and has more than allowed [70.0%] used disk, free: [26.0%]")); + containsString("the node is above the low watermark [cluster.routing.allocation.disk.watermark.low=0.7], using " + + "more disk space than the maximum allowed [70.0%], actual free: [26.0%]")); } } @@ -852,8 +854,13 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { ClusterState clusterState = ClusterState.builder(baseClusterState).routingTable(builder.build()).build(); RoutingAllocation routingAllocation = new RoutingAllocation(null, new RoutingNodes(clusterState), clusterState, clusterInfo, System.nanoTime(), false); + routingAllocation.debugDecision(true); Decision decision = diskThresholdDecider.canRemain(firstRouting, firstRoutingNode, routingAllocation); assertThat(decision.type(), equalTo(Decision.Type.NO)); + assertThat(((Decision.Single) decision).getExplanation(), containsString( + "the shard cannot remain on this node because it is above the high watermark " + + "[cluster.routing.allocation.disk.watermark.high=70%] and there is less than the required [30.0%] free disk on node, " + + "actual free: [20.0%]")); // Two shards consuming each 80% of disk space while 70% is allowed, but one is relocating, so shard 0 can stay firstRouting = TestShardRouting.newShardRouting("test", 0, "node1", null, true, ShardRoutingState.STARTED); @@ -874,10 +881,22 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { clusterState = ClusterState.builder(baseClusterState).routingTable(builder.build()).build(); routingAllocation = new RoutingAllocation(null, new RoutingNodes(clusterState), clusterState, clusterInfo, System.nanoTime(), false); + routingAllocation.debugDecision(true); decision = diskThresholdDecider.canRemain(firstRouting, firstRoutingNode, routingAllocation); assertThat(decision.type(), equalTo(Decision.Type.YES)); + assertEquals("there is enough disk on this node for the shard to remain, free: [60b]", + ((Decision.Single) decision).getExplanation()); decision = diskThresholdDecider.canAllocate(fooRouting, firstRoutingNode, routingAllocation); assertThat(decision.type(), equalTo(Decision.Type.NO)); + if (fooRouting.recoverySource().getType() == RecoverySource.Type.EMPTY_STORE) { + assertThat(((Decision.Single) decision).getExplanation(), containsString( + "the node is above the high watermark [cluster.routing.allocation.disk.watermark.high=70%], using more disk space than " + + "the maximum allowed [70.0%], actual free: [20.0%]")); + } else { + assertThat(((Decision.Single) decision).getExplanation(), containsString( + "the node is above the low watermark [cluster.routing.allocation.disk.watermark.low=60%], using more disk space than " + + "the maximum allowed [60.0%], actual free: [20.0%]")); + } // Creating AllocationService instance and the services it depends on... 
ClusterInfoService cis = new ClusterInfoService() { @@ -972,10 +991,12 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { ClusterState clusterState = ClusterState.builder(baseClusterState).routingTable(builder.build()).build(); RoutingAllocation routingAllocation = new RoutingAllocation(null, new RoutingNodes(clusterState), clusterState, clusterInfo, System.nanoTime(), false); + routingAllocation.debugDecision(true); Decision decision = diskThresholdDecider.canRemain(firstRouting, firstRoutingNode, routingAllocation); // Two shards should start happily assertThat(decision.type(), equalTo(Decision.Type.YES)); + assertThat(((Decision.Single) decision).getExplanation(), containsString("there is only a single data node present")); ClusterInfoService cis = new ClusterInfoService() { @Override public ClusterInfo getClusterInfo() { @@ -1032,8 +1053,11 @@ public class DiskThresholdDeciderTests extends ESAllocationTestCase { clusterState = ClusterState.builder(updateClusterState).routingTable(builder.build()).build(); routingAllocation = new RoutingAllocation(null, new RoutingNodes(clusterState), clusterState, clusterInfo, System.nanoTime(), false); + routingAllocation.debugDecision(true); decision = diskThresholdDecider.canRemain(firstRouting, firstRoutingNode, routingAllocation); assertThat(decision.type(), equalTo(Decision.Type.YES)); + assertThat(((Decision.Single) decision).getExplanation(), containsString( + "there is enough disk on this node for the shard to remain, free: [60b]")); result = strategy.reroute(clusterState, "reroute"); assertThat(result.routingTable().index("test").getShards().get(0).primaryShard().state(), equalTo(STARTED)); diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java index d3e9259994c..659c3b25833 100644 --- a/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java @@ -52,6 +52,7 @@ import java.util.HashSet; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; +import static org.hamcrest.Matchers.containsString; /** * Unit tests for the DiskThresholdDecider @@ -98,8 +99,15 @@ public class DiskThresholdDeciderUnitTests extends ESAllocationTestCase { shardSizes.put("[test][0][p]", 10L); // 10 bytes final ClusterInfo clusterInfo = new ClusterInfo(leastAvailableUsages.build(), mostAvailableUsage.build(), shardSizes.build(), ImmutableOpenMap.of()); RoutingAllocation allocation = new RoutingAllocation(new AllocationDeciders(Settings.EMPTY, Collections.singleton(decider)), clusterState.getRoutingNodes(), clusterState, clusterInfo, System.nanoTime(), false); - assertEquals(mostAvailableUsage.toString(), Decision.YES, decider.canAllocate(test_0, new RoutingNode("node_0", node_0), allocation)); - assertEquals(mostAvailableUsage.toString(), Decision.NO, decider.canAllocate(test_0, new RoutingNode("node_1", node_1), allocation)); + allocation.debugDecision(true); + Decision decision = decider.canAllocate(test_0, new RoutingNode("node_0", node_0), allocation); + assertEquals(mostAvailableUsage.toString(), Decision.Type.YES, decision.type()); + assertThat(((Decision.Single) decision).getExplanation(), containsString("enough disk for shard on node")); + decision = 
decider.canAllocate(test_0, new RoutingNode("node_1", node_1), allocation); + assertEquals(mostAvailableUsage.toString(), Decision.Type.NO, decision.type()); + assertThat(((Decision.Single) decision).getExplanation(), containsString( + "the node is above the high watermark [cluster.routing.allocation.disk.watermark.high=90%], using more disk space than " + + "the maximum allowed [90.0%]")); } public void testCanRemainUsesLeastAvailableSpace() { @@ -165,8 +173,16 @@ public class DiskThresholdDeciderUnitTests extends ESAllocationTestCase { final ClusterInfo clusterInfo = new ClusterInfo(leastAvailableUsages.build(), mostAvailableUsage.build(), shardSizes.build(), shardRoutingMap.build()); RoutingAllocation allocation = new RoutingAllocation(new AllocationDeciders(Settings.EMPTY, Collections.singleton(decider)), clusterState.getRoutingNodes(), clusterState, clusterInfo, System.nanoTime(), false); - assertEquals(Decision.YES, decider.canRemain(test_0, new RoutingNode("node_0", node_0), allocation)); - assertEquals(Decision.NO, decider.canRemain(test_1, new RoutingNode("node_1", node_1), allocation)); + allocation.debugDecision(true); + Decision decision = decider.canRemain(test_0, new RoutingNode("node_0", node_0), allocation); + assertEquals(Decision.Type.YES, decision.type()); + assertThat(((Decision.Single) decision).getExplanation(), containsString( + "there is enough disk on this node for the shard to remain, free: [10b]")); + decision = decider.canRemain(test_1, new RoutingNode("node_1", node_1), allocation); + assertEquals(Decision.Type.NO, decision.type()); + assertThat(((Decision.Single) decision).getExplanation(), containsString("the shard cannot remain on this node because it is " + + "above the high watermark [cluster.routing.allocation.disk.watermark.high=90%] and there is less than the required [10.0%] " + + "free disk on node, actual free: [9.0%]")); try { decider.canRemain(test_0, new RoutingNode("node_1", node_1), allocation); fail("not allocated on this node"); @@ -180,9 +196,15 @@ public class DiskThresholdDeciderUnitTests extends ESAllocationTestCase { // not allocated on that node } - assertEquals("can stay since allocated on a different path with enough space", Decision.YES, decider.canRemain(test_2, new RoutingNode("node_1", node_1), allocation)); + decision = decider.canRemain(test_2, new RoutingNode("node_1", node_1), allocation); + assertEquals("can stay since allocated on a different path with enough space", Decision.Type.YES, decision.type()); + assertThat(((Decision.Single) decision).getExplanation(), containsString( + "this shard is not allocated on the most utilized disk and can remain")); - assertEquals("can stay since we don't have information about this shard", Decision.YES, decider.canRemain(test_2, new RoutingNode("node_1", node_1), allocation)); + decision = decider.canRemain(test_2, new RoutingNode("node_1", node_1), allocation); + assertEquals("can stay since we don't have information about this shard", Decision.Type.YES, decision.type()); + assertThat(((Decision.Single) decision).getExplanation(), containsString( + "this shard is not allocated on the most utilized disk and can remain")); } diff --git a/core/src/test/java/org/elasticsearch/codecs/CodecTests.java b/core/src/test/java/org/elasticsearch/codecs/CodecTests.java deleted file mode 100644 index dac83d1dee8..00000000000 --- a/core/src/test/java/org/elasticsearch/codecs/CodecTests.java +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license 
agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.codecs; - -import org.apache.lucene.codecs.Codec; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.DocumentMapperParser; -import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.VersionUtils; -import org.junit.Assert; - -import java.io.IOException; -import java.util.Collection; - -import static org.hamcrest.Matchers.containsString; - -public class CodecTests extends ESSingleNodeTestCase { - - @Override - protected Collection> getPlugins() { - return pluginList(InternalSettingsPlugin.class); - } - - public void testAcceptPostingsFormat() throws IOException { - int i = 0; - for (Version v : VersionUtils.allReleasedVersions()) { - if (v.onOrAfter(Version.V_2_0_0) == false) { - // no need to test, we don't support upgrading from these versions - continue; - } - IndexService indexService = createIndex("test-" + i++, - Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, v).build()); - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - try { - String mapping = XContentFactory.jsonBuilder().startObject() - .startObject("type") - .startObject("properties") - .startObject("field") - .field("type", v.onOrAfter(Version.V_5_0_0_alpha1) ? 
"keyword" : "string") - .field("postings_format", Codec.getDefault().postingsFormat().getName()) - .endObject() - .endObject() - .endObject().endObject().string(); - parser.parse("type", new CompressedXContent(mapping)); - if (v.onOrAfter(Version.V_2_0_0_beta1)) { - fail("Elasticsearch 2.0 should not support custom postings formats"); - } - } catch (MapperParsingException e) { - if (v.before(Version.V_2_0_0_beta1)) { - // Elasticsearch 1.x should ignore custom postings formats - throw e; - } - Assert.assertThat(e.getMessage(), containsString("unsupported parameters: [postings_format")); - } - } - } - - public void testAcceptDocValuesFormat() throws IOException { - int i = 0; - for (Version v : VersionUtils.allReleasedVersions()) { - if (v.onOrAfter(Version.V_2_0_0) == false) { - // no need to test, we don't support upgrading from these versions - continue; - } - IndexService indexService = createIndex("test-" + i++, - Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, v).build()); - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject() - .startObject("type") - .startObject("properties") - .startObject("field") - .field("type", v.onOrAfter(Version.V_5_0_0_alpha1) ? "keyword" : "string") - .field("doc_values_format", Codec.getDefault().docValuesFormat().getName()) - .endObject() - .endObject() - .endObject().endObject().string(); - try { - parser.parse("type", new CompressedXContent(mapping)); - if (v.onOrAfter(Version.V_2_0_0_beta1)) { - fail("Elasticsearch 2.0 should not support custom postings formats"); - } - } catch (MapperParsingException e) { - if (v.before(Version.V_2_0_0_beta1)) { - // Elasticsearch 1.x should ignore custom postings formats - throw e; - } - Assert.assertThat(e.getMessage(), containsString("unsupported parameters: [doc_values_format")); - } - } - } - -} diff --git a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java index 1688157c0d1..c5fa05d6635 100644 --- a/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java +++ b/core/src/test/java/org/elasticsearch/discovery/DiscoveryWithServiceDisruptionsIT.java @@ -628,7 +628,7 @@ public class DiscoveryWithServiceDisruptionsIT extends ESIntegTestCase { String oldMasterNode = internalCluster().getMasterName(); // a very long GC, but it's OK as we remove the disruption when it has had an effect - SingleNodeDisruption masterNodeDisruption = new IntermittentLongGCDisruption(oldMasterNode, random(), 100, 200, 30000, 60000); + SingleNodeDisruption masterNodeDisruption = new IntermittentLongGCDisruption(random(), oldMasterNode, 100, 200, 30000, 60000); internalCluster().setDisruptionScheme(masterNodeDisruption); masterNodeDisruption.startDisrupting(); diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java index 5a91426d6bc..c8f68e2a44f 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java @@ -225,7 +225,7 @@ public class UnicastZenPingTests extends ESTestCase { closeables.push(zenPingD); logger.info("ping from UZP_A"); - Collection pingResponses = zenPingA.pingAndWait(TimeValue.timeValueSeconds(1)); + Collection pingResponses = 
zenPingA.pingAndWait(TimeValue.timeValueMillis(100)); assertThat(pingResponses.size(), equalTo(1)); ZenPing.PingResponse ping = pingResponses.iterator().next(); assertThat(ping.node().getId(), equalTo("UZP_B")); @@ -234,7 +234,7 @@ public class UnicastZenPingTests extends ESTestCase { // ping again, this time from B, logger.info("ping from UZP_B"); - pingResponses = zenPingB.pingAndWait(TimeValue.timeValueSeconds(1)); + pingResponses = zenPingB.pingAndWait(TimeValue.timeValueMillis(100)); assertThat(pingResponses.size(), equalTo(1)); ping = pingResponses.iterator().next(); assertThat(ping.node().getId(), equalTo("UZP_A")); @@ -242,12 +242,12 @@ public class UnicastZenPingTests extends ESTestCase { assertCounters(handleB, handleA, handleB, handleC, handleD); logger.info("ping from UZP_C"); - pingResponses = zenPingC.pingAndWait(TimeValue.timeValueSeconds(1)); + pingResponses = zenPingC.pingAndWait(TimeValue.timeValueMillis(100)); assertThat(pingResponses.size(), equalTo(0)); assertCounters(handleC, handleA, handleB, handleC, handleD); logger.info("ping from UZP_D"); - pingResponses = zenPingD.pingAndWait(TimeValue.timeValueSeconds(1)); + pingResponses = zenPingD.pingAndWait(TimeValue.timeValueMillis(100)); assertThat(pingResponses.size(), equalTo(0)); assertCounters(handleD, handleA, handleB, handleC, handleD); } @@ -347,7 +347,7 @@ public class UnicastZenPingTests extends ESTestCase { // the presence of an unresolvable host should not prevent resolvable hosts from being pinged { - final Collection pingResponses = zenPingA.pingAndWait(TimeValue.timeValueSeconds(3)); + final Collection pingResponses = zenPingA.pingAndWait(TimeValue.timeValueMillis(100)); assertThat(pingResponses.size(), equalTo(1)); ZenPing.PingResponse ping = pingResponses.iterator().next(); assertThat(ping.node().getId(), equalTo("UZP_C")); @@ -366,7 +366,7 @@ public class UnicastZenPingTests extends ESTestCase { // now we should see pings to UZP_B; this establishes that host resolutions are not cached { // ping from C so that we can assert on the counters from a fresh source (as opposed to resetting them) - final Collection secondPingResponses = zenPingC.pingAndWait(TimeValue.timeValueSeconds(3)); + final Collection secondPingResponses = zenPingC.pingAndWait(TimeValue.timeValueMillis(100)); assertThat(secondPingResponses.size(), equalTo(2)); final Set ids = new HashSet<>(secondPingResponses.stream().map(p -> p.node().getId()).collect(Collectors.toList())); assertThat(ids, equalTo(new HashSet<>(Arrays.asList("UZP_A", "UZP_B")))); diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java index acc5d4e8018..b46df47afbc 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java @@ -20,13 +20,16 @@ package org.elasticsearch.discovery.zen; import java.io.Closeable; +import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.EnumSet; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Collectors; import org.apache.lucene.util.IOUtils; @@ -35,22 +38,40 @@ import org.elasticsearch.action.support.replication.ClusterStateCreationUtils; import org.elasticsearch.cluster.ClusterChangedEvent; import 
org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNode.Role; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.routing.IndexRoutingTable; +import org.elasticsearch.cluster.routing.IndexShardRoutingTable; +import org.elasticsearch.cluster.routing.RoutingTable; +import org.elasticsearch.cluster.routing.ShardRoutingState; +import org.elasticsearch.cluster.routing.TestShardRouting; +import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.zen.PublishClusterStateActionTests.AssertingAckListener; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportResponse; +import org.elasticsearch.transport.TransportResponseOptions; import org.elasticsearch.transport.TransportService; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_CREATION_DATE; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_VERSION_CREATED; +import static org.elasticsearch.cluster.routing.RoutingTableTests.updateActiveAllocations; import static org.elasticsearch.discovery.zen.ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING; import static org.elasticsearch.discovery.zen.ZenDiscovery.shouldIgnoreOrRejectNewClusterState; import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; @@ -283,4 +304,82 @@ public class ZenDiscoveryUnitTests extends ESTestCase { }); return discoveryNodes; } + + public void testValidateOnUnsupportedIndexVersionCreated() throws Exception { + final int iters = randomIntBetween(3, 10); + for (int i = 0; i < iters; i++) { + ClusterState.Builder stateBuilder = ClusterState.builder(ClusterName.DEFAULT); + final DiscoveryNode otherNode = new DiscoveryNode("other_node", buildNewFakeTransportAddress(), emptyMap(), + EnumSet.allOf(DiscoveryNode.Role.class), Version.CURRENT); + MembershipAction.ValidateJoinRequestRequestHandler request = new MembershipAction.ValidateJoinRequestRequestHandler(); + final boolean incompatible = randomBoolean(); + IndexMetaData indexMetaData = IndexMetaData.builder("test").settings(Settings.builder() + .put(SETTING_VERSION_CREATED, incompatible ? 
VersionUtils.getPreviousVersion(Version.CURRENT.minimumIndexCompatibilityVersion()) + : VersionUtils.randomVersionBetween(random(), Version.CURRENT.minimumIndexCompatibilityVersion(), Version.CURRENT)) + .put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0) + .put(SETTING_CREATION_DATE, System.currentTimeMillis())) + .state(IndexMetaData.State.OPEN) + .build(); + IndexRoutingTable.Builder indexRoutingTableBuilder = IndexRoutingTable.builder(indexMetaData.getIndex()); + RoutingTable.Builder routing = new RoutingTable.Builder(); + routing.addAsNew(indexMetaData); + final ShardId shardId = new ShardId("test", "_na_", 0); + IndexShardRoutingTable.Builder indexShardRoutingBuilder = new IndexShardRoutingTable.Builder(shardId); + + final DiscoveryNode primaryNode = otherNode; + indexShardRoutingBuilder.addShard(TestShardRouting.newShardRouting("test", 0, primaryNode.getId(), null, true, + ShardRoutingState.INITIALIZING, new UnassignedInfo(UnassignedInfo.Reason.INDEX_REOPENED, "getting there"))); + indexRoutingTableBuilder.addIndexShard(indexShardRoutingBuilder.build()); + IndexRoutingTable indexRoutingTable = indexRoutingTableBuilder.build(); + IndexMetaData updatedIndexMetaData = updateActiveAllocations(indexRoutingTable, indexMetaData); + stateBuilder.metaData(MetaData.builder().put(updatedIndexMetaData, false).generateClusterUuidIfNeeded()) + .routingTable(RoutingTable.builder().add(indexRoutingTable).build()); + if (incompatible) { + IllegalStateException ex = expectThrows(IllegalStateException.class, () -> + request.messageReceived(new MembershipAction.ValidateJoinRequest(stateBuilder.build()), null)); + assertEquals("index [test] version not supported: " + + VersionUtils.getPreviousVersion(Version.CURRENT.minimumCompatibilityVersion()) + + " minimum compatible index version is: " + Version.CURRENT.minimumCompatibilityVersion(), ex.getMessage()); + } else { + AtomicBoolean sendResponse = new AtomicBoolean(false); + request.messageReceived(new MembershipAction.ValidateJoinRequest(stateBuilder.build()), new TransportChannel() { + @Override + public String action() { + return null; + } + + @Override + public String getProfileName() { + return null; + } + + @Override + public long getRequestId() { + return 0; + } + + @Override + public String getChannelType() { + return null; + } + + @Override + public void sendResponse(TransportResponse response) throws IOException { + sendResponse.set(true); + } + + @Override + public void sendResponse(TransportResponse response, TransportResponseOptions options) throws IOException { + + } + + @Override + public void sendResponse(Exception exception) throws IOException { + + } + }); + assertTrue(sendResponse.get()); + } + } + } } diff --git a/core/src/test/java/org/elasticsearch/explain/ExplainActionIT.java b/core/src/test/java/org/elasticsearch/explain/ExplainActionIT.java index 30ae9a7b6f7..6d8e1a41c5b 100644 --- a/core/src/test/java/org/elasticsearch/explain/ExplainActionIT.java +++ b/core/src/test/java/org/elasticsearch/explain/ExplainActionIT.java @@ -26,7 +26,6 @@ import org.elasticsearch.common.io.stream.InputStreamStreamInput; import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.mapper.TimestampFieldMapper; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.test.ESIntegTestCase; import org.joda.time.DateTime; @@ -138,7 +137,6 @@ public class ExplainActionIT extends 
ESIntegTestCase { assertThat(response.getGetResult().isExists(), equalTo(true)); assertThat(response.getGetResult().getId(), equalTo("1")); Set fields = new HashSet<>(response.getGetResult().getFields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly added via templates assertThat(fields, equalTo(singleton("obj1.field1"))); assertThat(response.getGetResult().getFields().get("obj1.field1").getValue().toString(), equalTo("value1")); assertThat(response.getGetResult().isSourceEmpty(), equalTo(true)); @@ -155,7 +153,6 @@ public class ExplainActionIT extends ESIntegTestCase { assertThat(response.getGetResult().isExists(), equalTo(true)); assertThat(response.getGetResult().getId(), equalTo("1")); fields = new HashSet<>(response.getGetResult().getFields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly added via templates assertThat(fields, equalTo(singleton("obj1.field1"))); assertThat(response.getGetResult().getFields().get("obj1.field1").getValue().toString(), equalTo("value1")); assertThat(response.getGetResult().isSourceEmpty(), equalTo(false)); diff --git a/core/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java b/core/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java index a998b56f640..bed21193ac6 100644 --- a/core/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java +++ b/core/src/test/java/org/elasticsearch/gateway/GatewayIndexStateIT.java @@ -414,7 +414,7 @@ public class GatewayIndexStateIT extends ESIntegTestCase { IndexMetaData metaData = state.getMetaData().index("test"); for (NodeEnvironment services : internalCluster().getInstances(NodeEnvironment.class)) { IndexMetaData brokenMeta = IndexMetaData.builder(metaData).settings(Settings.builder().put(metaData.getSettings()) - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0_beta1.id) + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT.minimumIndexCompatibilityVersion().id) // this is invalid but should be archived .put("index.similarity.BM25.type", "classic") // this one is not validated ahead of time and breaks allocation diff --git a/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java b/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java index 2a37a7f0a60..b106ca64227 100644 --- a/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java @@ -396,7 +396,7 @@ public class GatewayMetaStateTests extends ESAllocationTestCase { this.upgrade = upgrade; } @Override - public IndexMetaData upgradeIndexMetaData(IndexMetaData indexMetaData) { + public IndexMetaData upgradeIndexMetaData(IndexMetaData indexMetaData, Version minimumIndexCompatibilityVersion) { return upgrade ? 
IndexMetaData.builder(indexMetaData).build() : indexMetaData; } } diff --git a/core/src/test/java/org/elasticsearch/get/GetActionIT.java b/core/src/test/java/org/elasticsearch/get/GetActionIT.java index 434536ac8d9..2a0505b273b 100644 --- a/core/src/test/java/org/elasticsearch/get/GetActionIT.java +++ b/core/src/test/java/org/elasticsearch/get/GetActionIT.java @@ -38,7 +38,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.engine.VersionConflictEngineException; -import org.elasticsearch.index.mapper.TimestampFieldMapper; import org.elasticsearch.test.ESIntegTestCase; import java.io.IOException; @@ -88,7 +87,6 @@ public class GetActionIT extends ESIntegTestCase { assertThat(response.isExists(), equalTo(true)); assertThat(response.getIndex(), equalTo("test")); Set fields = new HashSet<>(response.getFields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(Collections.emptySet())); assertThat(response.getSourceAsBytes(), nullValue()); @@ -97,7 +95,6 @@ public class GetActionIT extends ESIntegTestCase { assertThat(response.isExists(), equalTo(true)); assertThat(response.getIndex(), equalTo("test")); fields = new HashSet<>(response.getFields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(Collections.emptySet())); assertThat(response.getSourceAsBytes(), nullValue()); @@ -276,7 +273,6 @@ public class GetActionIT extends ESIntegTestCase { assertThat(response.getId(), equalTo("1")); assertThat(response.getType(), equalTo("type1")); Set fields = new HashSet<>(response.getFields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(singleton("field"))); assertThat(response.getFields().get("field").getValues().size(), equalTo(2)); assertThat(response.getFields().get("field").getValues().get(0).toString(), equalTo("1")); @@ -288,7 +284,6 @@ public class GetActionIT extends ESIntegTestCase { assertThat(response.getType(), equalTo("type2")); assertThat(response.getId(), equalTo("1")); fields = new HashSet<>(response.getFields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(singleton("field"))); assertThat(response.getFields().get("field").getValues().size(), equalTo(2)); assertThat(response.getFields().get("field").getValues().get(0).toString(), equalTo("1")); @@ -300,7 +295,6 @@ public class GetActionIT extends ESIntegTestCase { assertThat(response.isExists(), equalTo(true)); assertThat(response.getId(), equalTo("1")); fields = new HashSet<>(response.getFields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(singleton("field"))); assertThat(response.getFields().get("field").getValues().size(), equalTo(2)); assertThat(response.getFields().get("field").getValues().get(0).toString(), equalTo("1")); @@ -310,7 +304,6 @@ public class GetActionIT extends ESIntegTestCase { assertThat(response.isExists(), equalTo(true)); assertThat(response.getId(), equalTo("1")); fields = new HashSet<>(response.getFields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(singleton("field"))); assertThat(response.getFields().get("field").getValues().size(), equalTo(2)); 
assertThat(response.getFields().get("field").getValues().get(0).toString(), equalTo("1")); @@ -540,8 +533,6 @@ public class GetActionIT extends ESIntegTestCase { client().prepareIndex("test", "my-type1", "1") .setRouting("1") - .setTimestamp("205097") - .setTTL(10000000000000L) .setParent("parent_1") .setSource(jsonBuilder().startObject().field("field1", "value").endObject()) .get(); @@ -773,7 +764,7 @@ public class GetActionIT extends ESIntegTestCase { assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSource(createIndexSource)); ensureGreen(); - client().prepareIndex("test", "doc").setId("1").setSource("{}").setParent("1").setTTL(TimeValue.timeValueHours(1).getMillis()).get(); + client().prepareIndex("test", "doc").setId("1").setSource("{}").setParent("1").get(); String[] fieldsList = {"_parent"}; // before refresh - document is only in translog @@ -900,7 +891,7 @@ public class GetActionIT extends ESIntegTestCase { " \"store\": \"" + storedString + "\"" + " },\n" + " \"text\": {\n" + - " \"type\": \"string\",\n" + + " \"type\": \"text\",\n" + " \"fields\": {\n" + " \"token_count\": {\n" + " \"type\": \"token_count\",\n" + diff --git a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java index 97a6c6abf70..b6ea5e719de 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java @@ -21,7 +21,6 @@ package org.elasticsearch.index; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; @@ -164,8 +163,7 @@ public class IndexSettingsTests extends ESTestCase { if (settings.length > 0) { settingSet.addAll(Arrays.asList(settings)); } - return new IndexSettings(metaData, nodeSettings, (idx) -> Regex.simpleMatch(idx, metaData.getIndex().getName()), - new IndexScopedSettings(Settings.EMPTY, settingSet)); + return new IndexSettings(metaData, nodeSettings, new IndexScopedSettings(Settings.EMPTY, settingSet)); } @@ -207,8 +205,7 @@ public class IndexSettingsTests extends ESTestCase { .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(indexSettings) .build(); - IndexMetaData metaData = IndexMetaData.builder(name).settings(build).build(); - return metaData; + return IndexMetaData.builder(name).settings(build).build(); } diff --git a/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java b/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java index e1d8a878c14..b068d00b6a2 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexingSlowLogTests.java @@ -40,7 +40,7 @@ public class IndexingSlowLogTests extends ESTestCase { public void testSlowLogParsedDocumentPrinterSourceToLog() throws IOException { BytesReference source = JsonXContent.contentBuilder().startObject().field("foo", "bar").endObject().bytes(); ParsedDocument pd = new ParsedDocument(new NumericDocValuesField("version", 1), new NumericDocValuesField("seqNo", 1), "id", - "test", null, 0, -1, null, source, null); + "test", null, null, source, null); Index index = new Index("foo", "123"); // Turning off document logging doesn't log source[] SlowLogParsedDocumentPrinter p = new 
SlowLogParsedDocumentPrinter(index, pd, 10, true, 0); diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 5d5a2e7b1ce..2be63bc2f2b 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -265,14 +265,14 @@ public class InternalEngineTests extends ESTestCase { } - private ParsedDocument testParsedDocument(String uid, String id, String type, String routing, long timestamp, long ttl, Document document, BytesReference source, Mapping mappingUpdate) { + private ParsedDocument testParsedDocument(String uid, String id, String type, String routing, Document document, BytesReference source, Mapping mappingUpdate) { Field uidField = new Field("_uid", uid, UidFieldMapper.Defaults.FIELD_TYPE); Field versionField = new NumericDocValuesField("_version", 0); Field seqNoField = new NumericDocValuesField("_seq_no", 0); document.add(uidField); document.add(versionField); document.add(seqNoField); - return new ParsedDocument(versionField, seqNoField, id, type, routing, timestamp, ttl, Arrays.asList(document), source, mappingUpdate); + return new ParsedDocument(versionField, seqNoField, id, type, routing, Arrays.asList(document), source, mappingUpdate); } protected Store createStore() throws IOException { @@ -377,10 +377,10 @@ public class InternalEngineTests extends ESTestCase { assertThat(engine.segmentsStats(false).getMemoryInBytes(), equalTo(0L)); // create two docs and refresh - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); Engine.Index first = new Engine.Index(newUid("1"), doc); Engine.IndexResult firstResult = engine.index(first); - ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, null); + ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, testDocumentWithTextField(), B_2, null); Engine.Index second = new Engine.Index(newUid("2"), doc2); Engine.IndexResult secondResult = engine.index(second); assertThat(secondResult.getTranslogLocation(), greaterThan(firstResult.getTranslogLocation())); @@ -413,7 +413,7 @@ public class InternalEngineTests extends ESTestCase { assertThat(segments.get(0).getDeletedDocs(), equalTo(0)); assertThat(segments.get(0).isCompound(), equalTo(true)); - ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), B_3, null); + ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, testDocumentWithTextField(), B_3, null); engine.index(new Engine.Index(newUid("3"), doc3)); engine.refresh("test"); @@ -460,7 +460,7 @@ public class InternalEngineTests extends ESTestCase { assertThat(segments.get(1).isCompound(), equalTo(true)); engine.onSettingsChanged(); - ParsedDocument doc4 = testParsedDocument("4", "4", "test", null, -1, -1, testDocumentWithTextField(), B_3, null); + ParsedDocument doc4 = testParsedDocument("4", "4", "test", null, testDocumentWithTextField(), B_3, null); engine.index(new Engine.Index(newUid("4"), doc4)); engine.refresh("test"); @@ -494,7 +494,7 @@ public class InternalEngineTests extends ESTestCase { List segments = engine.segments(true); assertThat(segments.isEmpty(), equalTo(true)); - ParsedDocument doc = testParsedDocument("1", "1", "test", 
null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); engine.index(new Engine.Index(newUid("1"), doc)); engine.refresh("test"); @@ -502,10 +502,10 @@ public class InternalEngineTests extends ESTestCase { assertThat(segments.size(), equalTo(1)); assertThat(segments.get(0).ramTree, notNullValue()); - ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, null); + ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, testDocumentWithTextField(), B_2, null); engine.index(new Engine.Index(newUid("2"), doc2)); engine.refresh("test"); - ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), B_3, null); + ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, testDocumentWithTextField(), B_3, null); engine.index(new Engine.Index(newUid("3"), doc3)); engine.refresh("test"); @@ -520,7 +520,7 @@ public class InternalEngineTests extends ESTestCase { public void testSegmentsWithMergeFlag() throws Exception { try (Store store = createStore(); Engine engine = createEngine(defaultSettings, store, createTempDir(), new TieredMergePolicy())) { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocument(), B_1, null); Engine.Index index = new Engine.Index(newUid("1"), doc); engine.index(index); engine.flush(); @@ -574,7 +574,7 @@ public class InternalEngineTests extends ESTestCase { Engine engine = createEngine(defaultSettings, store, createTempDir(), NoMergePolicy.INSTANCE)) { assertThat(engine.segmentsStats(true).getFileSizes().size(), equalTo(0)); - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); engine.index(new Engine.Index(newUid("1"), doc)); engine.refresh("test"); @@ -584,7 +584,7 @@ public class InternalEngineTests extends ESTestCase { ObjectObjectCursor firstEntry = stats.getFileSizes().iterator().next(); - ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, null); + ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, testDocumentWithTextField(), B_2, null); engine.index(new Engine.Index(newUid("2"), doc2)); engine.refresh("test"); @@ -685,7 +685,7 @@ public class InternalEngineTests extends ESTestCase { public void testFlushIsDisabledDuringTranslogRecovery() throws IOException { assertFalse(engine.isRecovering()); - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); engine.index(new Engine.Index(newUid("1"), doc)); engine.close(); @@ -694,7 +694,7 @@ public class InternalEngineTests extends ESTestCase { assertTrue(engine.isRecovering()); engine.recoverFromTranslog(); assertFalse(engine.isRecovering()); - doc = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + doc = testParsedDocument("2", "2", "test", null, testDocumentWithTextField(), B_1, null); engine.index(new Engine.Index(newUid("2"), doc)); engine.flush(); } @@ -706,7 +706,7 @@ public class InternalEngineTests extends ESTestCase { try { initialEngine = 
engine; for (int i = 0; i < ops; i++) { - final ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), SOURCE, null); + final ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), SOURCE, null); if (randomBoolean()) { final Engine.Index operation = new Engine.Index(newUid("test#1"), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, i, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false); operations.add(operation); @@ -742,7 +742,7 @@ public class InternalEngineTests extends ESTestCase { initialEngine = engine; for (int i = 0; i < docs; i++) { final String id = Integer.toString(i); - final ParsedDocument doc = testParsedDocument(id, id, "test", null, -1, -1, testDocumentWithTextField(), SOURCE, null); + final ParsedDocument doc = testParsedDocument(id, id, "test", null, testDocumentWithTextField(), SOURCE, null); initialEngine.index(new Engine.Index(newUid(id), doc)); } } finally { @@ -771,7 +771,7 @@ public class InternalEngineTests extends ESTestCase { } public void testConcurrentGetAndFlush() throws Exception { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); engine.index(new Engine.Index(newUid("1"), doc)); final AtomicReference latestGetResult = new AtomicReference<>(); @@ -815,7 +815,7 @@ public class InternalEngineTests extends ESTestCase { // create a document Document document = testDocumentWithTextField(); document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE)); - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, document, B_1, null); engine.index(new Engine.Index(newUid("1"), doc)); // its not there... @@ -854,7 +854,7 @@ public class InternalEngineTests extends ESTestCase { document = testDocument(); document.add(new TextField("value", "test1", Field.Store.YES)); document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_2), SourceFieldMapper.Defaults.FIELD_TYPE)); - doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_2, null); + doc = testParsedDocument("1", "1", "test", null, document, B_2, null); engine.index(new Engine.Index(newUid("1"), doc)); // its not updated yet... @@ -906,7 +906,7 @@ public class InternalEngineTests extends ESTestCase { // add it back document = testDocumentWithTextField(); document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE)); - doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null); + doc = testParsedDocument("1", "1", "test", null, document, B_1, null); engine.index(new Engine.Index(newUid("1"), doc, Versions.MATCH_DELETED)); // its not there... @@ -939,7 +939,7 @@ public class InternalEngineTests extends ESTestCase { // now do an update document = testDocument(); document.add(new TextField("value", "test1", Field.Store.YES)); - doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null); + doc = testParsedDocument("1", "1", "test", null, document, B_1, null); engine.index(new Engine.Index(newUid("1"), doc)); // its not updated yet... 
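For readability, here is the updated testParsedDocument helper from the InternalEngineTests hunk above in one piece. The imports and the enclosing test class are assumed from the surrounding file; the only change is that the former timestamp/ttl parameters are gone from both the helper and the ParsedDocument constructor it calls:

    // Assumed imports from the enclosing test class (not repeated in the hunk itself):
    // org.apache.lucene.document.Field, org.apache.lucene.document.NumericDocValuesField,
    // org.elasticsearch.common.bytes.BytesReference, org.elasticsearch.index.mapper.Mapping,
    // org.elasticsearch.index.mapper.ParseContext.Document, org.elasticsearch.index.mapper.ParsedDocument,
    // org.elasticsearch.index.mapper.UidFieldMapper, java.util.Arrays
    private ParsedDocument testParsedDocument(String uid, String id, String type, String routing,
                                              Document document, BytesReference source, Mapping mappingUpdate) {
        Field uidField = new Field("_uid", uid, UidFieldMapper.Defaults.FIELD_TYPE);
        Field versionField = new NumericDocValuesField("_version", 0);
        Field seqNoField = new NumericDocValuesField("_seq_no", 0);
        document.add(uidField);
        document.add(versionField);
        document.add(seqNoField);
        // ParsedDocument no longer takes timestamp or ttl arguments.
        return new ParsedDocument(versionField, seqNoField, id, type, routing,
            Arrays.asList(document), source, mappingUpdate);
    }
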
@@ -965,7 +965,7 @@ public class InternalEngineTests extends ESTestCase { searchResult.close(); // create a document - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); engine.index(new Engine.Index(newUid("1"), doc)); // its not there... @@ -1001,7 +1001,7 @@ public class InternalEngineTests extends ESTestCase { Engine engine = new InternalEngine(config(defaultSettings, store, createTempDir(), new LogByteSizeMergePolicy(), IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, null))) { final String syncId = randomUnicodeOfCodepointLengthBetween(10, 20); - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); engine.index(new Engine.Index(newUid("1"), doc)); Engine.CommitId commitID = engine.flush(); assertThat(commitID, equalTo(new Engine.CommitId(store.readLastCommittedSegmentsInfo().getId()))); @@ -1028,7 +1028,7 @@ public class InternalEngineTests extends ESTestCase { InternalEngine engine = new InternalEngine(config(defaultSettings, store, createTempDir(), new LogDocMergePolicy(), IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, null))) { final String syncId = randomUnicodeOfCodepointLengthBetween(10, 20); - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); Engine.Index doc1 = new Engine.Index(newUid("1"), doc); engine.index(doc1); assertEquals(engine.getLastWriteNanos(), doc1.startTime()); @@ -1081,7 +1081,7 @@ public class InternalEngineTests extends ESTestCase { public void testSyncedFlushSurvivesEngineRestart() throws IOException { final String syncId = randomUnicodeOfCodepointLengthBetween(10, 20); - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); engine.index(new Engine.Index(newUid("1"), doc)); final Engine.CommitId commitID = engine.flush(); assertEquals("should succeed to flush commit with right id and no pending doc", engine.syncFlush(syncId, commitID), @@ -1104,14 +1104,14 @@ public class InternalEngineTests extends ESTestCase { public void testSyncedFlushVanishesOnReplay() throws IOException { final String syncId = randomUnicodeOfCodepointLengthBetween(10, 20); - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); engine.index(new Engine.Index(newUid("1"), doc)); final Engine.CommitId commitID = engine.flush(); assertEquals("should succeed to flush commit with right id and no pending doc", engine.syncFlush(syncId, commitID), Engine.SyncedFlushResult.SUCCESS); assertEquals(store.readLastCommittedSegmentsInfo().getUserData().get(Engine.SYNC_COMMIT_ID), syncId); assertEquals(engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID), syncId); - doc = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), new BytesArray("{}"), null); + doc = testParsedDocument("2", "2", "test", null, testDocumentWithTextField(), new 
BytesArray("{}"), null); engine.index(new Engine.Index(newUid("2"), doc)); EngineConfig config = engine.config(); engine.close(); @@ -1121,7 +1121,7 @@ public class InternalEngineTests extends ESTestCase { } public void testVersioningNewCreate() { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocument(), B_1, null); Engine.Index create = new Engine.Index(newUid("1"), doc, Versions.MATCH_DELETED); Engine.IndexResult indexResult = engine.index(create); assertThat(indexResult.getVersion(), equalTo(1L)); @@ -1132,7 +1132,7 @@ public class InternalEngineTests extends ESTestCase { } public void testVersioningNewIndex() { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocument(), B_1, null); Engine.Index index = new Engine.Index(newUid("1"), doc); Engine.IndexResult indexResult = engine.index(index); assertThat(indexResult.getVersion(), equalTo(1L)); @@ -1143,7 +1143,7 @@ public class InternalEngineTests extends ESTestCase { } public void testExternalVersioningNewIndex() { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocument(), B_1, null); Engine.Index index = new Engine.Index(newUid("1"), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, 12, VersionType.EXTERNAL, PRIMARY, 0, -1, false); Engine.IndexResult indexResult = engine.index(index); assertThat(indexResult.getVersion(), equalTo(12L)); @@ -1154,7 +1154,7 @@ public class InternalEngineTests extends ESTestCase { } public void testVersioningIndexConflict() { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocument(), B_1, null); Engine.Index index = new Engine.Index(newUid("1"), doc); Engine.IndexResult indexResult = engine.index(index); assertThat(indexResult.getVersion(), equalTo(1L)); @@ -1176,7 +1176,7 @@ public class InternalEngineTests extends ESTestCase { } public void testExternalVersioningIndexConflict() { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocument(), B_1, null); Engine.Index index = new Engine.Index(newUid("1"), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, 12, VersionType.EXTERNAL, PRIMARY, 0, -1, false); Engine.IndexResult indexResult = engine.index(index); assertThat(indexResult.getVersion(), equalTo(12L)); @@ -1192,7 +1192,7 @@ public class InternalEngineTests extends ESTestCase { } public void testForceVersioningNotAllowedExceptForOlderIndices() throws Exception { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocument(), B_1, null); Engine.Index index = new Engine.Index(newUid("1"), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, 42, VersionType.FORCE, PRIMARY, 0, -1, false); Engine.IndexResult indexResult = engine.index(index); @@ -1219,7 +1219,7 @@ public class InternalEngineTests extends ESTestCase { } public void testVersioningIndexConflictWithFlush() { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = 
testParsedDocument("1", "1", "test", null, testDocument(), B_1, null); Engine.Index index = new Engine.Index(newUid("1"), doc); Engine.IndexResult indexResult = engine.index(index); assertThat(indexResult.getVersion(), equalTo(1L)); @@ -1243,7 +1243,7 @@ public class InternalEngineTests extends ESTestCase { } public void testExternalVersioningIndexConflictWithFlush() { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocument(), B_1, null); Engine.Index index = new Engine.Index(newUid("1"), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, 12, VersionType.EXTERNAL, PRIMARY, 0, -1, false); Engine.IndexResult indexResult = engine.index(index); assertThat(indexResult.getVersion(), equalTo(12L)); @@ -1266,7 +1266,7 @@ public class InternalEngineTests extends ESTestCase { new LogByteSizeMergePolicy(), IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, null))) { // use log MP here we test some behavior in ESMP int numDocs = randomIntBetween(10, 100); for (int i = 0; i < numDocs; i++) { - ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, testDocument(), B_1, null); Engine.Index index = new Engine.Index(newUid(Integer.toString(i)), doc); engine.index(index); engine.refresh("test"); @@ -1277,7 +1277,7 @@ public class InternalEngineTests extends ESTestCase { engine.forceMerge(true, 1, false, false, false); assertEquals(engine.segments(true).size(), 1); - ParsedDocument doc = testParsedDocument(Integer.toString(0), Integer.toString(0), "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument(Integer.toString(0), Integer.toString(0), "test", null, testDocument(), B_1, null); Engine.Index index = new Engine.Index(newUid(Integer.toString(0)), doc); engine.delete(new Engine.Delete(index.type(), index.id(), index.uid())); engine.forceMerge(true, 10, true, false, false); //expunge deletes @@ -1288,7 +1288,7 @@ public class InternalEngineTests extends ESTestCase { assertEquals(engine.config().getMergePolicy().toString(), numDocs - 1, test.reader().maxDoc()); } - doc = testParsedDocument(Integer.toString(1), Integer.toString(1), "test", null, -1, -1, testDocument(), B_1, null); + doc = testParsedDocument(Integer.toString(1), Integer.toString(1), "test", null, testDocument(), B_1, null); index = new Engine.Index(newUid(Integer.toString(1)), doc); engine.delete(new Engine.Delete(index.type(), index.id(), index.uid())); engine.forceMerge(true, 10, false, false, false); //expunge deletes @@ -1323,7 +1323,7 @@ public class InternalEngineTests extends ESTestCase { int numDocs = randomIntBetween(1, 20); for (int j = 0; j < numDocs; j++) { i++; - ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, testDocument(), B_1, null); Engine.Index index = new Engine.Index(newUid(Integer.toString(i)), doc); engine.index(index); } @@ -1356,7 +1356,7 @@ public class InternalEngineTests extends ESTestCase { } public void testVersioningDeleteConflict() { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, 
testDocument(), B_1, null); Engine.Index index = new Engine.Index(newUid("1"), doc); Engine.IndexResult indexResult = engine.index(index); assertThat(indexResult.getVersion(), equalTo(1L)); @@ -1389,7 +1389,7 @@ public class InternalEngineTests extends ESTestCase { } public void testVersioningDeleteConflictWithFlush() { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocument(), B_1, null); Engine.Index index = new Engine.Index(newUid("1"), doc); Engine.IndexResult indexResult = engine.index(index); assertThat(indexResult.getVersion(), equalTo(1L)); @@ -1428,7 +1428,7 @@ public class InternalEngineTests extends ESTestCase { } public void testVersioningCreateExistsException() { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocument(), B_1, null); Engine.Index create = new Engine.Index(newUid("1"), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, 0, -1, false); Engine.IndexResult indexResult = engine.index(create); assertThat(indexResult.getVersion(), equalTo(1L)); @@ -1440,7 +1440,7 @@ public class InternalEngineTests extends ESTestCase { } public void testVersioningCreateExistsExceptionWithFlush() { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocument(), B_1, null); Engine.Index create = new Engine.Index(newUid("1"), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, 0, -1, false); Engine.IndexResult indexResult = engine.index(create); assertThat(indexResult.getVersion(), equalTo(1L)); @@ -1454,7 +1454,7 @@ public class InternalEngineTests extends ESTestCase { } public void testVersioningReplicaConflict1() { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocument(), B_1, null); Engine.Index index = new Engine.Index(newUid("1"), doc); Engine.IndexResult indexResult = engine.index(index); assertThat(indexResult.getVersion(), equalTo(1L)); @@ -1484,7 +1484,7 @@ public class InternalEngineTests extends ESTestCase { } public void testVersioningReplicaConflict2() { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocument(), B_1, null); Engine.Index index = new Engine.Index(newUid("1"), doc); Engine.IndexResult indexResult = engine.index(index); assertThat(indexResult.getVersion(), equalTo(1L)); @@ -1526,7 +1526,7 @@ public class InternalEngineTests extends ESTestCase { } public void testBasicCreatedFlag() { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocument(), B_1, null); Engine.Index index = new Engine.Index(newUid("1"), doc); Engine.IndexResult indexResult = engine.index(index); assertTrue(indexResult.isCreated()); @@ -1543,7 +1543,7 @@ public class InternalEngineTests extends ESTestCase { } public void testCreatedFlagAfterFlush() { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument 
doc = testParsedDocument("1", "1", "test", null, testDocument(), B_1, null); Engine.Index index = new Engine.Index(newUid("1"), doc); Engine.IndexResult indexResult = engine.index(index); assertTrue(indexResult.isCreated()); @@ -1597,7 +1597,7 @@ public class InternalEngineTests extends ESTestCase { try { // First, with DEBUG, which should NOT log IndexWriter output: - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); engine.index(new Engine.Index(newUid("1"), doc)); engine.flush(); assertFalse(mockAppender.sawIndexWriterMessage); @@ -1653,7 +1653,7 @@ public class InternalEngineTests extends ESTestCase { } else { // index a document id = randomFrom(ids); - ParsedDocument doc = testParsedDocument("test#" + id, id, "test", null, -1, -1, testDocumentWithTextField(), SOURCE, null); + ParsedDocument doc = testParsedDocument("test#" + id, id, "test", null, testDocumentWithTextField(), SOURCE, null); final Engine.Index index = new Engine.Index(newUid("test#" + id), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, rarely() ? 100 : Versions.MATCH_ANY, VersionType.INTERNAL, @@ -1755,7 +1755,7 @@ public class InternalEngineTests extends ESTestCase { // index random number of docs for (int i = 0; i < numDocsPerThread; i++) { final String id = "thread" + threadIdx + "#" + i; - ParsedDocument doc = testParsedDocument(id, id, "test", null, -1, -1, testDocument(), B_1, null); + ParsedDocument doc = testParsedDocument(id, id, "test", null, testDocument(), B_1, null); engine.index(new Engine.Index(newUid(id), doc)); } } catch (Exception e) { @@ -1857,7 +1857,7 @@ public class InternalEngineTests extends ESTestCase { try { // First, with DEBUG, which should NOT log IndexWriter output: - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); engine.index(new Engine.Index(newUid("1"), doc)); engine.flush(); assertFalse(mockAppender.sawIndexWriterMessage); @@ -1886,7 +1886,7 @@ public class InternalEngineTests extends ESTestCase { Document document = testDocument(); document.add(new TextField("value", "test1", Field.Store.YES)); - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_2, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, document, B_2, null); engine.index(new Engine.Index(newUid("1"), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, 1, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false)); // Delete document we just added: @@ -2018,7 +2018,7 @@ public class InternalEngineTests extends ESTestCase { public void testTranslogReplayWithFailure() throws IOException { final int numDocs = randomIntBetween(1, 10); for (int i = 0; i < numDocs; i++) { - ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null); + ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, testDocument(), new BytesArray("{}"), null); Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false); Engine.IndexResult indexResult = 
engine.index(firstIndexRequest); assertThat(indexResult.getVersion(), equalTo(1L)); @@ -2068,7 +2068,7 @@ public class InternalEngineTests extends ESTestCase { public void testSkipTranslogReplay() throws IOException { final int numDocs = randomIntBetween(1, 10); for (int i = 0; i < numDocs; i++) { - ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null); + ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, testDocument(), new BytesArray("{}"), null); Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false); Engine.IndexResult indexResult = engine.index(firstIndexRequest); assertThat(indexResult.getVersion(), equalTo(1L)); @@ -2161,7 +2161,7 @@ public class InternalEngineTests extends ESTestCase { } final int numExtraDocs = randomIntBetween(1, 10); for (int i = 0; i < numExtraDocs; i++) { - ParsedDocument doc = testParsedDocument("extra" + Integer.toString(i), "extra" + Integer.toString(i), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null); + ParsedDocument doc = testParsedDocument("extra" + Integer.toString(i), "extra" + Integer.toString(i), "test", null, testDocument(), new BytesArray("{}"), null); Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false); Engine.IndexResult indexResult = engine.index(firstIndexRequest); assertThat(indexResult.getVersion(), equalTo(1L)); @@ -2190,7 +2190,7 @@ public class InternalEngineTests extends ESTestCase { public void testTranslogReplay() throws IOException { final int numDocs = randomIntBetween(1, 10); for (int i = 0; i < numDocs; i++) { - ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null); + ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, testDocument(), new BytesArray("{}"), null); Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false); Engine.IndexResult indexResult = engine.index(firstIndexRequest); assertThat(indexResult.getVersion(), equalTo(1L)); @@ -2233,7 +2233,7 @@ public class InternalEngineTests extends ESTestCase { final boolean flush = randomBoolean(); int randomId = randomIntBetween(numDocs + 1, numDocs + 10); String uuidValue = "test#" + Integer.toString(randomId); - ParsedDocument doc = testParsedDocument(uuidValue, Integer.toString(randomId), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null); + ParsedDocument doc = testParsedDocument(uuidValue, Integer.toString(randomId), "test", null, testDocument(), new BytesArray("{}"), null); Engine.Index firstIndexRequest = new Engine.Index(newUid(uuidValue), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, 1, VersionType.EXTERNAL, PRIMARY, System.nanoTime(), -1, false); Engine.IndexResult indexResult = engine.index(firstIndexRequest); assertThat(indexResult.getVersion(), equalTo(1L)); @@ -2241,7 +2241,7 @@ public class InternalEngineTests extends ESTestCase { engine.flush(); } - doc = testParsedDocument(uuidValue, 
Integer.toString(randomId), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null); + doc = testParsedDocument(uuidValue, Integer.toString(randomId), "test", null, testDocument(), new BytesArray("{}"), null); Engine.Index idxRequest = new Engine.Index(newUid(uuidValue), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, 2, VersionType.EXTERNAL, PRIMARY, System.nanoTime(), -1, false); Engine.IndexResult result = engine.index(idxRequest); engine.refresh("test"); @@ -2307,7 +2307,7 @@ public class InternalEngineTests extends ESTestCase { public void testRecoverFromForeignTranslog() throws IOException { final int numDocs = randomIntBetween(1, 10); for (int i = 0; i < numDocs; i++) { - ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null); + ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, testDocument(), new BytesArray("{}"), null); Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false); Engine.IndexResult index = engine.index(firstIndexRequest); assertThat(index.getVersion(), equalTo(1L)); @@ -2395,7 +2395,7 @@ public class InternalEngineTests extends ESTestCase { // create { - ParsedDocument doc = testParsedDocument(Integer.toString(0), Integer.toString(0), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null); + ParsedDocument doc = testParsedDocument(Integer.toString(0), Integer.toString(0), "test", null, testDocument(), new BytesArray("{}"), null); Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(0)), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false); try (InternalEngine engine = new InternalEngine(copy(config, EngineConfig.OpenMode.CREATE_INDEX_AND_TRANSLOG))){ @@ -2455,7 +2455,7 @@ public class InternalEngineTests extends ESTestCase { } public void testCheckDocumentFailure() throws Exception { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); Exception documentFailure = engine.checkIfDocumentFailureOrThrow(new Engine.Index(newUid("1"), doc), new IOException("simulated document failure")); assertThat(documentFailure, instanceOf(IOException.class)); try { @@ -2498,7 +2498,7 @@ public class InternalEngineTests extends ESTestCase { public void testHandleDocumentFailure() throws Exception { try (Store store = createStore()) { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); ThrowingIndexWriter throwingIndexWriter = new ThrowingIndexWriter(store.directory(), new IndexWriterConfig()); try (Engine engine = createEngine(defaultSettings, store, createTempDir(), NoMergePolicy.INSTANCE, () -> throwingIndexWriter)) { // test document failure while indexing @@ -2522,7 +2522,7 @@ public class InternalEngineTests extends ESTestCase { public void testDocStats() throws IOException { final int numDocs = randomIntBetween(2, 10); // at least 2 documents otherwise we don't see any deletes below for (int i = 0; i < numDocs; i++) { - ParsedDocument doc = 
testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null); + ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, testDocument(), new BytesArray("{}"), null); Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false); Engine.IndexResult indexResult = engine.index(firstIndexRequest); assertThat(indexResult.getVersion(), equalTo(1L)); @@ -2532,7 +2532,7 @@ public class InternalEngineTests extends ESTestCase { assertEquals(0, docStats.getDeleted()); engine.forceMerge(randomBoolean(), 1, false, false, false); - ParsedDocument doc = testParsedDocument(Integer.toString(0), Integer.toString(0), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null); + ParsedDocument doc = testParsedDocument(Integer.toString(0), Integer.toString(0), "test", null, testDocument(), new BytesArray("{}"), null); Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(0)), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false); Engine.IndexResult index = engine.index(firstIndexRequest); assertThat(index.getVersion(), equalTo(2L)); @@ -2547,7 +2547,7 @@ public class InternalEngineTests extends ESTestCase { } public void testDoubleDelivery() throws IOException { - final ParsedDocument doc = testParsedDocument("1", "1", "test", null, 100, -1, testDocumentWithTextField(), new BytesArray("{}".getBytes(Charset.defaultCharset())), null); + final ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), new BytesArray("{}".getBytes(Charset.defaultCharset())), null); Engine.Index operation = randomAppendOnly(1, doc, false); Engine.Index retry = randomAppendOnly(1, doc, true); if (randomBoolean()) { @@ -2603,7 +2603,7 @@ public class InternalEngineTests extends ESTestCase { public void testRetryWithAutogeneratedIdWorksAndNoDuplicateDocs() throws IOException { - final ParsedDocument doc = testParsedDocument("1", "1", "test", null, 100, -1, testDocumentWithTextField(), new BytesArray("{}".getBytes(Charset.defaultCharset())), null); + final ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), new BytesArray("{}".getBytes(Charset.defaultCharset())), null); boolean isRetry = false; long autoGeneratedIdTimestamp = 0; @@ -2637,7 +2637,7 @@ public class InternalEngineTests extends ESTestCase { public void testRetryWithAutogeneratedIdsAndWrongOrderWorksAndNoDuplicateDocs() throws IOException { - final ParsedDocument doc = testParsedDocument("1", "1", "test", null, 100, -1, testDocumentWithTextField(), new BytesArray("{}".getBytes(Charset.defaultCharset())), null); + final ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), new BytesArray("{}".getBytes(Charset.defaultCharset())), null); boolean isRetry = true; long autoGeneratedIdTimestamp = 0; @@ -2680,7 +2680,7 @@ public class InternalEngineTests extends ESTestCase { int numDocs = randomIntBetween(1000, 10000); List docs = new ArrayList<>(); for (int i = 0; i < numDocs; i++) { - final ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, i, -1, testDocumentWithTextField(), new BytesArray("{}".getBytes(Charset.defaultCharset())), null); + final ParsedDocument doc = 
testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, testDocumentWithTextField(), new BytesArray("{}".getBytes(Charset.defaultCharset())), null); Engine.Index originalIndex = randomAppendOnly(i, doc, false); Engine.Index retryIndex = randomAppendOnly(i, doc, true); docs.add(originalIndex); @@ -2743,7 +2743,7 @@ public class InternalEngineTests extends ESTestCase { assertEquals(0, engine.getNumIndexVersionsLookups()); List docs = new ArrayList<>(); for (int i = 0; i < numDocs; i++) { - final ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, i, -1, testDocumentWithTextField(), new BytesArray("{}".getBytes(Charset.defaultCharset())), null); + final ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, testDocumentWithTextField(), new BytesArray("{}".getBytes(Charset.defaultCharset())), null); Engine.Index index = randomAppendOnly(i, doc, false); docs.add(index); } @@ -2821,7 +2821,7 @@ public class InternalEngineTests extends ESTestCase { }); InternalEngine internalEngine = new InternalEngine(config); int docId = 0; - final ParsedDocument doc = testParsedDocument(Integer.toString(docId), Integer.toString(docId), "test", null, docId, -1, + final ParsedDocument doc = testParsedDocument(Integer.toString(docId), Integer.toString(docId), "test", null, testDocumentWithTextField(), new BytesArray("{}".getBytes(Charset.defaultCharset())), null); Engine.Index index = randomAppendOnly(docId, doc, false); @@ -2882,7 +2882,7 @@ public class InternalEngineTests extends ESTestCase { })); final Document document = testDocument(); document.add(new TextField("value", "test", Field.Store.YES)); - final ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null); + final ParsedDocument doc = testParsedDocument("1", "1", "test", null, document, B_1, null); final Engine.Index first = new Engine.Index(newUid("1"), doc); expectThrows(error.getClass(), () -> engine.index(first)); failWithFatalError.set(false); diff --git a/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java index 0ab65825bc8..50ea9c08716 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/ShadowEngineTests.java @@ -171,14 +171,14 @@ public class ShadowEngineTests extends ESTestCase { } - private ParsedDocument testParsedDocument(String uid, String id, String type, String routing, long timestamp, long ttl, ParseContext.Document document, BytesReference source, Mapping mappingsUpdate) { + private ParsedDocument testParsedDocument(String uid, String id, String type, String routing, ParseContext.Document document, BytesReference source, Mapping mappingsUpdate) { Field uidField = new Field("_uid", uid, UidFieldMapper.Defaults.FIELD_TYPE); Field versionField = new NumericDocValuesField("_version", 0); Field seqNoField = new NumericDocValuesField("_seq_no", 0); document.add(uidField); document.add(versionField); document.add(new LongPoint("point_field", 42)); // so that points report memory/disk usage - return new ParsedDocument(versionField, seqNoField, id, type, routing, timestamp, ttl, Arrays.asList(document), source, mappingsUpdate); + return new ParsedDocument(versionField, seqNoField, id, type, routing, Arrays.asList(document), source, mappingsUpdate); } protected Store createStore(Path p) throws IOException { @@ -260,7 +260,7 
@@ public class ShadowEngineTests extends ESTestCase { public void testCommitStats() { // create a doc and refresh - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); primaryEngine.index(new Engine.Index(newUid("1"), doc)); CommitStats stats1 = replicaEngine.commitStats(); @@ -292,10 +292,10 @@ public class ShadowEngineTests extends ESTestCase { assertThat(primaryEngine.segmentsStats(false).getMemoryInBytes(), equalTo(0L)); // create a doc and refresh - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); primaryEngine.index(new Engine.Index(newUid("1"), doc)); - ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, null); + ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, testDocumentWithTextField(), B_2, null); primaryEngine.index(new Engine.Index(newUid("2"), doc2)); primaryEngine.refresh("test"); @@ -354,7 +354,7 @@ public class ShadowEngineTests extends ESTestCase { assertThat(segments.get(0).isCompound(), equalTo(true)); - ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), B_3, null); + ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, testDocumentWithTextField(), B_3, null); primaryEngine.index(new Engine.Index(newUid("3"), doc3)); primaryEngine.refresh("test"); @@ -426,7 +426,7 @@ public class ShadowEngineTests extends ESTestCase { primaryEngine.flush(); replicaEngine.refresh("test"); - ParsedDocument doc4 = testParsedDocument("4", "4", "test", null, -1, -1, testDocumentWithTextField(), B_3, null); + ParsedDocument doc4 = testParsedDocument("4", "4", "test", null, testDocumentWithTextField(), B_3, null); primaryEngine.index(new Engine.Index(newUid("4"), doc4)); primaryEngine.refresh("test"); @@ -459,7 +459,7 @@ public class ShadowEngineTests extends ESTestCase { List segments = primaryEngine.segments(true); assertThat(segments.isEmpty(), equalTo(true)); - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); primaryEngine.index(new Engine.Index(newUid("1"), doc)); primaryEngine.refresh("test"); @@ -467,10 +467,10 @@ public class ShadowEngineTests extends ESTestCase { assertThat(segments.size(), equalTo(1)); assertThat(segments.get(0).ramTree, notNullValue()); - ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, null); + ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, testDocumentWithTextField(), B_2, null); primaryEngine.index(new Engine.Index(newUid("2"), doc2)); primaryEngine.refresh("test"); - ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), B_3, null); + ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, testDocumentWithTextField(), B_3, null); primaryEngine.index(new Engine.Index(newUid("3"), doc3)); primaryEngine.refresh("test"); @@ -496,7 +496,7 @@ public class ShadowEngineTests extends ESTestCase { // create a document ParseContext.Document document = testDocumentWithTextField(); document.add(new 
Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE)); - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, document, B_1, null); try { replicaEngine.index(new Engine.Index(newUid("1"), doc)); fail("should have thrown an exception"); @@ -515,7 +515,7 @@ public class ShadowEngineTests extends ESTestCase { // index a document document = testDocument(); document.add(new TextField("value", "test1", Field.Store.YES)); - doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null); + doc = testParsedDocument("1", "1", "test", null, document, B_1, null); try { replicaEngine.index(new Engine.Index(newUid("1"), doc)); fail("should have thrown an exception"); @@ -534,7 +534,7 @@ public class ShadowEngineTests extends ESTestCase { // Now, add a document to the primary so we can test shadow engine deletes document = testDocumentWithTextField(); document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE)); - doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null); + doc = testParsedDocument("1", "1", "test", null, document, B_1, null); primaryEngine.index(new Engine.Index(newUid("1"), doc)); primaryEngine.flush(); replicaEngine.refresh("test"); @@ -589,7 +589,7 @@ public class ShadowEngineTests extends ESTestCase { // create a document ParseContext.Document document = testDocumentWithTextField(); document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE)); - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, document, B_1, null); primaryEngine.index(new Engine.Index(newUid("1"), doc)); // its not there... @@ -642,7 +642,7 @@ public class ShadowEngineTests extends ESTestCase { document = testDocument(); document.add(new TextField("value", "test1", Field.Store.YES)); document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_2), SourceFieldMapper.Defaults.FIELD_TYPE)); - doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_2, null); + doc = testParsedDocument("1", "1", "test", null, document, B_2, null); primaryEngine.index(new Engine.Index(newUid("1"), doc)); // its not updated yet... @@ -712,7 +712,7 @@ public class ShadowEngineTests extends ESTestCase { // add it back document = testDocumentWithTextField(); document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE)); - doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null); + doc = testParsedDocument("1", "1", "test", null, document, B_1, null); primaryEngine.index(new Engine.Index(newUid("1"), doc)); // its not there... @@ -757,7 +757,7 @@ public class ShadowEngineTests extends ESTestCase { // now do an update document = testDocument(); document.add(new TextField("value", "test1", Field.Store.YES)); - doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null); + doc = testParsedDocument("1", "1", "test", null, document, B_1, null); primaryEngine.index(new Engine.Index(newUid("1"), doc)); // its not updated yet... 
@@ -793,7 +793,7 @@ public class ShadowEngineTests extends ESTestCase { searchResult.close(); // create a document - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); primaryEngine.index(new Engine.Index(newUid("1"), doc)); // its not there... @@ -838,7 +838,7 @@ public class ShadowEngineTests extends ESTestCase { } public void testFailEngineOnCorruption() { - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); primaryEngine.index(new Engine.Index(newUid("1"), doc)); primaryEngine.flush(); MockDirectoryWrapper leaf = DirectoryUtils.getLeaf(replicaEngine.config().getStore().directory(), MockDirectoryWrapper.class); @@ -875,7 +875,7 @@ public class ShadowEngineTests extends ESTestCase { */ public void testFailStart() throws IOException { // Need a commit point for this - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, testDocumentWithTextField(), B_1, null); primaryEngine.index(new Engine.Index(newUid("1"), doc)); primaryEngine.flush(); @@ -961,7 +961,7 @@ public class ShadowEngineTests extends ESTestCase { // create a document ParseContext.Document document = testDocumentWithTextField(); document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE)); - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null); + ParsedDocument doc = testParsedDocument("1", "1", "test", null, document, B_1, null); pEngine.index(new Engine.Index(newUid("1"), doc)); pEngine.flush(true, true); @@ -983,7 +983,7 @@ public class ShadowEngineTests extends ESTestCase { public void testDocStats() throws IOException { final int numDocs = randomIntBetween(2, 10); // at least 2 documents otherwise we don't see any deletes below for (int i = 0; i < numDocs; i++) { - ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null); + ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, testDocument(), new BytesArray("{}"), null); Engine.Index firstIndexRequest = new Engine.Index(newUid(Integer.toString(i)), doc, SequenceNumbersService.UNASSIGNED_SEQ_NO, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false); Engine.IndexResult indexResult = primaryEngine.index(firstIndexRequest); assertThat(indexResult.getVersion(), equalTo(1L)); diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java index 1fa7272d2d9..5e26f26978e 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java @@ -41,19 +41,14 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.mapper.BinaryFieldMapper; import org.elasticsearch.index.mapper.ContentPath; -import org.elasticsearch.index.mapper.GeoPointFieldMapper; -import 
org.elasticsearch.index.mapper.LegacyGeoPointFieldMapper; -import org.elasticsearch.index.mapper.LegacyByteFieldMapper; -import org.elasticsearch.index.mapper.LegacyDoubleFieldMapper; -import org.elasticsearch.index.mapper.LegacyFloatFieldMapper; -import org.elasticsearch.index.mapper.LegacyIntegerFieldMapper; -import org.elasticsearch.index.mapper.LegacyLongFieldMapper; -import org.elasticsearch.index.mapper.LegacyShortFieldMapper; +import org.elasticsearch.index.mapper.KeywordFieldMapper; +import org.elasticsearch.index.mapper.LatLonPointFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper.BuilderContext; import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.ParentFieldMapper; -import org.elasticsearch.index.mapper.StringFieldMapper; +import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; @@ -104,25 +99,31 @@ public abstract class AbstractFieldDataTestCase extends ESSingleNodeTestCase { final MappedFieldType fieldType; final BuilderContext context = new BuilderContext(indexService.getIndexSettings().getSettings(), new ContentPath(1)); if (type.equals("string")) { - fieldType = new StringFieldMapper.Builder(fieldName).tokenized(false).fielddata(docValues == false).docValues(docValues).build(context).fieldType(); - } else if (type.equals("float")) { - fieldType = new LegacyFloatFieldMapper.Builder(fieldName).docValues(docValues).build(context).fieldType(); - } else if (type.equals("double")) { - fieldType = new LegacyDoubleFieldMapper.Builder(fieldName).docValues(docValues).build(context).fieldType(); - } else if (type.equals("long")) { - fieldType = new LegacyLongFieldMapper.Builder(fieldName).docValues(docValues).build(context).fieldType(); - } else if (type.equals("int")) { - fieldType = new LegacyIntegerFieldMapper.Builder(fieldName).docValues(docValues).build(context).fieldType(); - } else if (type.equals("short")) { - fieldType = new LegacyShortFieldMapper.Builder(fieldName).docValues(docValues).build(context).fieldType(); - } else if (type.equals("byte")) { - fieldType = new LegacyByteFieldMapper.Builder(fieldName).docValues(docValues).build(context).fieldType(); - } else if (type.equals("geo_point")) { - if (indexService.getIndexSettings().getIndexVersionCreated().before(Version.V_2_2_0)) { - fieldType = new LegacyGeoPointFieldMapper.Builder(fieldName).docValues(docValues).build(context).fieldType(); + if (docValues) { + fieldType = new KeywordFieldMapper.Builder(fieldName).build(context).fieldType(); } else { - fieldType = new GeoPointFieldMapper.Builder(fieldName).docValues(docValues).build(context).fieldType(); + fieldType = new TextFieldMapper.Builder(fieldName).fielddata(true).build(context).fieldType(); } + } else if (type.equals("float")) { + fieldType = new NumberFieldMapper.Builder(fieldName, NumberFieldMapper.NumberType.FLOAT) + .docValues(docValues).build(context).fieldType(); + } else if (type.equals("double")) { + fieldType = new NumberFieldMapper.Builder(fieldName, NumberFieldMapper.NumberType.DOUBLE) + .docValues(docValues).build(context).fieldType(); + } else if (type.equals("long")) { + fieldType = new NumberFieldMapper.Builder(fieldName, NumberFieldMapper.NumberType.LONG) + .docValues(docValues).build(context).fieldType(); + } else if 
(type.equals("int")) { + fieldType = new NumberFieldMapper.Builder(fieldName, NumberFieldMapper.NumberType.INTEGER) + .docValues(docValues).build(context).fieldType(); + } else if (type.equals("short")) { + fieldType = new NumberFieldMapper.Builder(fieldName, NumberFieldMapper.NumberType.SHORT) + .docValues(docValues).build(context).fieldType(); + } else if (type.equals("byte")) { + fieldType = new NumberFieldMapper.Builder(fieldName, NumberFieldMapper.NumberType.BYTE) + .docValues(docValues).build(context).fieldType(); + } else if (type.equals("geo_point")) { + fieldType = new LatLonPointFieldMapper.Builder(fieldName).docValues(docValues).build(context).fieldType(); } else if (type.equals("_parent")) { fieldType = new ParentFieldMapper.Builder("_type").type(fieldName).build(context).fieldType(); } else if (type.equals("binary")) { diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractGeoFieldDataTestCase.java b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractGeoFieldDataTestCase.java index b1a3c9c0886..df7df5771cb 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractGeoFieldDataTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractGeoFieldDataTestCase.java @@ -20,8 +20,8 @@ package org.elasticsearch.index.fielddata; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; +import org.apache.lucene.document.LatLonDocValuesField; import org.apache.lucene.document.StringField; -import org.apache.lucene.spatial.geopoint.document.GeoPointField; import org.elasticsearch.Version; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; @@ -39,29 +39,16 @@ public abstract class AbstractGeoFieldDataTestCase extends AbstractFieldDataImpl protected Field randomGeoPointField(String fieldName, Field.Store store) { GeoPoint point = randomPoint(random()); - if (indexService.getIndexSettings().getIndexVersionCreated().before(Version.V_2_2_0)) { - return new StringField(fieldName, point.lat()+","+point.lon(), store); - } - final GeoPointField.TermEncoding termEncoding; - termEncoding = indexService.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_2_3_0) ? 
- GeoPointField.TermEncoding.PREFIX : GeoPointField.TermEncoding.NUMERIC; - return new GeoPointField(fieldName, point.lat(), point.lon(), termEncoding, store); + return new LatLonDocValuesField(fieldName, point.lat(), point.lon()); } @Override protected boolean hasDocValues() { - // prior to 22 docValues were not required - if (indexService.getIndexSettings().getIndexVersionCreated().before(Version.V_2_2_0)) { - return false; - } return true; } @Override protected long minRamBytesUsed() { - if (indexService.getIndexSettings().getIndexVersionCreated().before(Version.V_2_2_0)) { - return super.minRamBytesUsed(); - } return 0; } diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java index af60e2e7b09..cefa9c74ea3 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/IndexFieldDataServiceTests.java @@ -38,12 +38,6 @@ import org.elasticsearch.index.fielddata.plain.SortedSetDVOrdinalsIndexFieldData import org.elasticsearch.index.mapper.BooleanFieldMapper; import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.LegacyByteFieldMapper; -import org.elasticsearch.index.mapper.LegacyDoubleFieldMapper; -import org.elasticsearch.index.mapper.LegacyFloatFieldMapper; -import org.elasticsearch.index.mapper.LegacyIntegerFieldMapper; -import org.elasticsearch.index.mapper.LegacyLongFieldMapper; -import org.elasticsearch.index.mapper.LegacyShortFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper; @@ -198,11 +192,11 @@ public class IndexFieldDataServiceTests extends ESSingleNodeTestCase { } public void testRequireDocValuesOnLongs() { - doTestRequireDocValues(new LegacyLongFieldMapper.LongFieldType()); + doTestRequireDocValues(new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG)); } public void testRequireDocValuesOnDoubles() { - doTestRequireDocValues(new LegacyDoubleFieldMapper.DoubleFieldType()); + doTestRequireDocValues(new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE)); } public void testRequireDocValuesOnBools() { diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java index 1f23f726ef6..e9cfff5a4fb 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java @@ -60,7 +60,7 @@ public class ParentChildFieldDataTests extends AbstractFieldDataTestCase { private final String grandChildType = "grand-child"; @Before - public void before() throws Exception { + public void setupData() throws Exception { mapperService.merge( childType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(childType, "_parent", "type=" + parentType).string()), MapperService.MergeReason.MAPPING_UPDATE, false ); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/AbstractNumericFieldMapperTestCase.java b/core/src/test/java/org/elasticsearch/index/mapper/AbstractNumericFieldMapperTestCase.java new file mode 100644 index 00000000000..7ee837e71c7 --- /dev/null +++ 
b/core/src/test/java/org/elasticsearch/index/mapper/AbstractNumericFieldMapperTestCase.java @@ -0,0 +1,144 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.index.mapper; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.InternalSettingsPlugin; +import org.elasticsearch.test.VersionUtils; +import org.junit.Before; + +import java.io.IOException; +import java.util.Collection; +import java.util.Set; + +import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom; +import static org.hamcrest.Matchers.containsString; + +public abstract class AbstractNumericFieldMapperTestCase extends ESSingleNodeTestCase { + protected Set TYPES; + protected IndexService indexService; + protected DocumentMapperParser parser; + + @Before + public void setup() { + indexService = createIndex("test"); + parser = indexService.mapperService().documentMapperParser(); + setTypeList(); + } + + @Override + protected Collection> getPlugins() { + return pluginList(InternalSettingsPlugin.class); + } + + + protected abstract void setTypeList(); + + public void testDefaults() throws Exception { + for (String type : TYPES) { + doTestDefaults(type); + } + } + + protected abstract void doTestDefaults(String type) throws Exception; + + public void testNotIndexed() throws Exception { + for (String type : TYPES) { + doTestNotIndexed(type); + } + } + + protected abstract void doTestNotIndexed(String type) throws Exception; + + public void testNoDocValues() throws Exception { + for (String type : TYPES) { + doTestNoDocValues(type); + } + } + + protected abstract void doTestNoDocValues(String type) throws Exception; + + public void testStore() throws Exception { + for (String type : TYPES) { + doTestStore(type); + } + } + + protected abstract void doTestStore(String type) throws Exception; + + public void testCoerce() throws Exception { + for (String type : TYPES) { + doTestCoerce(type); + } + } + + protected abstract void doTestCoerce(String type) throws IOException; + + public void testIncludeInAll() throws Exception { + for (String type : TYPES) { + doTestIncludeInAll(type); + } + } + + protected abstract void doTestIncludeInAll(String type) throws Exception; + + public void testNullValue() throws IOException { + for (String type : TYPES) { + doTestNullValue(type); + } + } + + protected abstract void doTestNullValue(String type) throws IOException; + + public void 
testEmptyName() throws IOException { + // after version 5 + for (String type : TYPES) { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("").field("type", type).endObject().endObject() + .endObject().endObject().string(); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> parser.parse("type", new CompressedXContent(mapping)) + ); + assertThat(e.getMessage(), containsString("name cannot be empty string")); + } + + // before 5.x + Version oldVersion = VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_5); + Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, oldVersion).build(); + indexService = createIndex("test_old", oldIndexSettings); + parser = indexService.mapperService().documentMapperParser(); + for (String type : TYPES) { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("").field("type", type).endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); + assertEquals(mapping, defaultMapper.mappingSource().string()); + } + } + +} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java index 989f1fa6835..2243c1182bd 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/BinaryFieldMapperTests.java @@ -111,12 +111,5 @@ public class BinaryFieldMapperTests extends ESSingleNodeTestCase { () -> createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)) ); assertThat(e.getMessage(), containsString("name cannot be empty string")); - - // before 5.x - Version oldVersion = VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_5); - Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, oldVersion).build(); - - DocumentMapper defaultMapper = createIndex("test_old", oldIndexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - assertEquals(mapping, defaultMapper.mappingSource().string()); } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java index b07f3b43ff6..308f7755275 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java @@ -24,36 +24,27 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.store.Directory; import org.apache.lucene.store.RAMDirectory; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.DocumentMapperParser; -import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.ParseContext.Document; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.VersionUtils; import org.junit.Before; import java.io.IOException; import java.util.Collection; -import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom; import static org.hamcrest.Matchers.containsString; public class BooleanFieldMapperTests extends ESSingleNodeTestCase { @@ -62,7 +53,7 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase { DocumentMapperParser parser; @Before - public void before() { + public void setup() { indexService = createIndex("test"); parser = indexService.mapperService().documentMapperParser(); } @@ -179,44 +170,16 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase { .endObject() .bytes()); Document doc = parsedDoc.rootDoc(); - assertEquals(DocValuesType.SORTED_NUMERIC, LegacyStringMappingTests.docValuesType(doc, "bool1")); - assertEquals(DocValuesType.SORTED_NUMERIC, LegacyStringMappingTests.docValuesType(doc, "bool2")); - assertEquals(DocValuesType.SORTED_NUMERIC, LegacyStringMappingTests.docValuesType(doc, "bool3")); - } - - public void testBwCompatDocValues() throws Exception { - Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_2_0).build(); - indexService = createIndex("test_old", oldIndexSettings); - parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("bool1") - .field("type", "boolean") - .endObject() - .startObject("bool2") - .field("type", "boolean") - .field("index", "no") - .endObject() - .startObject("bool3") - .field("type", "boolean") - .field("index", "not_analyzed") - .endObject() - .endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("bool1", true) - .field("bool2", true) - .field("bool3", true) - .endObject() - .bytes()); - Document doc = parsedDoc.rootDoc(); - assertEquals(DocValuesType.SORTED_NUMERIC, LegacyStringMappingTests.docValuesType(doc, "bool1")); - assertEquals(DocValuesType.NONE, LegacyStringMappingTests.docValuesType(doc, "bool2")); - assertEquals(DocValuesType.SORTED_NUMERIC, LegacyStringMappingTests.docValuesType(doc, "bool3")); + IndexableField[] fields = doc.getFields("bool1"); + assertEquals(2, fields.length); + assertEquals(DocValuesType.NONE, fields[0].fieldType().docValuesType()); + assertEquals(DocValuesType.SORTED_NUMERIC, fields[1].fieldType().docValuesType()); + fields = doc.getFields("bool2"); + assertEquals(1, fields.length); + assertEquals(DocValuesType.SORTED_NUMERIC, fields[0].fieldType().docValuesType()); + 
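For context on the mapper-construction changes earlier in this patch (the AbstractFieldDataTestCase and IndexFieldDataServiceTests hunks): the per-type Legacy*FieldMapper builders are replaced by a single NumberFieldMapper.Builder parameterized with a NumberType, the old string type splits into keyword/text, and geo_point moves to LatLonPointFieldMapper. A minimal fragment showing the new-style construction; it assumes the same fieldName, docValues flag and Mapper.BuilderContext `context` that the test builds, and is illustrative rather than part of the patch:

    // Fragment only: mirrors the builder calls introduced in this patch, assuming
    // `fieldName`, `docValues` and a Mapper.BuilderContext `context` are in scope.
    MappedFieldType longType = new NumberFieldMapper.Builder(fieldName, NumberFieldMapper.NumberType.LONG)
            .docValues(docValues).build(context).fieldType();
    MappedFieldType keywordType = new KeywordFieldMapper.Builder(fieldName)
            .build(context).fieldType();                      // doc-values backed string
    MappedFieldType textType = new TextFieldMapper.Builder(fieldName)
            .fielddata(true).build(context).fieldType();      // analyzed string with fielddata
    MappedFieldType pointType = new LatLonPointFieldMapper.Builder(fieldName)
            .docValues(docValues).build(context).fieldType(); // replaces the legacy geo_point mappers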
fields = doc.getFields("bool3"); + assertEquals(DocValuesType.NONE, fields[0].fieldType().docValuesType()); + assertEquals(DocValuesType.SORTED_NUMERIC, fields[1].fieldType().docValuesType()); } public void testEmptyName() throws IOException { @@ -229,14 +192,5 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase { () -> parser.parse("type", new CompressedXContent(mapping)) ); assertThat(e.getMessage(), containsString("name cannot be empty string")); - - // before 5.x - Version oldVersion = VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_5); - Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, oldVersion).build(); - indexService = createIndex("test_old", oldIndexSettings); - parser = indexService.mapperService().documentMapperParser(); - - DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - assertEquals(mapping, defaultMapper.mappingSource().string()); } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapper2xTests.java b/core/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapper2xTests.java deleted file mode 100644 index a44941a19d8..00000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapper2xTests.java +++ /dev/null @@ -1,156 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.index.mapper; - -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.CompletionFieldMapper2x; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.DocumentMapperParser; -import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.VersionUtils; - -import java.io.IOException; -import java.util.Collection; -import java.util.Map; - -import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom; -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; - -public class CompletionFieldMapper2xTests extends ESSingleNodeTestCase { - private final Version PRE2X_VERSION = VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_1); - - @Override - protected Collection> getPlugins() { - return pluginList(InternalSettingsPlugin.class); - } - - public void testDefaultConfiguration() throws IOException { - String mapping = jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("completion") - .field("type", "completion") - .endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = createIndex("test", - Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id).build()) - .mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - assertThat(fieldMapper, instanceOf(CompletionFieldMapper2x.class)); - - CompletionFieldMapper2x completionFieldMapper = (CompletionFieldMapper2x) fieldMapper; - assertThat(completionFieldMapper.isStoringPayloads(), is(false)); - } - - public void testThatSerializationIncludesAllElements() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("completion") - .field("type", "completion") - .field("analyzer", "simple") - .field("search_analyzer", "standard") - .field("payloads", true) - .field("preserve_separators", false) - .field("preserve_position_increments", true) - .field("max_input_length", 14) - - .endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = createIndex("test", - Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id).build()) - .mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - assertThat(fieldMapper, instanceOf(CompletionFieldMapper2x.class)); - - CompletionFieldMapper2x completionFieldMapper = (CompletionFieldMapper2x) fieldMapper; - XContentBuilder builder = jsonBuilder().startObject(); - 
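The AbstractNumericFieldMapperTestCase added earlier in this patch factors the per-type loops (defaults, store, coerce, null_value, empty name, and so on) out of the individual numeric mapper tests. A hypothetical subclass only fills in the type list and the per-type bodies; the class name and assertions below are invented for illustration, only the base-class contract comes from the patch:

    // Hypothetical subclass of the new base class; sketch only.
    package org.elasticsearch.index.mapper;

    import java.util.Arrays;
    import java.util.HashSet;

    import org.elasticsearch.common.compress.CompressedXContent;
    import org.elasticsearch.common.xcontent.XContentFactory;

    public class ExampleNumericFieldMapperTests extends AbstractNumericFieldMapperTestCase {

        @Override
        protected void setTypeList() {
            // TYPES drives every testXxx loop in the base class
            TYPES = new HashSet<>(Arrays.asList("byte", "short", "integer", "long", "float", "double"));
        }

        @Override
        protected void doTestDefaults(String type) throws Exception {
            String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                    .startObject("properties").startObject("field").field("type", type).endObject().endObject()
                    .endObject().endObject().string();
            DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
            assertEquals(mapping, mapper.mappingSource().string());
        }

        // The remaining hooks are left empty in this sketch.
        @Override protected void doTestNotIndexed(String type) {}
        @Override protected void doTestNoDocValues(String type) {}
        @Override protected void doTestStore(String type) {}
        @Override protected void doTestCoerce(String type) {}
        @Override protected void doTestIncludeInAll(String type) {}
        @Override protected void doTestNullValue(String type) {}
    }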
completionFieldMapper.toXContent(builder, null).endObject(); - builder.close(); - Map serializedMap; - try (XContentParser parser = JsonXContent.jsonXContent.createParser(builder.bytes())) { - serializedMap = parser.map(); - } - Map configMap = (Map) serializedMap.get("completion"); - assertThat(configMap.get("analyzer").toString(), is("simple")); - assertThat(configMap.get("search_analyzer").toString(), is("standard")); - assertThat(Boolean.valueOf(configMap.get("payloads").toString()), is(true)); - assertThat(Boolean.valueOf(configMap.get("preserve_separators").toString()), is(false)); - assertThat(Boolean.valueOf(configMap.get("preserve_position_increments").toString()), is(true)); - assertThat(Integer.valueOf(configMap.get("max_input_length").toString()), is(14)); - } - - public void testThatSerializationCombinesToOneAnalyzerFieldIfBothAreEqual() throws Exception { - String mapping = jsonBuilder().startObject().startObject("type1") - .startObject("properties").startObject("completion") - .field("type", "completion") - .field("analyzer", "simple") - .field("search_analyzer", "simple") - .endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = createIndex("test", - Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id).build()) - .mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); - - FieldMapper fieldMapper = defaultMapper.mappers().getMapper("completion"); - assertThat(fieldMapper, instanceOf(CompletionFieldMapper2x.class)); - - CompletionFieldMapper2x completionFieldMapper = (CompletionFieldMapper2x) fieldMapper; - XContentBuilder builder = jsonBuilder().startObject(); - completionFieldMapper.toXContent(builder, null).endObject(); - builder.close(); - Map serializedMap; - try (XContentParser parser = JsonXContent.jsonXContent.createParser(builder.bytes())) { - serializedMap = parser.map(); - } - Map configMap = (Map) serializedMap.get("completion"); - assertThat(configMap.get("analyzer").toString(), is("simple")); - } - - public void testEmptyName() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("").field("type", "completion").endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = createIndex("test", - Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id).build()) - .mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - FieldMapper fieldMapper = defaultMapper.mappers().getMapper(""); - assertThat(fieldMapper, instanceOf(CompletionFieldMapper2x.class)); - - CompletionFieldMapper2x completionFieldMapper = (CompletionFieldMapper2x) fieldMapper; - assertThat(completionFieldMapper.isStoringPayloads(), is(false)); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/CompoundTypesTests.java b/core/src/test/java/org/elasticsearch/index/mapper/CompoundTypesTests.java deleted file mode 100644 index 1c00f821996..00000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/CompoundTypesTests.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.mapper; - -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; - -import java.util.Collection; - -import static org.hamcrest.Matchers.closeTo; -import static org.hamcrest.Matchers.equalTo; - -public class CompoundTypesTests extends ESSingleNodeTestCase { - - @Override - protected Collection> getPlugins() { - return pluginList(InternalSettingsPlugin.class); - } - - private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0).build(); - - public void testBackCompatStringType() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field1").field("type", "string").endObject() - .endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field1", "value1") - .field("field2", "value2") - .endObject() - .bytes()); - - assertThat(doc.rootDoc().get("field1"), equalTo("value1")); - assertThat((double) doc.rootDoc().getField("field1").boost(), closeTo(1.0d, 0.000001d)); - assertThat(doc.rootDoc().get("field2"), equalTo("value2")); - - doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startObject("field1").field("value", "value1").field("boost", 2.0f).endObject() - .field("field2", "value2") - .endObject() - .bytes()); - - assertThat(doc.rootDoc().get("field1"), equalTo("value1")); - assertThat((double) doc.rootDoc().getField("field1").boost(), closeTo(2.0d, 0.000001d)); - assertThat(doc.rootDoc().get("field2"), equalTo("value2")); - - doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field1", "value1") - .field("field2", "value2") - .endObject() - .bytes()); - - assertThat(doc.rootDoc().get("field1"), equalTo("value1")); - assertThat((double) doc.rootDoc().getField("field1").boost(), closeTo(1.0d, 0.000001d)); - assertThat(doc.rootDoc().get("field2"), equalTo("value2")); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/CustomBoostMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/CustomBoostMappingTests.java deleted file mode 100644 index 
391f987e714..00000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/CustomBoostMappingTests.java +++ /dev/null @@ -1,187 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.search.BoostQuery; -import org.apache.lucene.search.TermQuery; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.DocumentFieldMappers; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; - -import java.util.Collection; - -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; - -public class CustomBoostMappingTests extends ESSingleNodeTestCase { - - private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0).build(); - - @Override - protected Collection> getPlugins() { - return pluginList(InternalSettingsPlugin.class); - } - - public void testBackCompatCustomBoostValues() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") - .startObject("s_field").field("type", "string").endObject() - .startObject("l_field").field("type", "long").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("i_field").field("type", "integer").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("sh_field").field("type", "short").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("b_field").field("type", "byte").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("d_field").field("type", "double").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("f_field").field("type", "float").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("date_field").field("type", "date").startObject("norms").field("enabled", true).endObject().endObject() - .endObject().endObject().endObject().string(); - - DocumentMapper mapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - ParsedDocument doc = mapper.parse("test", "type", "1", 
XContentFactory.jsonBuilder().startObject() - .startObject("s_field").field("value", "s_value").field("boost", 2.0f).endObject() - .startObject("l_field").field("value", 1L).field("boost", 3.0f).endObject() - .startObject("i_field").field("value", 1).field("boost", 4.0f).endObject() - .startObject("sh_field").field("value", 1).field("boost", 5.0f).endObject() - .startObject("b_field").field("value", 1).field("boost", 6.0f).endObject() - .startObject("d_field").field("value", 1).field("boost", 7.0f).endObject() - .startObject("f_field").field("value", 1).field("boost", 8.0f).endObject() - .startObject("date_field").field("value", "20100101").field("boost", 9.0f).endObject() - .endObject().bytes()); - - assertThat(doc.rootDoc().getField("s_field").boost(), equalTo(2.0f)); - assertThat(doc.rootDoc().getField("l_field").boost(), equalTo(3.0f)); - assertThat(doc.rootDoc().getField("i_field").boost(), equalTo(4.0f)); - assertThat(doc.rootDoc().getField("sh_field").boost(), equalTo(5.0f)); - assertThat(doc.rootDoc().getField("b_field").boost(), equalTo(6.0f)); - assertThat(doc.rootDoc().getField("d_field").boost(), equalTo(7.0f)); - assertThat(doc.rootDoc().getField("f_field").boost(), equalTo(8.0f)); - assertThat(doc.rootDoc().getField("date_field").boost(), equalTo(9.0f)); - } - - public void testBackCompatFieldMappingBoostValues() throws Exception { - { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") - .startObject("s_field").field("type", "keyword").field("boost", 2.0f).endObject() - .startObject("l_field").field("type", "long").field("boost", 3.0f).endObject() - .startObject("i_field").field("type", "integer").field("boost", 4.0f).endObject() - .startObject("sh_field").field("type", "short").field("boost", 5.0f).endObject() - .startObject("b_field").field("type", "byte").field("boost", 6.0f).endObject() - .startObject("d_field").field("type", "double").field("boost", 7.0f).endObject() - .startObject("f_field").field("type", "float").field("boost", 8.0f).endObject() - .startObject("date_field").field("type", "date").field("boost", 9.0f).endObject() - .endObject().endObject().endObject().string(); - IndexService indexService = createIndex("test", BW_SETTINGS); - QueryShardContext context = indexService.newQueryShardContext(0, null, () -> 0L); - DocumentMapper mapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - DocumentFieldMappers fieldMappers = mapper.mappers(); - assertThat(fieldMappers.getMapper("s_field").fieldType().termQuery("0", context), instanceOf(TermQuery.class)); - assertThat(fieldMappers.getMapper("l_field").fieldType().termQuery("0", context), instanceOf(TermQuery.class)); - assertThat(fieldMappers.getMapper("i_field").fieldType().termQuery("0", context), instanceOf(TermQuery.class)); - assertThat(fieldMappers.getMapper("sh_field").fieldType().termQuery("0", context), instanceOf(TermQuery.class)); - assertThat(fieldMappers.getMapper("b_field").fieldType().termQuery("0", context), instanceOf(TermQuery.class)); - assertThat(fieldMappers.getMapper("d_field").fieldType().termQuery("0", context), instanceOf(TermQuery.class)); - assertThat(fieldMappers.getMapper("f_field").fieldType().termQuery("0", context), instanceOf(TermQuery.class)); - assertThat(fieldMappers.getMapper("date_field").fieldType().termQuery("0", context), instanceOf(TermQuery.class)); - - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject() - 
.field("s_field", "s_value") - .field("l_field", 1L) - .field("i_field", 1) - .field("sh_field", 1) - .field("b_field", 1) - .field("d_field", 1) - .field("f_field", 1) - .field("date_field", "20100101") - .endObject().bytes()); - - assertThat(doc.rootDoc().getField("s_field").boost(), equalTo(2.0f)); - assertThat(doc.rootDoc().getField("s_field").fieldType().omitNorms(), equalTo(false)); - assertThat(doc.rootDoc().getField("l_field").boost(), equalTo(3.0f)); - assertThat(doc.rootDoc().getField("l_field").fieldType().omitNorms(), equalTo(false)); - assertThat(doc.rootDoc().getField("i_field").boost(), equalTo(4.0f)); - assertThat(doc.rootDoc().getField("i_field").fieldType().omitNorms(), equalTo(false)); - assertThat(doc.rootDoc().getField("sh_field").boost(), equalTo(5.0f)); - assertThat(doc.rootDoc().getField("sh_field").fieldType().omitNorms(), equalTo(false)); - assertThat(doc.rootDoc().getField("b_field").boost(), equalTo(6.0f)); - assertThat(doc.rootDoc().getField("b_field").fieldType().omitNorms(), equalTo(false)); - assertThat(doc.rootDoc().getField("d_field").boost(), equalTo(7.0f)); - assertThat(doc.rootDoc().getField("d_field").fieldType().omitNorms(), equalTo(false)); - assertThat(doc.rootDoc().getField("f_field").boost(), equalTo(8.0f)); - assertThat(doc.rootDoc().getField("f_field").fieldType().omitNorms(), equalTo(false)); - assertThat(doc.rootDoc().getField("date_field").boost(), equalTo(9.0f)); - assertThat(doc.rootDoc().getField("date_field").fieldType().omitNorms(), equalTo(false)); - } - - { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") - .startObject("s_field").field("type", "keyword").field("boost", 2.0f).endObject() - .startObject("l_field").field("type", "long").field("boost", 3.0f).endObject() - .startObject("i_field").field("type", "integer").field("boost", 4.0f).endObject() - .startObject("sh_field").field("type", "short").field("boost", 5.0f).endObject() - .startObject("b_field").field("type", "byte").field("boost", 6.0f).endObject() - .startObject("d_field").field("type", "double").field("boost", 7.0f).endObject() - .startObject("f_field").field("type", "float").field("boost", 8.0f).endObject() - .startObject("date_field").field("type", "date").field("boost", 9.0f).endObject() - .endObject().endObject().endObject().string(); - IndexService indexService = createIndex("text"); - QueryShardContext context = indexService.newQueryShardContext(0, null, () -> 0L); - DocumentMapper mapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - DocumentFieldMappers fieldMappers = mapper.mappers(); - assertThat(fieldMappers.getMapper("s_field").fieldType().termQuery("0", context), instanceOf(BoostQuery.class)); - assertThat(fieldMappers.getMapper("l_field").fieldType().termQuery("0", context), instanceOf(BoostQuery.class)); - assertThat(fieldMappers.getMapper("i_field").fieldType().termQuery("0", context), instanceOf(BoostQuery.class)); - assertThat(fieldMappers.getMapper("sh_field").fieldType().termQuery("0", context), instanceOf(BoostQuery.class)); - assertThat(fieldMappers.getMapper("b_field").fieldType().termQuery("0", context), instanceOf(BoostQuery.class)); - assertThat(fieldMappers.getMapper("d_field").fieldType().termQuery("0", context), instanceOf(BoostQuery.class)); - assertThat(fieldMappers.getMapper("f_field").fieldType().termQuery("0", context), instanceOf(BoostQuery.class)); - 
assertThat(fieldMappers.getMapper("date_field").fieldType().termQuery("0", context), instanceOf(BoostQuery.class)); - - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject() - .field("s_field", "s_value") - .field("l_field", 1L) - .field("i_field", 1) - .field("sh_field", 1) - .field("b_field", 1) - .field("d_field", 1) - .field("f_field", 1) - .field("date_field", "20100101") - .endObject().bytes()); - - assertThat(doc.rootDoc().getField("s_field").boost(), equalTo(1f)); - assertThat(doc.rootDoc().getField("s_field").fieldType().omitNorms(), equalTo(true)); - assertThat(doc.rootDoc().getField("l_field").boost(), equalTo(1f)); - assertThat(doc.rootDoc().getField("i_field").boost(), equalTo(1f)); - assertThat(doc.rootDoc().getField("sh_field").boost(), equalTo(1f)); - assertThat(doc.rootDoc().getField("b_field").boost(), equalTo(1f)); - assertThat(doc.rootDoc().getField("d_field").boost(), equalTo(1f)); - assertThat(doc.rootDoc().getField("f_field").boost(), equalTo(1f)); - assertThat(doc.rootDoc().getField("date_field").boost(), equalTo(1f)); - } - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java index cf6335c808a..4242581ffdb 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DateFieldMapperTests.java @@ -21,16 +21,12 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.VersionUtils; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.format.DateTimeFormat; @@ -39,7 +35,6 @@ import org.junit.Before; import java.io.IOException; import java.util.Collection; -import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom; import static org.hamcrest.Matchers.containsString; public class DateFieldMapperTests extends ESSingleNodeTestCase { @@ -48,7 +43,7 @@ public class DateFieldMapperTests extends ESSingleNodeTestCase { DocumentMapperParser parser; @Before - public void before() { + public void setup() { indexService = createIndex("test"); parser = indexService.mapperService().documentMapperParser(); } @@ -340,15 +335,6 @@ public class DateFieldMapperTests extends ESSingleNodeTestCase { () -> parser.parse("type", new CompressedXContent(mapping)) ); assertThat(e.getMessage(), containsString("name cannot be empty string")); - - // before 5.x - Version oldVersion = VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_5); - Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, oldVersion).build(); - indexService = createIndex("test_old", oldIndexSettings); - parser = indexService.mapperService().documentMapperParser(); - - DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - assertEquals(mapping, defaultMapper.mappingSource().toString()); } 
/** diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DocumentFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DocumentFieldMapperTests.java index 2c62a2952a8..6cd16ffc072 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DocumentFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DocumentFieldMapperTests.java @@ -24,6 +24,7 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.document.Field; +import org.apache.lucene.index.IndexableField; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -98,7 +99,7 @@ public class DocumentFieldMapperTests extends LuceneTestCase { } @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { + protected void parseCreateField(ParseContext context, List fields) throws IOException { } @Override diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java index 55f6b0b52cd..f5a92d3f979 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java @@ -120,6 +120,51 @@ public class DocumentParserTests extends ESSingleNodeTestCase { assertEquals("789", values[2]); } + public void testDotsWithExistingNestedMapper() throws Exception { + DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + .startObject("foo").field("type", "nested").startObject("properties") + .startObject("bar").field("type", "integer") + .endObject().endObject().endObject().endObject().endObject().endObject().string(); + DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); + + BytesReference bytes = XContentFactory.jsonBuilder() + .startObject() + .field("foo.bar", 123) + .endObject().bytes(); + MapperParsingException e = expectThrows(MapperParsingException.class, + () -> mapper.parse("test", "type", "1", bytes)); + assertEquals( + "Cannot add a value for field [foo.bar] since one of the intermediate objects is mapped as a nested object: [foo]", + e.getMessage()); + } + + public void testDotsWithDynamicNestedMapper() throws Exception { + DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startArray("dynamic_templates") + .startObject() + .startObject("objects_as_nested") + .field("match_mapping_type", "object") + .startObject("mapping") + .field("type", "nested") + .endObject() + .endObject() + .endObject() + .endArray().endObject().endObject().string(); + DocumentMapper mapper = mapperParser.parse("type", new CompressedXContent(mapping)); + + BytesReference bytes = XContentFactory.jsonBuilder() + .startObject() + .field("foo.bar",42) + .endObject().bytes(); + MapperParsingException e = expectThrows(MapperParsingException.class, + () -> mapper.parse("test", "type", "1", bytes)); + assertEquals( + "It is forbidden to create dynamic nested objects ([foo]) through `copy_to` or dots in field names", + e.getMessage()); + } + public void 
testPropagateDynamicWithExistingMapper() throws Exception { DocumentMapperParser mapperParser = createIndex("test").mapperService().documentMapperParser(); String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java index 9830488989c..621e9a8cccc 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java @@ -41,12 +41,6 @@ public class DynamicTemplateTests extends ESTestCase { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0_alpha1)); assertEquals("Illegal dynamic template parameter: [random_param]", e.getMessage()); - - // but no issues on 2.x for bw compat - DynamicTemplate template = DynamicTemplate.parse("my_template", templateDef, Version.V_2_3_0); - XContentBuilder builder = JsonXContent.contentBuilder(); - template.toXContent(builder, ToXContent.EMPTY_PARAMS); - assertEquals("{\"match_mapping_type\":\"string\",\"mapping\":{\"store\":true}}", builder.string()); } public void testParseUnknownMatchType() { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java index 72f9d09808f..af5e2553be7 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.index.IndexableField; -import org.apache.lucene.spatial.geopoint.document.GeoPointField; import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -86,13 +85,9 @@ public class ExternalFieldMapperTests extends ESSingleNodeTestCase { assertThat(doc.rootDoc().getField("field.bool").stringValue(), is("T")); assertThat(doc.rootDoc().getField("field.point"), notNullValue()); - if (version.before(Version.V_2_2_0)) { - assertThat(doc.rootDoc().getField("field.point").stringValue(), is("42.0,51.0")); - } else if (version.onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - GeoPoint point = new GeoPoint().resetFromIndexableField(doc.rootDoc().getField("field.point")); - assertThat(point.lat(), closeTo(42.0, 1e-5)); - assertThat(point.lon(), closeTo(51.0, 1e-5)); - } + GeoPoint point = new GeoPoint().resetFromIndexableField(doc.rootDoc().getField("field.point")); + assertThat(point.lat(), closeTo(42.0, 1e-5)); + assertThat(point.lon(), closeTo(51.0, 1e-5)); assertThat(doc.rootDoc().getField("field.shape"), notNullValue()); @@ -149,15 +144,9 @@ public class ExternalFieldMapperTests extends ESSingleNodeTestCase { assertThat(doc.rootDoc().getField("field.bool").stringValue(), is("T")); assertThat(doc.rootDoc().getField("field.point"), notNullValue()); - if (version.before(Version.V_2_2_0)) { - assertThat(doc.rootDoc().getField("field.point").stringValue(), is("42.0,51.0")); - } else if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - assertThat(Long.parseLong(doc.rootDoc().getField("field.point").stringValue()), is(GeoPointField.encodeLatLon(42.0, 51.0))); - } else { - GeoPoint point = new 
GeoPoint().resetFromIndexableField(doc.rootDoc().getField("field.point")); - assertThat(point.lat(), closeTo(42.0, 1E-5)); - assertThat(point.lon(), closeTo(51.0, 1E-5)); - } + GeoPoint point = new GeoPoint().resetFromIndexableField(doc.rootDoc().getField("field.point")); + assertThat(point.lat(), closeTo(42.0, 1E-5)); + assertThat(point.lon(), closeTo(51.0, 1E-5)); IndexableField shape = doc.rootDoc().getField("field.shape"); assertThat(shape, notNullValue()); @@ -169,11 +158,7 @@ public class ExternalFieldMapperTests extends ESSingleNodeTestCase { IndexableField raw = doc.rootDoc().getField("field.field.raw"); assertThat(raw, notNullValue()); - if (version.before(Version.V_5_0_0_alpha1)) { - assertThat(raw.stringValue(), is("foo")); - } else { - assertThat(raw.binaryValue(), is(new BytesRef("foo"))); - } + assertThat(raw.binaryValue(), is(new BytesRef("foo"))); } public void testExternalValuesWithMultifieldTwoLevels() throws Exception { @@ -226,11 +211,6 @@ public class ExternalFieldMapperTests extends ESSingleNodeTestCase { assertThat(doc.rootDoc().getField("field.bool").stringValue(), is("T")); assertThat(doc.rootDoc().getField("field.point"), notNullValue()); - if (version.before(Version.V_2_2_0)) { - assertThat(doc.rootDoc().getField("field.point").stringValue(), is("42.0,51.0")); - } else if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - assertThat(Long.parseLong(doc.rootDoc().getField("field.point").stringValue()), is(GeoPointField.encodeLatLon(42.0, 51.0))); - } assertThat(doc.rootDoc().getField("field.shape"), notNullValue()); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ExternalMapper.java b/core/src/test/java/org/elasticsearch/index/mapper/ExternalMapper.java index 1efae7ccb26..94f129219f2 100755 --- a/core/src/test/java/org/elasticsearch/index/mapper/ExternalMapper.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/ExternalMapper.java @@ -19,9 +19,9 @@ package org.elasticsearch.index.mapper; +import org.apache.lucene.index.IndexableField; import org.locationtech.spatial4j.shape.Point; import org.apache.lucene.document.Field; -import org.elasticsearch.Version; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.builders.ShapeBuilders; @@ -57,8 +57,6 @@ public class ExternalMapper extends FieldMapper { private BinaryFieldMapper.Builder binBuilder = new BinaryFieldMapper.Builder(Names.FIELD_BIN); private BooleanFieldMapper.Builder boolBuilder = new BooleanFieldMapper.Builder(Names.FIELD_BOOL); - private GeoPointFieldMapper.Builder pointBuilder = new GeoPointFieldMapper.Builder(Names.FIELD_POINT); - private LegacyGeoPointFieldMapper.Builder legacyPointBuilder = new LegacyGeoPointFieldMapper.Builder(Names.FIELD_POINT); private LatLonPointFieldMapper.Builder latLonPointBuilder = new LatLonPointFieldMapper.Builder(Names.FIELD_POINT); private GeoShapeFieldMapper.Builder shapeBuilder = new GeoShapeFieldMapper.Builder(Names.FIELD_SHAPE); private Mapper.Builder stringBuilder; @@ -83,14 +81,7 @@ public class ExternalMapper extends FieldMapper { context.path().add(name); BinaryFieldMapper binMapper = binBuilder.build(context); BooleanFieldMapper boolMapper = boolBuilder.build(context); - BaseGeoPointFieldMapper pointMapper; - if (context.indexCreatedVersion().before(Version.V_2_2_0)) { - pointMapper = legacyPointBuilder.build(context); - } else if (context.indexCreatedVersion().onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - pointMapper = 
latLonPointBuilder.build(context); - } else { - pointMapper = pointBuilder.build(context); - } + BaseGeoPointFieldMapper pointMapper = latLonPointBuilder.build(context); GeoShapeFieldMapper shapeMapper = shapeBuilder.build(context); FieldMapper stringMapper = (FieldMapper)stringBuilder.build(context); context.path().remove(); @@ -190,7 +181,7 @@ public class ExternalMapper extends FieldMapper { } @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { + protected void parseCreateField(ParseContext context, List fields) throws IOException { throw new UnsupportedOperationException(); } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ExternalMetadataMapper.java b/core/src/test/java/org/elasticsearch/index/mapper/ExternalMetadataMapper.java index 6d585b7e7e7..234e4fa312b 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/ExternalMetadataMapper.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/ExternalMetadataMapper.java @@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.document.Field; import org.apache.lucene.document.Field.Store; import org.apache.lucene.document.StringField; +import org.apache.lucene.index.IndexableField; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.mapper.BooleanFieldMapper; @@ -54,7 +55,7 @@ public class ExternalMetadataMapper extends MetadataFieldMapper { } @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { + protected void parseCreateField(ParseContext context, List fields) throws IOException { // handled in post parse } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/FakeStringFieldMapper.java b/core/src/test/java/org/elasticsearch/index/mapper/FakeStringFieldMapper.java index 5a7c38ac05d..2969b8392b5 100755 --- a/core/src/test/java/org/elasticsearch/index/mapper/FakeStringFieldMapper.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/FakeStringFieldMapper.java @@ -22,17 +22,16 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.document.Field; import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext; -import org.elasticsearch.index.mapper.StringFieldMapper; import org.elasticsearch.index.mapper.StringFieldType; import java.io.IOException; @@ -122,39 +121,33 @@ public class FakeStringFieldMapper extends FieldMapper { super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo); } - @Override - protected StringFieldMapper clone() { - return (StringFieldMapper) super.clone(); - } - @Override protected boolean customBoost() { return true; } @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { - StringFieldMapper.ValueAndBoost valueAndBoost = parseCreateFieldForString(context, fieldType().boost()); - if 
(valueAndBoost.value() == null) { + protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException { + String value; + if (context.externalValueSet()) { + value = context.externalValue().toString(); + } else { + value = context.parser().textOrNull(); + } + + if (value == null) { return; } + if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { - Field field = new Field(fieldType().name(), valueAndBoost.value(), fieldType()); + Field field = new Field(fieldType().name(), value, fieldType()); fields.add(field); } if (fieldType().hasDocValues()) { - fields.add(new SortedSetDocValuesField(fieldType().name(), new BytesRef(valueAndBoost.value()))); + fields.add(new SortedSetDocValuesField(fieldType().name(), new BytesRef(value))); } } - public static StringFieldMapper.ValueAndBoost parseCreateFieldForString(ParseContext context, float defaultBoost) throws IOException { - if (context.externalValueSet()) { - return new StringFieldMapper.ValueAndBoost(context.externalValue().toString(), defaultBoost); - } - XContentParser parser = context.parser(); - return new StringFieldMapper.ValueAndBoost(parser.textOrNull(), defaultBoost); - } - @Override protected String contentType() { return CONTENT_TYPE; diff --git a/core/src/test/java/org/elasticsearch/index/mapper/FieldLevelBoostTests.java b/core/src/test/java/org/elasticsearch/index/mapper/FieldLevelBoostTests.java deleted file mode 100644 index 42089752842..00000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/FieldLevelBoostTests.java +++ /dev/null @@ -1,285 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License.
- */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.index.IndexableField; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.ParseContext.Document; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; - -import java.util.Collection; - -import static org.hamcrest.Matchers.closeTo; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; - -public class FieldLevelBoostTests extends ESSingleNodeTestCase { - - private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0).build(); - - @Override - protected Collection> getPlugins() { - return pluginList(InternalSettingsPlugin.class); - } - - public void testBackCompatFieldLevelBoost() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties") - .startObject("str_field").field("type", "string").endObject() - .startObject("int_field").field("type", "integer").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("byte_field").field("type", "byte").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("date_field").field("type", "date").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("double_field").field("type", "double").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("float_field").field("type", "float").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("long_field").field("type", "long").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("short_field").field("type", "short").startObject("norms").field("enabled", true).endObject().endObject() - .endObject().endObject().endObject() - .string(); - - DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping)); - BytesReference json = XContentFactory.jsonBuilder().startObject() - .startObject("str_field").field("boost", 2.0).field("value", "some name").endObject() - .startObject("int_field").field("boost", 3.0).field("value", 10).endObject() - .startObject("byte_field").field("boost", 4.0).field("value", 20).endObject() - .startObject("date_field").field("boost", 5.0).field("value", "2012-01-10").endObject() - .startObject("double_field").field("boost", 6.0).field("value", 30.0).endObject() - .startObject("float_field").field("boost", 7.0).field("value", 40.0).endObject() - .startObject("long_field").field("boost", 8.0).field("value", 50).endObject() - .startObject("short_field").field("boost", 9.0).field("value", 60).endObject() - .endObject() - .bytes(); - Document doc = docMapper.parse("test", "person", "1", json).rootDoc(); - - IndexableField f = doc.getField("str_field"); - assertThat((double) f.boost(), closeTo(2.0, 0.001)); - - f = doc.getField("int_field"); - assertThat((double) f.boost(), closeTo(3.0, 0.001)); 
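The assertions being deleted here read back index-time, per-field boosts via IndexableField#boost(). With that mechanism removed, the usual way to achieve the same relevance effect is a query-time boost. A minimal sketch, assuming the standard QueryBuilders API and reusing the test's str_field name purely for illustration:

    import org.elasticsearch.index.query.MatchQueryBuilder;
    import org.elasticsearch.index.query.QueryBuilders;

    public class QueryTimeBoostExample {
        public static void main(String[] args) {
            // The boost is attached to the query, not baked into the indexed document.
            MatchQueryBuilder query = QueryBuilders.matchQuery("str_field", "some name").boost(2.0f);
            System.out.println(query); // renders the query as JSON, including the boost
        }
    }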
- - f = doc.getField("byte_field"); - assertThat((double) f.boost(), closeTo(4.0, 0.001)); - - f = doc.getField("date_field"); - assertThat((double) f.boost(), closeTo(5.0, 0.001)); - - f = doc.getField("double_field"); - assertThat((double) f.boost(), closeTo(6.0, 0.001)); - - f = doc.getField("float_field"); - assertThat((double) f.boost(), closeTo(7.0, 0.001)); - - f = doc.getField("long_field"); - assertThat((double) f.boost(), closeTo(8.0, 0.001)); - - f = doc.getField("short_field"); - assertThat((double) f.boost(), closeTo(9.0, 0.001)); - } - - public void testBackCompatFieldLevelMappingBoost() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties") - .startObject("str_field").field("type", "keyword").field("boost", "2.0").endObject() - .startObject("int_field").field("type", "integer").field("boost", "3.0").endObject() - .startObject("byte_field").field("type", "byte").field("boost", "4.0").endObject() - .startObject("date_field").field("type", "date").field("boost", "5.0").endObject() - .startObject("double_field").field("type", "double").field("boost", "6.0").endObject() - .startObject("float_field").field("type", "float").field("boost", "7.0").endObject() - .startObject("long_field").field("type", "long").field("boost", "8.0").endObject() - .startObject("short_field").field("type", "short").field("boost", "9.0").endObject() - .endObject().endObject().endObject() - .string(); - - { - DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping)); - BytesReference json = XContentFactory.jsonBuilder().startObject() - .field("str_field", "some name") - .field("int_field", 10) - .field("byte_field", 20) - .field("date_field", "2012-01-10") - .field("double_field", 30.0) - .field("float_field", 40.0) - .field("long_field", 50) - .field("short_field", 60) - .endObject() - .bytes(); - Document doc = docMapper.parse("test", "person", "1", json).rootDoc(); - - IndexableField f = doc.getField("str_field"); - assertThat((double) f.boost(), closeTo(2.0, 0.001)); - - f = doc.getField("int_field"); - assertThat((double) f.boost(), closeTo(3.0, 0.001)); - - f = doc.getField("byte_field"); - assertThat((double) f.boost(), closeTo(4.0, 0.001)); - - f = doc.getField("date_field"); - assertThat((double) f.boost(), closeTo(5.0, 0.001)); - - f = doc.getField("double_field"); - assertThat((double) f.boost(), closeTo(6.0, 0.001)); - - f = doc.getField("float_field"); - assertThat((double) f.boost(), closeTo(7.0, 0.001)); - - f = doc.getField("long_field"); - assertThat((double) f.boost(), closeTo(8.0, 0.001)); - - f = doc.getField("short_field"); - assertThat((double) f.boost(), closeTo(9.0, 0.001)); - } - - { - DocumentMapper docMapper = createIndex("test2").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping)); - BytesReference json = XContentFactory.jsonBuilder().startObject() - .field("str_field", "some name") - .field("int_field", 10) - .field("byte_field", 20) - .field("date_field", "2012-01-10") - .field("double_field", 30.0) - .field("float_field", 40.0) - .field("long_field", 50) - .field("short_field", 60) - .endObject() - .bytes(); - Document doc = docMapper.parse("test", "person", "1", json).rootDoc(); - - IndexableField f = doc.getField("str_field"); - assertThat(f.boost(), equalTo(1f)); - - f = doc.getField("int_field"); - assertThat(f.boost(), equalTo(1f)); - - f = doc.getField("byte_field"); - 
assertThat(f.boost(), equalTo(1f)); - - f = doc.getField("date_field"); - assertThat(f.boost(), equalTo(1f)); - - f = doc.getField("double_field"); - assertThat(f.boost(), equalTo(1f)); - - f = doc.getField("float_field"); - assertThat(f.boost(), equalTo(1f)); - - f = doc.getField("long_field"); - assertThat(f.boost(), equalTo(1f)); - - f = doc.getField("short_field"); - assertThat(f.boost(), equalTo(1f)); - } - } - - public void testBackCompatInvalidFieldLevelBoost() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties") - .startObject("str_field").field("type", "string").endObject() - .startObject("int_field").field("type", "integer").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("byte_field").field("type", "byte").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("date_field").field("type", "date").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("double_field").field("type", "double").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("float_field").field("type", "float").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("long_field").field("type", "long").startObject("norms").field("enabled", true).endObject().endObject() - .startObject("short_field").field("type", "short").startObject("norms").field("enabled", true).endObject().endObject() - .endObject().endObject().endObject() - .string(); - - DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping)); - try { - docMapper.parse("test", "person", "1", XContentFactory.jsonBuilder().startObject() - .startObject("str_field").field("foo", "bar") - .endObject().endObject().bytes()).rootDoc(); - fail(); - } catch (Exception ex) { - assertThat(ex, instanceOf(MapperParsingException.class)); - } - - try { - docMapper.parse("test", "person", "1", XContentFactory.jsonBuilder().startObject() - .startObject("int_field").field("foo", "bar") - .endObject().endObject().bytes()).rootDoc(); - fail(); - } catch (Exception ex) { - assertThat(ex, instanceOf(MapperParsingException.class)); - } - - try { - docMapper.parse("test", "person", "1", XContentFactory.jsonBuilder().startObject() - .startObject("byte_field").field("foo", "bar") - .endObject().endObject().bytes()).rootDoc(); - fail(); - } catch (Exception ex) { - assertThat(ex, instanceOf(MapperParsingException.class)); - } - - try { - docMapper.parse("test", "person", "1", XContentFactory.jsonBuilder().startObject() - .startObject("date_field").field("foo", "bar") - .endObject().endObject().bytes()).rootDoc(); - fail(); - } catch (Exception ex) { - assertThat(ex, instanceOf(MapperParsingException.class)); - } - - try { - docMapper.parse("test", "person", "1", XContentFactory.jsonBuilder().startObject() - .startObject("double_field").field("foo", "bar") - .endObject().endObject().bytes()).rootDoc(); - fail(); - } catch (Exception ex) { - assertThat(ex, instanceOf(MapperParsingException.class)); - } - - try { - docMapper.parse("test", "person", "1", XContentFactory.jsonBuilder().startObject() - .startObject("float_field").field("foo", "bar") - .endObject().endObject().bytes()).rootDoc(); - fail(); - } catch (Exception ex) { - assertThat(ex, instanceOf(MapperParsingException.class)); - } - - try { - docMapper.parse("test", "person", "1", 
XContentFactory.jsonBuilder().startObject() - .startObject("long_field").field("foo", "bar") - .endObject().endObject().bytes()).rootDoc(); - fail(); - } catch (Exception ex) { - assertThat(ex, instanceOf(MapperParsingException.class)); - } - - try { - docMapper.parse("test", "person", "1", XContentFactory.jsonBuilder().startObject() - .startObject("short_field").field("foo", "bar") - .endObject().endObject().bytes()).rootDoc(); - fail(); - } catch (Exception ex) { - assertThat(ex, instanceOf(MapperParsingException.class)); - } - - } - -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java index 229c295ab19..8e508d084e0 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java @@ -216,7 +216,7 @@ public class FieldNamesFieldMapperTests extends ESSingleNodeTestCase { } @Override - protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException { + protected void parseCreateField(ParseContext context, List<IndexableField> fields) throws IOException { } @Override diff --git a/core/src/test/java/org/elasticsearch/index/mapper/GeoEncodingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/GeoEncodingTests.java deleted file mode 100644 index 4840dcc71a7..00000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/GeoEncodingTests.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License.
- */ - -package org.elasticsearch.index.mapper; - -import org.elasticsearch.common.geo.GeoDistance; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.unit.DistanceUnit; -import org.elasticsearch.common.unit.DistanceUnit.Distance; -import org.elasticsearch.test.ESTestCase; - -import java.util.Arrays; - -import static org.hamcrest.Matchers.lessThanOrEqualTo; - - -public class GeoEncodingTests extends ESTestCase { - - public void test() { - for (int i = 0; i < 10000; ++i) { - final double lat = randomDouble() * 180 - 90; - final double lon = randomDouble() * 360 - 180; - final Distance precision = new Distance(1+(randomDouble() * 9), randomFrom(Arrays.asList(DistanceUnit.MILLIMETERS, DistanceUnit.METERS, DistanceUnit.KILOMETERS))); - final LegacyGeoPointFieldMapper.Encoding encoding = LegacyGeoPointFieldMapper.Encoding.of(precision); - assertThat(encoding.precision().convert(DistanceUnit.METERS).value, lessThanOrEqualTo(precision.convert(DistanceUnit.METERS).value)); - final GeoPoint geoPoint = encoding.decode(encoding.encodeCoordinate(lat), encoding.encodeCoordinate(lon), new GeoPoint()); - final double error = GeoDistance.PLANE.calculate(lat, lon, geoPoint.lat(), geoPoint.lon(), DistanceUnit.METERS); - assertThat(error, lessThanOrEqualTo(precision.convert(DistanceUnit.METERS).value)); - } - } - -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java index a94ff589228..d2a7e5a902a 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.spatial.geopoint.document.GeoPointField; -import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; @@ -31,7 +30,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.search.SearchHitField; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; @@ -39,20 +37,15 @@ import org.elasticsearch.test.geo.RandomGeoGenerator; import org.hamcrest.CoreMatchers; import java.util.Collection; -import java.util.List; -import java.util.Map; import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.elasticsearch.common.geo.GeoHashUtils.stringEncode; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; -import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { @@ -61,115 +54,10 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { return pluginList(InternalSettingsPlugin.class); } 
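The tests that remain in GeoPointFieldMapperTests now build the geo_point mapping without the lat_lon, geohash, or precision options that the deleted branches exercised. A minimal sketch of such a mapping, using the same XContentFactory builder calls the tests themselves use; the type and field names are illustrative:

    import org.elasticsearch.common.xcontent.XContentBuilder;
    import org.elasticsearch.common.xcontent.XContentFactory;

    public class GeoPointMappingExample {
        public static void main(String[] args) throws Exception {
            // A plain geo_point mapping: no lat_lon, geohash, or geohash_precision parameters.
            XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                    .startObject("properties").startObject("point").field("type", "geo_point").endObject().endObject()
                    .endObject().endObject();
            System.out.println(mapping.string());
        }
    }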
- public void testLegacyLatLonValues() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).endObject().endObject() - .endObject().endObject().string(); - - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha5); - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startObject("point").field("lat", 1.2).field("lon", 1.3).endObject() - .endObject() - .bytes()); - - boolean indexCreatedBefore22 = version.before(Version.V_2_2_0); - assertThat(doc.rootDoc().getField("point.lat"), notNullValue()); - final boolean stored = false; - assertThat(doc.rootDoc().getField("point.lat").fieldType().stored(), is(stored)); - assertThat(doc.rootDoc().getField("point.lon"), notNullValue()); - assertThat(doc.rootDoc().getField("point.lon").fieldType().stored(), is(stored)); - assertThat(doc.rootDoc().getField("point.geohash"), nullValue()); - if (indexCreatedBefore22) { - assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); - } else { - assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); - } - } - - public void testLegacyLatLonValuesWithGeohash() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) - .field("geohash", true).endObject().endObject() - .endObject().endObject().string(); - - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha5); - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startObject("point").field("lat", 1.2).field("lon", 1.3).endObject() - .endObject() - .bytes()); - - assertThat(doc.rootDoc().getField("point.lat"), notNullValue()); - assertThat(doc.rootDoc().getField("point.lon"), notNullValue()); - if (version.onOrAfter(Version.V_5_0_0_alpha1)) { - assertThat(doc.rootDoc().getBinaryValue("point.geohash"), equalTo(new BytesRef(stringEncode(1.3, 1.2)))); - } else { - assertThat(doc.rootDoc().get("point.geohash"), equalTo(stringEncode(1.3, 1.2))); - } - } - - public void testLegacyLatLonInOneValueWithGeohash() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) - .field("geohash", true).endObject().endObject().endObject().endObject().string(); - - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha5); - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new 
CompressedXContent(mapping)); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("point", "1.2,1.3") - .endObject() - .bytes()); - - assertThat(doc.rootDoc().getField("point.lat"), notNullValue()); - assertThat(doc.rootDoc().getField("point.lon"), notNullValue()); - if (version.onOrAfter(Version.V_5_0_0_alpha1)) { - assertThat(doc.rootDoc().getBinaryValue("point.geohash"), equalTo(new BytesRef(stringEncode(1.3, 1.2)))); - } else { - assertThat(doc.rootDoc().get("point.geohash"), equalTo(stringEncode(1.3, 1.2))); - } - } - - public void testLegacyGeoHashIndexValue() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) - .field("geohash", true).endObject().endObject().endObject().endObject().string(); - - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha5); - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("point", stringEncode(1.3, 1.2)) - .endObject() - .bytes()); - - assertThat(doc.rootDoc().getField("point.lat"), notNullValue()); - assertThat(doc.rootDoc().getField("point.lon"), notNullValue()); - if (version.onOrAfter(Version.V_5_0_0_alpha1)) { - assertThat(doc.rootDoc().getBinaryValue("point.geohash"), equalTo(new BytesRef(stringEncode(1.3, 1.2)))); - } else { - assertThat(doc.rootDoc().get("point.geohash"), equalTo(stringEncode(1.3, 1.2))); - } - } - public void testGeoHashValue() throws Exception { Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point"); - if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - xContentBuilder = xContentBuilder.field("lat_lon", true); - } String mapping = xContentBuilder.endObject().endObject().endObject().endObject().string(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -181,204 +69,12 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .bytes()); assertThat(doc.rootDoc().getField("point"), notNullValue()); - if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - assertThat(doc.rootDoc().getField("point.lat"), notNullValue()); - assertThat(doc.rootDoc().getField("point.lon"), notNullValue()); - } - } - - public void testNormalizeLegacyLatLonValuesDefault() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha5); - // default to normalize - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point"); - if (version.before(Version.V_2_2_0)) { - mapping.field("coerce", true); - } - mapping.field("ignore_malformed", 
true).endObject().endObject().endObject().endObject(); - - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string())); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startObject("point").field("lat", 91).field("lon", 181).endObject() - .endObject() - .bytes()); - - if (version.before(Version.V_2_2_0)) { - assertThat(doc.rootDoc().get("point"), equalTo("89.0,1.0")); - } else { - assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoPointField.encodeLatLon(89.0, 1.0))); - } - - doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startObject("point").field("lat", -91).field("lon", -181).endObject() - .endObject() - .bytes()); - - if (version.before(Version.V_2_2_0)) { - assertThat(doc.rootDoc().get("point"), equalTo("-89.0,-1.0")); - } else { - assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoPointField.encodeLatLon(-89.0, -1.0))); - } - - doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startObject("point").field("lat", 181).field("lon", 361).endObject() - .endObject() - .bytes()); - - if (version.before(Version.V_2_2_0)) { - assertThat(doc.rootDoc().get("point"), equalTo("-1.0,-179.0")); - } else { - assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoPointField.encodeLatLon(-1.0, -179.0))); - } - } - - public void testLegacyValidateLatLonValues() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha5); - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true); - if (version.before(Version.V_2_2_0)) { - mapping.field("coerce", false); - } - mapping.field("ignore_malformed", false).endObject().endObject().endObject().endObject().string(); - - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string())); - - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startObject("point").field("lat", 90).field("lon", 1.3).endObject() - .endObject() - .bytes()); - - expectThrows(MapperParsingException.class, () -> - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startObject("point").field("lat", -91).field("lon", 1.3).endObject() - .endObject() - .bytes())); - - expectThrows(MapperParsingException.class, () -> - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startObject("point").field("lat", 91).field("lon", 1.3).endObject() - .endObject() - .bytes())); - - expectThrows(MapperParsingException.class, () -> - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startObject("point").field("lat", 1.2).field("lon", -181).endObject() - .endObject() - .bytes())); - - expectThrows(MapperParsingException.class, () -> - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startObject("point").field("lat", 1.2).field("lon", 
181).endObject() - .endObject() - .bytes())); - - MapperParsingException e = expectThrows(MapperParsingException.class, () -> - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startObject("point").field("lat", "-").field("lon", 1.3).endObject() - .endObject() - .bytes())); - assertThat(e.getRootCause(), instanceOf(NumberFormatException.class)); - assertThat(e.getRootCause().toString(), containsString("java.lang.NumberFormatException: For input string: \"-\"")); - - e = expectThrows(MapperParsingException.class, () -> - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startObject("point").field("lat", 1.2).field("lon", "-").endObject() - .endObject() - .bytes())); - assertThat(e.getRootCause(), instanceOf(NumberFormatException.class)); - assertThat(e.getRootCause().toString(), containsString("java.lang.NumberFormatException: For input string: \"-\"")); - - e = expectThrows(MapperParsingException.class, () -> - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startObject("point").field("lat", "-").field("lon", "-").endObject() - .endObject() - .bytes())); - assertThat(e.getRootCause(), instanceOf(NumberFormatException.class)); - assertThat(e.getRootCause().toString(), containsString("java.lang.NumberFormatException: For input string: \"-\"")); - } - - public void testNoValidateLegacyLatLonValues() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha5); - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true); - if (version.before(Version.V_2_2_0)) { - mapping.field("coerce", false); - } - mapping.field("ignore_malformed", true).endObject().endObject().endObject().endObject().string(); - - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string())); - - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startObject("point").field("lat", 90).field("lon", 1.3).endObject() - .endObject() - .bytes()); - - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startObject("point").field("lat", -91).field("lon", 1.3).endObject() - .endObject() - .bytes()); - - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startObject("point").field("lat", 91).field("lon", 1.3).endObject() - .endObject() - .bytes()); - - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startObject("point").field("lat", 1.2).field("lon", -181).endObject() - .endObject() - .bytes()); - - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startObject("point").field("lat", 1.2).field("lon", 181).endObject() - .endObject() - .bytes()); - - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startObject("point").field("lat", "-").field("lon", 1.3).endObject() - .endObject() - .bytes()); - - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startObject("point").field("lat", 1.2).field("lon", "-").endObject() - .endObject() - .bytes()); - - 
defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startObject("point").field("lat", "-").field("lon", "-").endObject() - .endObject() - .bytes()); } public void testLatLonValuesStored() throws Exception { Version version = VersionUtils.randomVersionBetween(random(), Version.CURRENT, Version.CURRENT); XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point"); - if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - xContentBuilder = xContentBuilder.field("lat_lon", true); - } String mapping = xContentBuilder.field("store", true).endObject().endObject().endObject().endObject().string(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -390,27 +86,12 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .bytes()); assertThat(doc.rootDoc().getField("point"), notNullValue()); - if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - assertThat(doc.rootDoc().getField("point.lat"), notNullValue()); - assertThat(doc.rootDoc().getField("point.lat").numericValue().doubleValue(), equalTo(1.2)); - assertThat(doc.rootDoc().getField("point.lon"), notNullValue()); - assertThat(doc.rootDoc().getField("point.lon").numericValue().doubleValue(), equalTo(1.3)); - assertThat(doc.rootDoc().getField("point.geohash"), nullValue()); - if (version.before(Version.V_2_2_0)) { - assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); - } else { - assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); - } - } } public void testArrayLatLonValues() throws Exception { Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point").field("doc_values", false); - if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - xContentBuilder = xContentBuilder.field("lat_lon", true); - } String mapping = xContentBuilder.field("store", true).endObject().endObject().endObject().endObject().string(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -426,38 +107,13 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { // doc values are enabled by default, but in this test we disable them; we should only have 2 points assertThat(doc.rootDoc().getFields("point"), notNullValue()); - if (version.onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - assertThat(doc.rootDoc().getFields("point").length, equalTo(4)); - } else { - assertThat(doc.rootDoc().getFields("point").length, equalTo(2)); - } - if (version.before(Version.V_5_0_0_alpha2)) { - assertThat(doc.rootDoc().getFields("point.lat").length, equalTo(2)); - assertThat(doc.rootDoc().getFields("point.lon").length, equalTo(2)); - assertThat(doc.rootDoc().getFields("point.lat")[0].numericValue().doubleValue(), equalTo(1.2)); - 
assertThat(doc.rootDoc().getFields("point.lon")[0].numericValue().doubleValue(), equalTo(1.3)); - if (version.before(Version.V_2_2_0)) { - assertThat(doc.rootDoc().getFields("point")[0].stringValue(), equalTo("1.2,1.3")); - } else { - assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); - } - assertThat(doc.rootDoc().getFields("point.lat")[1].numericValue().doubleValue(), equalTo(1.4)); - assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.5)); - if (version.before(Version.V_2_2_0)) { - assertThat(doc.rootDoc().getFields("point")[1].stringValue(), equalTo("1.4,1.5")); - } else { - assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(GeoPointField.encodeLatLon(1.4, 1.5))); - } - } + assertThat(doc.rootDoc().getFields("point").length, equalTo(4)); } public void testLatLonInOneValue() throws Exception { Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point"); - if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - xContentBuilder = xContentBuilder.field("lat_lon", true); - } String mapping = xContentBuilder.endObject().endObject().endObject().endObject().string(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", @@ -470,24 +126,12 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .bytes()); assertThat(doc.rootDoc().getField("point"), notNullValue()); - if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - assertThat(doc.rootDoc().getField("point.lat"), notNullValue()); - assertThat(doc.rootDoc().getField("point.lon"), notNullValue()); - if (version.before(Version.V_2_2_0)) { - assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); - } else { - assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); - } - } } public void testLatLonInOneValueStored() throws Exception { Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point"); - if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - xContentBuilder = xContentBuilder.field("lat_lon", true); - } String mapping = xContentBuilder.field("store", true).endObject().endObject().endObject().endObject().string(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", @@ -499,27 +143,12 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .endObject() .bytes()); assertThat(doc.rootDoc().getField("point"), notNullValue()); - if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - assertThat(doc.rootDoc().getField("point.lat"), notNullValue()); - assertThat(doc.rootDoc().getField("point.lat").numericValue().doubleValue(), equalTo(1.2)); - assertThat(doc.rootDoc().getField("point.lon"), notNullValue()); - 
assertThat(doc.rootDoc().getField("point.lon").numericValue().doubleValue(), equalTo(1.3)); - if (version.before(Version.V_2_2_0)) { - assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); - } else { - assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), - equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); - } - } } public void testLatLonInOneValueArray() throws Exception { Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point").field("doc_values", false); - if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - xContentBuilder = xContentBuilder.field("lat_lon", true); - } String mapping = xContentBuilder.field("store", true).endObject().endObject().endObject().endObject().string(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", @@ -536,36 +165,13 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { // doc values are enabled by default, but in this test we disable them; we should only have 2 points assertThat(doc.rootDoc().getFields("point"), notNullValue()); - if (version.before(Version.V_5_0_0_alpha2)) { - assertThat(doc.rootDoc().getFields("point").length, equalTo(2)); - assertThat(doc.rootDoc().getFields("point.lat").length, equalTo(2)); - assertThat(doc.rootDoc().getFields("point.lon").length, equalTo(2)); - assertThat(doc.rootDoc().getFields("point.lat")[0].numericValue().doubleValue(), equalTo(1.2)); - assertThat(doc.rootDoc().getFields("point.lon")[0].numericValue().doubleValue(), equalTo(1.3)); - assertThat(doc.rootDoc().getFields("point.lat")[1].numericValue().doubleValue(), equalTo(1.4)); - assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.5)); - } else if (version.onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - assertThat(doc.rootDoc().getFields("point").length, equalTo(4)); - } - if (version.before(Version.V_2_2_0)) { - assertThat(doc.rootDoc().getFields("point")[0].stringValue(), equalTo("1.2,1.3")); - } else if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); - } - if (version.before(Version.V_2_2_0)) { - assertThat(doc.rootDoc().getFields("point")[1].stringValue(), equalTo("1.4,1.5")); - } else if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(GeoPointField.encodeLatLon(1.4, 1.5))); - } + assertThat(doc.rootDoc().getFields("point").length, equalTo(4)); } public void testLonLatArray() throws Exception { Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point"); - if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - xContentBuilder = xContentBuilder.field("lat_lon", true); - } String mapping = xContentBuilder.endObject().endObject().endObject().endObject().string(); Settings settings = 
Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -577,15 +183,6 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .bytes()); assertThat(doc.rootDoc().getField("point"), notNullValue()); - if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - assertThat(doc.rootDoc().getField("point.lat"), notNullValue()); - assertThat(doc.rootDoc().getField("point.lon"), notNullValue()); - if (version.before(Version.V_2_2_0)) { - assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); - } else { - assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); - } - } } public void testLonLatArrayDynamic() throws Exception { @@ -593,9 +190,6 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") .startArray("dynamic_templates").startObject().startObject("point").field("match", "point*") .startObject("mapping").field("type", "geo_point"); - if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - xContentBuilder = xContentBuilder.field("lat_lon", true); - } String mapping = xContentBuilder.endObject().endObject().endObject().endArray().endObject().endObject().string(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -607,24 +201,12 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .bytes()); assertThat(doc.rootDoc().getField("point"), notNullValue()); - if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - assertThat(doc.rootDoc().getField("point.lat"), notNullValue()); - assertThat(doc.rootDoc().getField("point.lon"), notNullValue()); - if (version.before(Version.V_2_2_0)) { - assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); - } else { - assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); - } - } } public void testLonLatArrayStored() throws Exception { Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point"); - if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - xContentBuilder = xContentBuilder.field("lat_lon", true); - } String mapping = xContentBuilder.field("store", true).endObject().endObject().endObject().endObject().string(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); @@ -636,28 +218,13 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .bytes()); assertThat(doc.rootDoc().getField("point"), notNullValue()); - if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - assertThat(doc.rootDoc().getField("point.lat"), notNullValue()); - assertThat(doc.rootDoc().getField("point.lat").numericValue().doubleValue(), 
equalTo(1.2)); - assertThat(doc.rootDoc().getField("point.lon"), notNullValue()); - assertThat(doc.rootDoc().getField("point.lon").numericValue().doubleValue(), equalTo(1.3)); - if (version.before(Version.V_2_2_0)) { - assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); - } else { - assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); - } - } else { - assertThat(doc.rootDoc().getFields("point").length, equalTo(3)); - } + assertThat(doc.rootDoc().getFields("point").length, equalTo(3)); } public void testLonLatArrayArrayStored() throws Exception { Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point"); - if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - xContentBuilder = xContentBuilder.field("lat_lon", true); - } String mapping = xContentBuilder.field("store", true).field("doc_values", false).endObject().endObject() .endObject().endObject().string(); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); @@ -673,209 +240,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { .bytes()); assertThat(doc.rootDoc().getFields("point"), notNullValue()); - if (version.before(Version.V_5_0_0_alpha2)) { - assertThat(doc.rootDoc().getFields("point").length, CoreMatchers.equalTo(2)); - assertThat(doc.rootDoc().getFields("point.lat").length, equalTo(2)); - assertThat(doc.rootDoc().getFields("point.lon").length, equalTo(2)); - assertThat(doc.rootDoc().getFields("point.lat")[0].numericValue().doubleValue(), equalTo(1.2)); - assertThat(doc.rootDoc().getFields("point.lon")[0].numericValue().doubleValue(), equalTo(1.3)); - if (version.before(Version.V_2_2_0)) { - assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); - } else { - assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoPointField.encodeLatLon(1.2, 1.3))); - } - assertThat(doc.rootDoc().getFields("point.lat")[1].numericValue().doubleValue(), equalTo(1.4)); - assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.5)); - if (version.before(Version.V_2_2_0)) { - assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3")); - } else { - assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(GeoPointField.encodeLatLon(1.4, 1.5))); - } - } else if (version.onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - assertThat(doc.rootDoc().getFields("point").length, CoreMatchers.equalTo(4)); - } - } - - - /** - * Test that expected exceptions are thrown when creating a new index with deprecated options - */ - public void testOptionDeprecation() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapperParser parser = createIndex("test", settings).mapperService().documentMapperParser(); - // test deprecation exceptions on newly created indexes - if (version.onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - try { - String normalizeMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", 
"geo_point").field("lat_lon", true).endObject().endObject() - .endObject().endObject().string(); - parser.parse("type", new CompressedXContent(normalizeMapping)); - } catch (MapperParsingException e) { - assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [lat_lon : true]"); - } - } - - if (version.onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - String normalizeMapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") - .startObject("point").field("type", "geo_point").field("geohash", true).endObject().endObject().endObject().endObject() - .string(); - Exception e = expectThrows(MapperParsingException.class, () -> - parser.parse("type", new CompressedXContent(normalizeMapping))); - assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [geohash : true]"); - } - - { - XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") - .startObject("point").field("type", "geo_point"); - if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - xContentBuilder = xContentBuilder.field("lat_lon", true).field("geohash", true); - } - String validateMapping = xContentBuilder.field("validate", true).endObject().endObject().endObject().endObject().string(); - Exception e = expectThrows(MapperParsingException.class, () -> - parser.parse("type", new CompressedXContent(validateMapping))); - assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [validate : true]"); - } - - { - XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point"); - if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - xContentBuilder = xContentBuilder.field("lat_lon", true).field("geohash", true); - } - String validateMapping = xContentBuilder.field("validate_lat", true).endObject().endObject().endObject().endObject().string(); - Exception e = expectThrows(MapperParsingException.class, () -> - parser.parse("type", new CompressedXContent(validateMapping))); - assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [validate_lat : true]"); - } - - { - XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point"); - if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - xContentBuilder = xContentBuilder.field("lat_lon", true).field("geohash", true); - } - String validateMapping = xContentBuilder.field("validate_lon", true).endObject().endObject().endObject().endObject().string(); - Exception e = expectThrows(MapperParsingException.class, () -> - parser.parse("type", new CompressedXContent(validateMapping))); - assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [validate_lon : true]"); - } - - // test deprecated normalize - { - XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point"); - if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - xContentBuilder = xContentBuilder.field("lat_lon", true).field("geohash", true); - } - String normalizeMapping = xContentBuilder.field("normalize", true).endObject().endObject().endObject().endObject().string(); - 
Exception e = expectThrows(MapperParsingException.class, () -> - parser.parse("type", new CompressedXContent(normalizeMapping))); - assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [normalize : true]"); - } - - { - XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point"); - if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - xContentBuilder = xContentBuilder.field("lat_lon", true).field("geohash", true); - } - String normalizeMapping = xContentBuilder.field("normalize_lat", true).endObject().endObject().endObject().endObject().string(); - Exception e = expectThrows(MapperParsingException.class, () -> - parser.parse("type", new CompressedXContent(normalizeMapping))); - assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [normalize_lat : true]"); - } - - { - XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point"); - if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - xContentBuilder = xContentBuilder.field("lat_lon", true).field("geohash", true); - } - String normalizeMapping = xContentBuilder.field("normalize_lon", true).endObject().endObject().endObject().endObject().string(); - Exception e = expectThrows(MapperParsingException.class, () -> - parser.parse("type", new CompressedXContent(normalizeMapping))); - assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [normalize_lon : true]"); - } - } - - public void testLegacyGeoPointMapperMerge() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha5); - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - String stage1Mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) - .field("geohash", true).endObject().endObject().endObject().endObject().string(); - MapperService mapperService = createIndex("test", settings).mapperService(); - DocumentMapper stage1 = mapperService.merge("type", new CompressedXContent(stage1Mapping), MapperService.MergeReason.MAPPING_UPDATE, false); - String stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", false) - .field("geohash", false).endObject().endObject().endObject().endObject().string(); - Exception e = expectThrows(IllegalArgumentException.class, () -> - mapperService.merge("type", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE, false)); - assertThat(e.getMessage(), containsString("mapper [point] has different [lat_lon]")); - assertThat(e.getMessage(), containsString("mapper [point] has different [geohash]")); - assertThat(e.getMessage(), containsString("mapper [point] has different [geohash_precision]")); - - // correct mapping and ensure no failures - String stage2MappingCorrect = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true) - .field("geohash", true).endObject().endObject().endObject().endObject().string(); - 
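The legacy geohash search tests deleted just below stored a location.geohash sub-field and read it back with addStoredField. With that sub-field gone, an equivalent geohash (and any coarser prefix of it) can be computed client-side from the coordinates. A small sketch using the GeoHashUtils helper the test class already imports, with the New York test point from the deleted code; the chosen precision is arbitrary:

    import org.elasticsearch.common.geo.GeoHashUtils;

    public class GeoHashEncodeExample {
        public static void main(String[] args) {
            // stringEncode takes (lon, lat); the deleted test expects a "dr5regy6rc6y"-style hash for this point.
            String full = GeoHashUtils.stringEncode(-74.0059731, 40.7143528);
            String coarse = full.substring(0, 5); // lower precision = shorter prefix
            System.out.println(full + " -> " + coarse);
        }
    }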
mapperService.merge("type", new CompressedXContent(stage2MappingCorrect), MapperService.MergeReason.MAPPING_UPDATE, false); - } - - public void testLegacyGeoHashSearch() throws Exception { - // create a geo_point mapping with geohash enabled and random (between 1 and 12) geohash precision - int precision = randomIntBetween(1, 12); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("pin").startObject("properties").startObject("location") - .field("type", "geo_point").field("geohash", true).field("geohash_precision", precision).field("store", true).endObject() - .endObject().endObject().endObject().string(); - - // create index and add a test point (dr5regy6rc6z) - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha1); - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("test").setSettings(settings) - .addMapping("pin", mapping); - mappingRequest.execute().actionGet(); - client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet(); - client().prepareIndex("test", "pin", "1").setSource(jsonBuilder().startObject().startObject("location").field("lat", 40.7143528) - .field("lon", -74.0059731).endObject().endObject()).setRefreshPolicy(IMMEDIATE).get(); - - // match all search with geohash field - SearchResponse searchResponse = client().prepareSearch().addStoredField("location.geohash").setQuery(matchAllQuery()).execute().actionGet(); - Map m = searchResponse.getHits().getAt(0).getFields(); - - // ensure single geohash was indexed - assertEquals("dr5regy6rc6y".substring(0, precision), m.get("location.geohash").value()); - } - - public void testLegacyGeoHashSearchWithPrefix() throws Exception { - // create a geo_point mapping with geohash enabled and random (between 1 and 12) geohash precision - int precision = randomIntBetween(1, 12); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("pin").startObject("properties").startObject("location") - .field("type", "geo_point").field("geohash_prefix", true).field("geohash_precision", precision).field("store", true) - .endObject().endObject().endObject().endObject().string(); - - // create index and add a test point (dr5regy6rc6z) - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha5); - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("test").setSettings(settings) - .addMapping("pin", mapping); - mappingRequest.execute().actionGet(); - client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet(); - client().prepareIndex("test", "pin", "1").setSource(jsonBuilder().startObject().startObject("location").field("lat", 40.7143528) - .field("lon", -74.0059731).endObject().endObject()).setRefreshPolicy(IMMEDIATE).get(); - - // match all search with geohash field (includes prefixes) - SearchResponse searchResponse = client().prepareSearch().addStoredField("location.geohash").setQuery(matchAllQuery()).execute().actionGet(); - Map m = searchResponse.getHits().getAt(0).getFields(); - - List hashes = m.get("location.geohash").values(); - - final int numHashes = hashes.size(); - for(int i=0; i parser.parse("type", new 
CompressedXContent(mapping)) ); assertThat(e.getMessage(), containsString("name cannot be empty string")); - - // before 5.x - Version oldVersion = VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_5); - Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, oldVersion).build(); - DocumentMapperParser parser2x = createIndex("test_old", oldIndexSettings).mapperService().documentMapperParser(); - - DocumentMapper defaultMapper = parser2x.parse("type", new CompressedXContent(mapping)); - assertEquals(mapping, defaultMapper.mappingSource().string()); } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldTypeTests.java index fac30002fbb..aba53d9e4cf 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldTypeTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldTypeTests.java @@ -18,29 +18,11 @@ */ package org.elasticsearch.index.mapper; -import org.elasticsearch.index.mapper.BaseGeoPointFieldMapper.LegacyGeoPointFieldType; -import org.junit.Before; +import org.elasticsearch.index.mapper.BaseGeoPointFieldMapper.GeoPointFieldType; public class GeoPointFieldTypeTests extends FieldTypeTestCase { @Override protected MappedFieldType createDefaultFieldType() { - return new LegacyGeoPointFieldType(); - } - - @Before - public void setupProperties() { - addModifier(new Modifier("geohash", false) { - @Override - public void modify(MappedFieldType ft) { - ((LegacyGeoPointFieldType)ft).setGeoHashEnabled(new StringFieldMapper.StringFieldType(), 1, true); - } - }); - addModifier(new Modifier("lat_lon", false) { - @Override - public void modify(MappedFieldType ft) { - ((LegacyGeoPointFieldType)ft).setLatLonEnabled(new LegacyDoubleFieldMapper.DoubleFieldType(), - new LegacyDoubleFieldMapper.DoubleFieldType()); - } - }); + return new GeoPointFieldType(); } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java index 4a22d56e8a9..572188d7a5d 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java @@ -451,16 +451,6 @@ public class GeoShapeFieldMapperTests extends ESSingleNodeTestCase { () -> parser.parse("type1", new CompressedXContent(mapping)) ); assertThat(e.getMessage(), containsString("name cannot be empty string")); - - // before 5.x - Version oldVersion = VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_5); - Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, oldVersion).build(); - DocumentMapperParser parser2x = createIndex("test_old", oldIndexSettings).mapperService().documentMapperParser(); - - e = expectThrows(IllegalArgumentException.class, - () -> parser2x.parse("type1", new CompressedXContent(mapping)) - ); - assertThat(e.getMessage(), containsString("fieldName is required")); } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/IndexFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/IndexFieldMapperTests.java index eb74297e8e4..f0a0b818f9d 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/IndexFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/IndexFieldMapperTests.java @@ -70,13 +70,4 @@ public class 
IndexFieldMapperTests extends ESSingleNodeTestCase { assertEquals("_index is not configurable", e.getMessage()); } - public void testBwCompatIndexNotConfigurable() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_index").endObject() - .endObject().endObject().string(); - DocumentMapperParser parser = createIndex("test", - Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build()) - .mapperService().documentMapperParser(); - parser.parse("type", new CompressedXContent(mapping)); // no exception - } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java index 3d3a69ea800..682e487035b 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java @@ -23,40 +23,30 @@ import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.network.InetAddresses; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.DocumentMapperParser; -import org.elasticsearch.index.mapper.IpFieldMapper; -import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.VersionUtils; import org.junit.Before; -import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom; -import static org.hamcrest.Matchers.containsString; - import java.io.IOException; import java.net.InetAddress; import java.util.Collection; +import static org.hamcrest.Matchers.containsString; + public class IpFieldMapperTests extends ESSingleNodeTestCase { IndexService indexService; DocumentMapperParser parser; @Before - public void before() { + public void setup() { indexService = createIndex("test"); parser = indexService.mapperService().documentMapperParser(); } @@ -313,14 +303,5 @@ public class IpFieldMapperTests extends ESSingleNodeTestCase { () -> parser.parse("type", new CompressedXContent(mapping)) ); assertThat(e.getMessage(), containsString("name cannot be empty string")); - - // before 5.x - Version oldVersion = VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_5); - Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, oldVersion).build(); - indexService = createIndex("test_old", oldIndexSettings); - parser = indexService.mapperService().documentMapperParser(); - - DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - assertEquals(mapping, defaultMapper.mappingSource().string()); } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java 
b/core/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java index 396cbe49ee9..6d3c5bcbce1 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java @@ -24,23 +24,18 @@ import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableFieldType; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.VersionUtils; import org.junit.Before; import java.io.IOException; import java.util.Arrays; import java.util.Collection; -import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -55,7 +50,7 @@ public class KeywordFieldMapperTests extends ESSingleNodeTestCase { DocumentMapperParser parser; @Before - public void before() { + public void setup() { indexService = createIndex("test"); parser = indexService.mapperService().documentMapperParser(); } @@ -268,37 +263,6 @@ public class KeywordFieldMapperTests extends ESSingleNodeTestCase { assertEquals(mapping, mapper.mappingSource().toString()); } - public void testBoostImplicitlyEnablesNormsOnOldIndex() throws IOException { - indexService = createIndex("test2", - Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build()); - parser = indexService.mapperService().documentMapperParser(); - - String mapping = XContentFactory.jsonBuilder().startObject() - .startObject("type") - .startObject("properties") - .startObject("field") - .field("type", "keyword") - .field("boost", 2f) - .endObject() - .endObject() - .endObject().endObject().string(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - - String expectedMapping = XContentFactory.jsonBuilder().startObject() - .startObject("type") - .startObject("properties") - .startObject("field") - .field("type", "string") - .field("boost", 2f) - .field("index", "not_analyzed") - .field("norms", true) - .field("fielddata", false) - .endObject() - .endObject() - .endObject().endObject().string(); - assertEquals(expectedMapping, mapper.mappingSource().toString()); - } - public void testEnableNorms() throws IOException { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", "keyword").field("norms", true).endObject().endObject() @@ -334,24 +298,5 @@ public class KeywordFieldMapperTests extends ESSingleNodeTestCase { () -> parser.parse("type", new CompressedXContent(mapping)) ); assertThat(e.getMessage(), containsString("name cannot be empty string")); - - // empty name allowed in index created before 5.0 - Version oldVersion = VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_5); - Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, oldVersion).build(); - indexService = createIndex("test_old", oldIndexSettings); - parser = 
indexService.mapperService().documentMapperParser(); - - DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - String downgradedMapping = XContentFactory.jsonBuilder().startObject() - .startObject("type") - .startObject("properties") - .startObject("") - .field("type", "string") - .field("index", "not_analyzed") - .field("fielddata", false) - .endObject() - .endObject() - .endObject().endObject().string(); - assertEquals(downgradedMapping, defaultMapper.mappingSource().string()); } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/LegacyByteFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/LegacyByteFieldTypeTests.java deleted file mode 100644 index 2f3a4ca6fec..00000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/LegacyByteFieldTypeTests.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.index.mapper; - -import org.junit.Before; - -public class LegacyByteFieldTypeTests extends FieldTypeTestCase { - @Override - protected MappedFieldType createDefaultFieldType() { - return new LegacyByteFieldMapper.ByteFieldType(); - } - - @Before - public void setupProperties() { - setDummyNullValue((byte)10); - } - - public void testValueForSearch() { - MappedFieldType ft = createDefaultFieldType(); - // bytes are stored as ints - assertEquals(Byte.valueOf((byte) 3), ft.valueForDisplay(Integer.valueOf(3))); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/LegacyDateFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/LegacyDateFieldMapperTests.java deleted file mode 100644 index 19f67c488b1..00000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/LegacyDateFieldMapperTests.java +++ /dev/null @@ -1,495 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.analysis.LegacyNumericTokenStream.LegacyNumericTermAttribute; -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.index.DocValuesType; -import org.apache.lucene.index.IndexableField; -import org.apache.lucene.search.LegacyNumericRangeQuery; -import org.apache.lucene.util.Constants; -import org.elasticsearch.Version; -import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; -import org.elasticsearch.action.index.IndexResponse; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.LocaleUtils; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.mapper.ParseContext.Document; -import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.junit.Before; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import java.util.Locale; -import java.util.Map; - -import static com.carrotsearch.randomizedtesting.RandomizedTest.systemPropertyAsBoolean; -import static org.elasticsearch.index.mapper.LegacyStringMappingTests.docValuesType; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.hasKey; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; - -public class LegacyDateFieldMapperTests extends ESSingleNodeTestCase { - - private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build(); - - @Override - protected Collection> getPlugins() { - return pluginList(InternalSettingsPlugin.class); - } - - public void testAutomaticDateParser() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").endObject() - .endObject().endObject().string(); - - IndexService index = createIndex("test", BW_SETTINGS); - client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping).get(); - DocumentMapper defaultMapper = index.mapperService().documentMapper("type"); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("date_field1", "2011/01/22") - .field("date_field2", "2011/01/22 00:00:00") - .field("wrong_date1", "-4") - .field("wrong_date2", "2012/2") - .field("wrong_date3", "2012/test") - .endObject() - .bytes()); - assertNotNull(doc.dynamicMappingsUpdate()); - client().admin().indices().preparePutMapping("test").setType("type").setSource(doc.dynamicMappingsUpdate().toString()).get(); - - defaultMapper = 
index.mapperService().documentMapper("type"); - FieldMapper fieldMapper = defaultMapper.mappers().smartNameFieldMapper("date_field1"); - assertThat(fieldMapper, instanceOf(LegacyDateFieldMapper.class)); - LegacyDateFieldMapper dateFieldMapper = (LegacyDateFieldMapper)fieldMapper; - assertEquals("yyyy/MM/dd HH:mm:ss||yyyy/MM/dd||epoch_millis", dateFieldMapper.fieldType().dateTimeFormatter().format()); - assertEquals(1265587200000L, dateFieldMapper.fieldType().dateTimeFormatter().parser().parseMillis("1265587200000")); - fieldMapper = defaultMapper.mappers().smartNameFieldMapper("date_field2"); - assertThat(fieldMapper, instanceOf(LegacyDateFieldMapper.class)); - - fieldMapper = defaultMapper.mappers().smartNameFieldMapper("wrong_date1"); - assertThat(fieldMapper, instanceOf(StringFieldMapper.class)); - fieldMapper = defaultMapper.mappers().smartNameFieldMapper("wrong_date2"); - assertThat(fieldMapper, instanceOf(StringFieldMapper.class)); - fieldMapper = defaultMapper.mappers().smartNameFieldMapper("wrong_date3"); - assertThat(fieldMapper, instanceOf(StringFieldMapper.class)); - } - - public void testParseLocal() { - assertThat(Locale.GERMAN, equalTo(LocaleUtils.parse("de"))); - assertThat(Locale.GERMANY, equalTo(LocaleUtils.parse("de_DE"))); - assertThat(new Locale("de","DE","DE"), equalTo(LocaleUtils.parse("de_DE_DE"))); - - try { - LocaleUtils.parse("de_DE_DE_DE"); - fail(); - } catch(IllegalArgumentException ex) { - // expected - } - assertThat(Locale.ROOT, equalTo(LocaleUtils.parse(""))); - assertThat(Locale.ROOT, equalTo(LocaleUtils.parse("ROOT"))); - } - - public void testLocale() throws IOException { - assumeFalse("Locals are buggy on JDK9EA", Constants.JRE_IS_MINIMUM_JAVA9 && systemPropertyAsBoolean("tests.security.manager", false)); - String mapping = XContentFactory.jsonBuilder() - .startObject() - .startObject("type") - .startObject("properties") - .startObject("date_field_default") - .field("type", "date") - .field("format", "E, d MMM yyyy HH:mm:ss Z") - .endObject() - .startObject("date_field_en") - .field("type", "date") - .field("format", "E, d MMM yyyy HH:mm:ss Z") - .field("locale", "EN") - .endObject() - .startObject("date_field_de") - .field("type", "date") - .field("format", "E, d MMM yyyy HH:mm:ss Z") - .field("locale", "DE_de") - .endObject() - .endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = mapper("test", "type", mapping); - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("date_field_en", "Wed, 06 Dec 2000 02:55:00 -0800") - .field("date_field_de", "Mi, 06 Dez 2000 02:55:00 -0800") - .field("date_field_default", "Wed, 06 Dec 2000 02:55:00 -0800") // check default - no exception is a success! 
- .endObject() - .bytes()); - assertNumericTokensEqual(doc, defaultMapper, "date_field_en", "date_field_de"); - assertNumericTokensEqual(doc, defaultMapper, "date_field_en", "date_field_default"); - } - - @Before - public void reset() { - i = 0; - } - - int i = 0; - - private DocumentMapper mapper(String indexName, String type, String mapping) throws IOException { - IndexService index = createIndex(indexName, BW_SETTINGS); - client().admin().indices().preparePutMapping(indexName).setType(type).setSource(mapping).get(); - return index.mapperService().documentMapper(type); - } - - private void assertNumericTokensEqual(ParsedDocument doc, DocumentMapper defaultMapper, String fieldA, String fieldB) throws IOException { - assertThat(doc.rootDoc().getField(fieldA).tokenStream(defaultMapper.mappers().indexAnalyzer(), null), notNullValue()); - assertThat(doc.rootDoc().getField(fieldB).tokenStream(defaultMapper.mappers().indexAnalyzer(), null), notNullValue()); - - TokenStream tokenStream = doc.rootDoc().getField(fieldA).tokenStream(defaultMapper.mappers().indexAnalyzer(), null); - tokenStream.reset(); - LegacyNumericTermAttribute nta = tokenStream.addAttribute(LegacyNumericTermAttribute.class); - List values = new ArrayList<>(); - while(tokenStream.incrementToken()) { - values.add(nta.getRawValue()); - } - - tokenStream = doc.rootDoc().getField(fieldB).tokenStream(defaultMapper.mappers().indexAnalyzer(), null); - tokenStream.reset(); - nta = tokenStream.addAttribute(LegacyNumericTermAttribute.class); - int pos = 0; - while(tokenStream.incrementToken()) { - assertThat(values.get(pos++), equalTo(nta.getRawValue())); - } - assertThat(pos, equalTo(values.size())); - } - - public void testTimestampAsDate() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("date_field").field("type", "date").endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = mapper("test", "type", mapping); - long value = System.currentTimeMillis(); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("date_field", value) - .endObject() - .bytes()); - - assertThat(doc.rootDoc().getField("date_field").tokenStream(defaultMapper.mappers().indexAnalyzer(), null), notNullValue()); - } - - public void testDateDetection() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .field("date_detection", false) - .startObject("properties").startObject("date_field").field("type", "date").endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = mapper("test", "type", mapping); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("date_field", "2010-01-01") - .field("date_field_x", "2010-01-01") - .endObject() - .bytes()); - - assertThat(doc.rootDoc().get("date_field"), equalTo("1262304000000")); - assertThat(doc.rootDoc().get("date_field_x"), equalTo("2010-01-01")); - } - - public void testHourFormat() throws Exception { - long nowInMillis = randomPositiveLong(); - Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1).build(); - QueryShardContext context = new QueryShardContext(0, - new 
IndexSettings(IndexMetaData.builder("foo").settings(indexSettings).build(), indexSettings), null, null, null, null, - null, null, null, null, null, () -> nowInMillis); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .field("date_detection", false) - .startObject("properties").startObject("date_field").field("type", "date").field("format", "HH:mm:ss").endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = mapper("test", "type", mapping); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("date_field", "10:00:00") - .endObject() - .bytes()); - assertThat(((LegacyLongFieldMapper.CustomLongNumericField) doc.rootDoc().getField("date_field")).numericAsString(), equalTo(Long.toString(new DateTime(TimeValue.timeValueHours(10).millis(), DateTimeZone.UTC).getMillis()))); - - LegacyNumericRangeQuery rangeQuery = (LegacyNumericRangeQuery) defaultMapper.mappers().smartNameFieldMapper("date_field").fieldType() - .rangeQuery("10:00:00", "11:00:00", true, true, context); - assertThat(rangeQuery.getMax(), equalTo(new DateTime(TimeValue.timeValueHours(11).millis(), DateTimeZone.UTC).getMillis() + 999)); - assertThat(rangeQuery.getMin(), equalTo(new DateTime(TimeValue.timeValueHours(10).millis(), DateTimeZone.UTC).getMillis())); - } - - public void testDayWithoutYearFormat() throws Exception { - long nowInMillis = randomPositiveLong(); - Settings indexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1).build(); - QueryShardContext context = new QueryShardContext(0, - new IndexSettings(IndexMetaData.builder("foo").settings(indexSettings).build(), indexSettings), null, null, null, null, - null, null, null, null, null, () -> nowInMillis); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .field("date_detection", false) - .startObject("properties").startObject("date_field").field("type", "date").field("format", "MMM dd HH:mm:ss").endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = mapper("test", "type", mapping); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("date_field", "Jan 02 10:00:00") - .endObject() - .bytes()); - assertThat(((LegacyLongFieldMapper.CustomLongNumericField) doc.rootDoc().getField("date_field")).numericAsString(), equalTo(Long.toString(new DateTime(TimeValue.timeValueHours(34).millis(), DateTimeZone.UTC).getMillis()))); - - LegacyNumericRangeQuery rangeQuery = (LegacyNumericRangeQuery) defaultMapper.mappers().smartNameFieldMapper("date_field").fieldType() - .rangeQuery("Jan 02 10:00:00", "Jan 02 11:00:00", true, true, context); - assertThat(rangeQuery.getMax(), equalTo(new DateTime(TimeValue.timeValueHours(35).millis() + 999, DateTimeZone.UTC).getMillis())); - assertThat(rangeQuery.getMin(), equalTo(new DateTime(TimeValue.timeValueHours(34).millis(), DateTimeZone.UTC).getMillis())); - } - - public void testIgnoreMalformedOption() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field1").field("type", "date").field("ignore_malformed", true).endObject() - .startObject("field2").field("type", "date").field("ignore_malformed", false).endObject() - 
.startObject("field3").field("type", "date").endObject() - .endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = mapper("test", "type", mapping); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field1", "a") - .field("field2", "2010-01-01") - .endObject() - .bytes()); - assertThat(doc.rootDoc().getField("field1"), nullValue()); - assertThat(doc.rootDoc().getField("field2"), notNullValue()); - - try { - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field2", "a") - .endObject() - .bytes()); - } catch (MapperParsingException e) { - assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); - assertThat(e.getMessage(), is("failed to parse [field2]")); - } - - // Verify that the default is false - try { - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field3", "a") - .endObject() - .bytes()); - } catch (MapperParsingException e) { - assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); - assertThat(e.getMessage(), is("failed to parse [field3]")); - } - - // Unless the global ignore_malformed option is set to true - Settings indexSettings = Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0) - .put("index.mapping.ignore_malformed", true) - .build(); - defaultMapper = createIndex("test2", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field3", "a") - .endObject() - .bytes()); - assertThat(doc.rootDoc().getField("field3"), nullValue()); - - // This should still throw an exception, since field2 is specifically set to ignore_malformed=false - try { - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field2", "a") - .endObject() - .bytes()); - } catch (MapperParsingException e) { - assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); - assertThat(e.getMessage(), is("failed to parse [field2]")); - } - } - - public void testThatMergingWorks() throws Exception { - String initialMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field").field("type", "date") - .field("format", "EEE MMM dd HH:mm:ss.S Z yyyy||EEE MMM dd HH:mm:ss.SSS Z yyyy") - .endObject() - .endObject() - .endObject().endObject().string(); - - String updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field") - .field("type", "date") - .field("format", "EEE MMM dd HH:mm:ss.S Z yyyy||EEE MMM dd HH:mm:ss.SSS Z yyyy||yyyy-MM-dd'T'HH:mm:ss.SSSZZ") - .endObject() - .endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = mapper("test1", "type", initialMapping); - DocumentMapper mergeMapper = mapper("test2", "type", updatedMapping); - - assertThat(defaultMapper.mappers().getMapper("field"), is(instanceOf(LegacyDateFieldMapper.class))); - LegacyDateFieldMapper initialDateFieldMapper = (LegacyDateFieldMapper) defaultMapper.mappers().getMapper("field"); - Map config = getConfigurationViaXContent(initialDateFieldMapper); - assertThat(config.get("format"), is("EEE MMM dd HH:mm:ss.S Z yyyy||EEE MMM dd HH:mm:ss.SSS Z yyyy")); - - defaultMapper = defaultMapper.merge(mergeMapper.mapping(), false); - - 
assertThat(defaultMapper.mappers().getMapper("field"), is(instanceOf(LegacyDateFieldMapper.class))); - - LegacyDateFieldMapper mergedFieldMapper = (LegacyDateFieldMapper) defaultMapper.mappers().getMapper("field"); - Map mergedConfig = getConfigurationViaXContent(mergedFieldMapper); - assertThat(mergedConfig.get("format"), is("EEE MMM dd HH:mm:ss.S Z yyyy||EEE MMM dd HH:mm:ss.SSS Z yyyy||yyyy-MM-dd'T'HH:mm:ss.SSSZZ")); - } - - public void testDefaultDocValues() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("date_field").field("type", "date").endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = mapper("test", "type", mapping); - - ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("date_field", "2010-01-01") - .endObject() - .bytes()); - ParseContext.Document doc = parsedDoc.rootDoc(); - assertEquals(DocValuesType.SORTED_NUMERIC, docValuesType(doc, "date_field")); - } - - private Map getConfigurationViaXContent(LegacyDateFieldMapper dateFieldMapper) throws IOException { - XContentBuilder builder = JsonXContent.contentBuilder().startObject(); - dateFieldMapper.toXContent(builder, ToXContent.EMPTY_PARAMS).endObject(); - Map dateFieldMapperMap; - try (XContentParser parser = JsonXContent.jsonXContent.createParser(builder.bytes())) { - dateFieldMapperMap = parser.map(); - } - assertThat(dateFieldMapperMap, hasKey("field")); - assertThat(dateFieldMapperMap.get("field"), is(instanceOf(Map.class))); - return (Map) dateFieldMapperMap.get("field"); - } - - private static long getDateAsMillis(Document doc, String field) { - for (IndexableField f : doc.getFields(field)) { - if (f.numericValue() != null) { - return f.numericValue().longValue(); - } - } - throw new AssertionError("missing"); - } - - public void testThatEpochCanBeIgnoredWithCustomFormat() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("date_field").field("type", "date").field("format", "yyyyMMddHH").endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = mapper("test1", "type", mapping); - - XContentBuilder document = XContentFactory.jsonBuilder() - .startObject() - .field("date_field", "2015060210") - .endObject(); - ParsedDocument doc = defaultMapper.parse("test", "type", "1", document.bytes()); - assertThat(getDateAsMillis(doc.rootDoc(), "date_field"), equalTo(1433239200000L)); - IndexResponse indexResponse = client().prepareIndex("test2", "test").setSource(document).get(); - assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); - - // integers should always be parsed as well... 
cannot be sure it is a unix timestamp only - doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("date_field", 2015060210) - .endObject() - .bytes()); - assertThat(getDateAsMillis(doc.rootDoc(), "date_field"), equalTo(1433239200000L)); - indexResponse = client().prepareIndex("test", "test").setSource(document).get(); - assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); - } - - public void testThatNewIndicesOnlyAllowStrictDates() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("date_field").field("type", "date").endObject().endObject() - .endObject().endObject().string(); - - IndexService index = createIndex("test", BW_SETTINGS); - client().admin().indices().preparePutMapping("test").setType("type").setSource(mapping).get(); - assertDateFormat(LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER.format()); - DocumentMapper defaultMapper = index.mapperService().documentMapper("type"); - - // also test normal date - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("date_field", "2015-06-06T00:00:44.000Z") - .endObject() - .bytes()); - - try { - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("date_field", "1-1-1T00:00:44.000Z") - .endObject() - .bytes()); - fail("non strict date indexing should have been failed"); - } catch (MapperParsingException e) { - assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); - } - } - - private void assertDateFormat(String expectedFormat) throws IOException { - GetMappingsResponse response = client().admin().indices().prepareGetMappings("test").setTypes("type").get(); - Map mappingMap = response.getMappings().get("test").get("type").getSourceAsMap(); - Map properties = (Map) mappingMap.get("properties"); - Map dateField = (Map) properties.get("date_field"); - assertThat((String) dateField.get("format"), is(expectedFormat)); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/LegacyDateFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/LegacyDateFieldTypeTests.java deleted file mode 100644 index 10a2a331a79..00000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/LegacyDateFieldTypeTests.java +++ /dev/null @@ -1,153 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.index.mapper; - -import org.apache.lucene.document.Field.Store; -import org.apache.lucene.document.LegacyLongField; -import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexWriter; -import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.index.MultiReader; -import org.apache.lucene.store.Directory; -import org.apache.lucene.util.IOUtils; -import org.elasticsearch.common.joda.DateMathParser; -import org.elasticsearch.common.joda.Joda; -import org.elasticsearch.index.mapper.LegacyDateFieldMapper.DateFieldType; -import org.elasticsearch.index.mapper.MappedFieldType.Relation; -import org.elasticsearch.index.mapper.ParseContext.Document; -import org.elasticsearch.index.query.QueryRewriteContext; -import org.joda.time.DateTimeZone; -import org.junit.Before; - -import java.io.IOException; -import java.util.Locale; -import java.util.concurrent.TimeUnit; - -public class LegacyDateFieldTypeTests extends FieldTypeTestCase { - @Override - protected MappedFieldType createDefaultFieldType() { - return new LegacyDateFieldMapper.DateFieldType(); - } - - private static long nowInMillis; - - @Before - public void setupProperties() { - setDummyNullValue(10); - addModifier(new Modifier("format", true) { - @Override - public void modify(MappedFieldType ft) { - ((LegacyDateFieldMapper.DateFieldType) ft).setDateTimeFormatter(Joda.forPattern("basic_week_date", Locale.ROOT)); - } - }); - addModifier(new Modifier("locale", true) { - @Override - public void modify(MappedFieldType ft) { - ((LegacyDateFieldMapper.DateFieldType) ft).setDateTimeFormatter(Joda.forPattern("date_optional_time", Locale.CANADA)); - } - }); - addModifier(new Modifier("numeric_resolution", true) { - @Override - public void modify(MappedFieldType ft) { - ((LegacyDateFieldMapper.DateFieldType)ft).setTimeUnit(TimeUnit.HOURS); - } - }); - nowInMillis = randomPositiveLong(); - } - - public void testIsFieldWithinQueryEmptyReader() throws IOException { - IndexReader reader = new MultiReader(); - DateFieldType ft = new DateFieldType(); - ft.setName("my_date"); - assertEquals(Relation.DISJOINT, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03", - randomBoolean(), randomBoolean(), null, null, null)); - } - - private void doTestIsFieldWithinQuery(DateFieldType ft, DirectoryReader reader, - DateTimeZone zone, DateMathParser alternateFormat) throws IOException { - QueryRewriteContext context = new QueryRewriteContext(null, null, null, null, null, null, null, () -> nowInMillis); - assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-09", "2016-01-02", - randomBoolean(), randomBoolean(), null, null, context)); - assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2016-01-02", "2016-06-20", - randomBoolean(), randomBoolean(), null, null, context)); - assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2016-01-02", "2016-02-12", - randomBoolean(), randomBoolean(), null, null, context)); - assertEquals(Relation.DISJOINT, ft.isFieldWithinQuery(reader, "2014-01-02", "2015-02-12", - randomBoolean(), randomBoolean(), null, null, context)); - assertEquals(Relation.DISJOINT, ft.isFieldWithinQuery(reader, "2016-05-11", "2016-08-30", - randomBoolean(), randomBoolean(), null, null, context)); - assertEquals(Relation.WITHIN, ft.isFieldWithinQuery(reader, "2015-09-25", "2016-05-29", - randomBoolean(), randomBoolean(), null, null, context)); - assertEquals(Relation.WITHIN, 
ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03", - true, true, null, null, context)); - assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03", - false, false, null, null, context)); - assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03", - false, true, null, null, context)); - assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-12", "2016-04-03", - true, false, null, null, context)); - } - - public void testIsFieldWithinQuery() throws IOException { - Directory dir = newDirectory(); - IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null)); - long instant1 = LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime("2015-10-12").getMillis(); - long instant2 = LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime("2016-04-03").getMillis(); - Document doc = new Document(); - LegacyLongField field = new LegacyLongField("my_date", instant1, Store.NO); - doc.add(field); - w.addDocument(doc); - field.setLongValue(instant2); - w.addDocument(doc); - DirectoryReader reader = DirectoryReader.open(w); - DateFieldType ft = new DateFieldType(); - ft.setName("my_date"); - DateMathParser alternateFormat = new DateMathParser(LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER); - doTestIsFieldWithinQuery(ft, reader, null, null); - doTestIsFieldWithinQuery(ft, reader, null, alternateFormat); - doTestIsFieldWithinQuery(ft, reader, DateTimeZone.UTC, null); - doTestIsFieldWithinQuery(ft, reader, DateTimeZone.UTC, alternateFormat); - IOUtils.close(reader, w, dir); - } - - public void testValueFormat() { - MappedFieldType ft = createDefaultFieldType(); - long instant = LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime("2015-10-12T14:10:55").getMillis(); - assertEquals("2015-10-12T14:10:55.000Z", - ft.docValueFormat(null, DateTimeZone.UTC).format(instant)); - assertEquals("2015-10-12T15:10:55.000+01:00", - ft.docValueFormat(null, DateTimeZone.forOffsetHours(1)).format(instant)); - assertEquals("2015", - createDefaultFieldType().docValueFormat("YYYY", DateTimeZone.UTC).format(instant)); - assertEquals(instant, - ft.docValueFormat(null, DateTimeZone.UTC).parseLong("2015-10-12T14:10:55", false, null)); - assertEquals(instant + 999, - ft.docValueFormat(null, DateTimeZone.UTC).parseLong("2015-10-12T14:10:55", true, null)); - assertEquals(LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime("2015-10-13").getMillis() - 1, - ft.docValueFormat(null, DateTimeZone.UTC).parseLong("2015-10-12||/d", true, null)); - } - - public void testValueForSearch() { - MappedFieldType ft = createDefaultFieldType(); - String date = "2015-10-12T12:09:55.000Z"; - long instant = LegacyDateFieldMapper.Defaults.DATE_TIME_FORMATTER.parser().parseDateTime(date).getMillis(); - assertEquals(date, ft.valueForDisplay(instant)); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/LegacyDoubleFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/LegacyDoubleFieldTypeTests.java deleted file mode 100644 index 93ea0eb35fc..00000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/LegacyDoubleFieldTypeTests.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.index.mapper; - -import org.elasticsearch.index.mapper.LegacyDoubleFieldMapper.DoubleFieldType; -import org.elasticsearch.index.mapper.MappedFieldType.Relation; -import org.junit.Before; - -import java.io.IOException; - -public class LegacyDoubleFieldTypeTests extends FieldTypeTestCase { - @Override - protected MappedFieldType createDefaultFieldType() { - return new LegacyDoubleFieldMapper.DoubleFieldType(); - } - - @Before - public void setupProperties() { - setDummyNullValue(10.0D); - } - - public void testIsFieldWithinQuery() throws IOException { - DoubleFieldType ft = new DoubleFieldType(); - // current impl ignores args and shourd always return INTERSECTS - assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(null, randomDouble(), randomDouble(), - randomBoolean(), randomBoolean(), null, null, null)); - } - - public void testValueForSearch() { - MappedFieldType ft = createDefaultFieldType(); - assertEquals(Double.valueOf(1.2), ft.valueForDisplay(1.2)); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/LegacyFloatFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/LegacyFloatFieldTypeTests.java deleted file mode 100644 index a476c81fb47..00000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/LegacyFloatFieldTypeTests.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.index.mapper; - -import org.elasticsearch.index.mapper.LegacyFloatFieldMapper.FloatFieldType; -import org.elasticsearch.index.mapper.MappedFieldType.Relation; -import org.junit.Before; - -import java.io.IOException; - -public class LegacyFloatFieldTypeTests extends FieldTypeTestCase { - @Override - protected MappedFieldType createDefaultFieldType() { - return new LegacyFloatFieldMapper.FloatFieldType(); - } - - @Before - public void setupProperties() { - setDummyNullValue(10.0f); - } - - public void testIsFieldWithinQuery() throws IOException { - FloatFieldType ft = new FloatFieldType(); - // current impl ignores args and shourd always return INTERSECTS - assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(null, randomFloat(), randomFloat(), - randomBoolean(), randomBoolean(), null, null, null)); - } - - public void testValueForSearch() { - MappedFieldType ft = createDefaultFieldType(); - assertEquals(Float.valueOf(1.2f), ft.valueForDisplay(1.2f)); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/LegacyGeohashMappingGeoPointTests.java b/core/src/test/java/org/elasticsearch/index/mapper/LegacyGeohashMappingGeoPointTests.java deleted file mode 100644 index a4d61956a6c..00000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/LegacyGeohashMappingGeoPointTests.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.mapper; - -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.mapper.BaseGeoPointFieldMapper.LegacyGeoPointFieldType; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.VersionUtils; - -import java.util.Collection; - -import static org.elasticsearch.common.geo.GeoHashUtils.stringEncode; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; - -public class LegacyGeohashMappingGeoPointTests extends ESSingleNodeTestCase { - - @Override - protected Collection> getPlugins() { - return pluginList(InternalSettingsPlugin.class); - } - - public void testGeoHashValue() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true) - .endObject().endObject().endObject().endObject().string(); - - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_2_4_0); - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser() - .parse("type", new CompressedXContent(mapping)); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("point", stringEncode(1.3, 1.2)) - .endObject() - .bytes()); - - assertThat(doc.rootDoc().getField("point.lat"), nullValue()); - assertThat(doc.rootDoc().getField("point.lon"), nullValue()); - assertThat(doc.rootDoc().getField("point.geohash").stringValue(), equalTo(stringEncode(1.3, 1.2))); - assertThat(doc.rootDoc().get("point"), notNullValue()); - } - - public void testGeoHashPrecisionAsInteger() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true) - .field("geohash_precision", 10).endObject().endObject().endObject().endObject().string(); - - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_2_4_0); - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser() - .parse("type", new CompressedXContent(mapping)); - FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("point"); - assertThat(mapper, instanceOf(BaseGeoPointFieldMapper.class)); - BaseGeoPointFieldMapper geoPointFieldMapper = (BaseGeoPointFieldMapper) mapper; - assertThat(((LegacyGeoPointFieldType)geoPointFieldMapper.fieldType()).geoHashPrecision(), is(10)); - } - - public void testGeoHashPrecisionAsLength() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true) - 
.field("geohash_precision", "5m").endObject().endObject() - .endObject().endObject().string(); - - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_2_4_0); - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser() - .parse("type", new CompressedXContent(mapping)); - FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("point"); - assertThat(mapper, instanceOf(BaseGeoPointFieldMapper.class)); - BaseGeoPointFieldMapper geoPointFieldMapper = (BaseGeoPointFieldMapper) mapper; - assertThat(((LegacyGeoPointFieldType)geoPointFieldMapper.fieldType()).geoHashPrecision(), is(10)); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/LegacyIntegerFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/LegacyIntegerFieldTypeTests.java deleted file mode 100644 index 7fd6cfcfba8..00000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/LegacyIntegerFieldTypeTests.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.index.mapper; - -import org.elasticsearch.index.mapper.LegacyIntegerFieldMapper.IntegerFieldType; -import org.elasticsearch.index.mapper.MappedFieldType.Relation; -import org.junit.Before; - -import java.io.IOException; - -public class LegacyIntegerFieldTypeTests extends FieldTypeTestCase { - @Override - protected MappedFieldType createDefaultFieldType() { - return new LegacyIntegerFieldMapper.IntegerFieldType(); - } - - @Before - public void setupProperties() { - setDummyNullValue(10); - } - - public void testIsFieldWithinQuery() throws IOException { - IntegerFieldType ft = new IntegerFieldType(); - // current impl ignores args and shourd always return INTERSECTS - assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(null, randomInt(), randomInt(), - randomBoolean(), randomBoolean(), null, null, null)); - } - - public void testValueForSearch() { - MappedFieldType ft = createDefaultFieldType(); - assertEquals(Integer.valueOf(3), ft.valueForDisplay(Integer.valueOf(3))); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/LegacyIpFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/LegacyIpFieldMapperTests.java deleted file mode 100644 index a78cb7a7177..00000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/LegacyIpFieldMapperTests.java +++ /dev/null @@ -1,134 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.mapper; - -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.LegacyIpFieldMapper; -import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; - -import java.util.Collection; - -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; - -public class LegacyIpFieldMapperTests extends ESSingleNodeTestCase { - - private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build(); - - @Override - protected Collection> getPlugins() { - return pluginList(InternalSettingsPlugin.class); - } - - public void testSimpleMapping() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("ip").field("type", "ip").endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("ip", "127.0.0.1") - .endObject() - .bytes()); - - assertThat(doc.rootDoc().getField("ip").numericValue().longValue(), is(2130706433L)); - assertThat(doc.rootDoc().get("ip"), is("2130706433")); - } - - public void testThatValidIpCanBeConvertedToLong() throws Exception { - assertThat(LegacyIpFieldMapper.ipToLong("127.0.0.1"), is(2130706433L)); - } - - public void testThatInvalidIpThrowsException() throws Exception { - try { - LegacyIpFieldMapper.ipToLong("127.0.011.1111111"); - fail("Expected ip address parsing to fail but did not happen"); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("not a valid ip address")); - } - } - - public void testThatIpv6AddressThrowsException() throws Exception { - try { - LegacyIpFieldMapper.ipToLong("2001:db8:0:8d3:0:8a2e:70:7344"); - fail("Expected ip address parsing to fail but did not happen"); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("not a valid ipv4 address")); - } - } - - public void testIgnoreMalformedOption() throws Exception { - String mapping = 
XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties").startObject("field1") - .field("type", "ip").field("ignore_malformed", true).endObject().startObject("field2").field("type", "ip") - .field("ignore_malformed", false).endObject().startObject("field3").field("type", "ip").endObject().endObject().endObject() - .endObject().string(); - - DocumentMapper defaultMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", - XContentFactory.jsonBuilder().startObject().field("field1", "").field("field2", "10.20.30.40").endObject().bytes()); - assertThat(doc.rootDoc().getField("field1"), nullValue()); - assertThat(doc.rootDoc().getField("field2"), notNullValue()); - - try { - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject().field("field2", "").endObject().bytes()); - } catch (MapperParsingException e) { - assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); - } - - // Verify that the default is false - try { - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject().field("field3", "").endObject().bytes()); - } catch (MapperParsingException e) { - assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); - } - - // Unless the global ignore_malformed option is set to true - Settings indexSettings = Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0) - .put("index.mapping.ignore_malformed", true) - .build(); - defaultMapper = createIndex("test2", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject().field("field3", "").endObject().bytes()); - assertThat(doc.rootDoc().getField("field3"), nullValue()); - - // This should still throw an exception, since field2 is specifically set to ignore_malformed=false - try { - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject().field("field2", "").endObject().bytes()); - } catch (MapperParsingException e) { - assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); - } - } - -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/LegacyLongFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/LegacyLongFieldTypeTests.java deleted file mode 100644 index 2177bcff675..00000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/LegacyLongFieldTypeTests.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.index.mapper; - -import org.elasticsearch.index.mapper.LegacyLongFieldMapper.LongFieldType; -import org.elasticsearch.index.mapper.MappedFieldType.Relation; -import org.junit.Before; - -import java.io.IOException; - -public class LegacyLongFieldTypeTests extends FieldTypeTestCase { - @Override - protected MappedFieldType createDefaultFieldType() { - return new LegacyLongFieldMapper.LongFieldType(); - } - - @Before - public void setupProperties() { - setDummyNullValue((long)10); - } - - public void testIsFieldWithinQuery() throws IOException { - LongFieldType ft = new LongFieldType(); - // current impl ignores args and shourd always return INTERSECTS - assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(null, randomLong(), randomLong(), - randomBoolean(), randomBoolean(), null, null, null)); - } - - public void testValueForSearch() { - MappedFieldType ft = createDefaultFieldType(); - assertEquals(Long.valueOf(3), ft.valueForDisplay(Long.valueOf(3))); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/LegacyNumberFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/LegacyNumberFieldMapperTests.java deleted file mode 100644 index 1ce13d5137a..00000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/LegacyNumberFieldMapperTests.java +++ /dev/null @@ -1,620 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.analysis.LegacyNumericTokenStream; -import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.document.Field; -import org.apache.lucene.index.DocValuesType; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.IndexableField; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.DocumentMapperParser; -import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.LegacyFloatFieldMapper; -import org.elasticsearch.index.mapper.LegacyLongFieldMapper; -import org.elasticsearch.index.mapper.LegacyNumberFieldMapper; -import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.ParseContext.Document; -import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.index.mapper.TextFieldMapper; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collection; - -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; - -public class LegacyNumberFieldMapperTests extends ESSingleNodeTestCase { - - private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build(); - - @Override - protected Collection> getPlugins() { - return pluginList(InternalSettingsPlugin.class); - } - - public void testIgnoreMalformedOption() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field1").field("type", "integer").field("ignore_malformed", true).endObject() - .startObject("field2").field("type", "integer").field("ignore_malformed", false).endObject() - .startObject("field3").field("type", "integer").endObject() - .endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field1", "a") - .field("field2", "1") - .endObject() - .bytes()); - assertThat(doc.rootDoc().getField("field1"), nullValue()); - assertThat(doc.rootDoc().getField("field2"), notNullValue()); - - try { - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field2", "a") - .endObject() - .bytes()); - } catch (MapperParsingException e) { - assertThat(e.getCause(), instanceOf(NumberFormatException.class)); - } - - // Verify that the default is false - try { - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field3", "a") - .endObject() - .bytes()); - } catch (MapperParsingException e) { - assertThat(e.getCause(), 
instanceOf(NumberFormatException.class)); - } - - // Unless the global ignore_malformed option is set to true - Settings indexSettings = Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0) - .put("index.mapping.ignore_malformed", true).build(); - defaultMapper = createIndex("test2", indexSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field3", "a") - .endObject() - .bytes()); - assertThat(doc.rootDoc().getField("field3"), nullValue()); - - // This should still throw an exception, since field2 is specifically set to ignore_malformed=false - try { - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field2", "a") - .endObject() - .bytes()); - } catch (MapperParsingException e) { - assertThat(e.getCause(), instanceOf(NumberFormatException.class)); - } - } - - public void testCoerceOption() throws Exception { - String [] nonFractionNumericFieldTypes={"integer","long","short"}; - //Test co-ercion policies on all non-fraction numerics - DocumentMapperParser parser = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser(); - for (String nonFractionNumericFieldType : nonFractionNumericFieldTypes) { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("noErrorNoCoerceField").field("type", nonFractionNumericFieldType).field("ignore_malformed", true) - .field("coerce", false).endObject() - .startObject("noErrorCoerceField").field("type", nonFractionNumericFieldType).field("ignore_malformed", true) - .field("coerce", true).endObject() - .startObject("errorDefaultCoerce").field("type", nonFractionNumericFieldType).field("ignore_malformed", false).endObject() - .startObject("errorNoCoerce").field("type", nonFractionNumericFieldType).field("ignore_malformed", false) - .field("coerce", false).endObject() - .endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - - //Test numbers passed as strings - String invalidJsonNumberAsString="1"; - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("noErrorNoCoerceField", invalidJsonNumberAsString) - .field("noErrorCoerceField", invalidJsonNumberAsString) - .field("errorDefaultCoerce", invalidJsonNumberAsString) - .endObject() - .bytes()); - assertThat(doc.rootDoc().getField("noErrorNoCoerceField"), nullValue()); - assertThat(doc.rootDoc().getField("noErrorCoerceField"), notNullValue()); - //Default is ignore_malformed=true and coerce=true - assertThat(doc.rootDoc().getField("errorDefaultCoerce"), notNullValue()); - - //Test valid case of numbers passed as numbers - int validNumber=1; - doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("noErrorNoCoerceField", validNumber) - .field("noErrorCoerceField", validNumber) - .field("errorDefaultCoerce", validNumber) - .endObject() - .bytes()); - assertEquals(validNumber,doc.rootDoc().getField("noErrorNoCoerceField").numericValue().intValue()); - assertEquals(validNumber,doc.rootDoc().getField("noErrorCoerceField").numericValue().intValue()); - assertEquals(validNumber,doc.rootDoc().getField("errorDefaultCoerce").numericValue().intValue()); - - //Test valid case of negative numbers passed as numbers - int 
validNegativeNumber=-1; - doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("noErrorNoCoerceField", validNegativeNumber) - .field("noErrorCoerceField", validNegativeNumber) - .field("errorDefaultCoerce", validNegativeNumber) - .endObject() - .bytes()); - assertEquals(validNegativeNumber,doc.rootDoc().getField("noErrorNoCoerceField").numericValue().intValue()); - assertEquals(validNegativeNumber,doc.rootDoc().getField("noErrorCoerceField").numericValue().intValue()); - assertEquals(validNegativeNumber,doc.rootDoc().getField("errorDefaultCoerce").numericValue().intValue()); - - - try { - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("errorNoCoerce", invalidJsonNumberAsString) - .endObject() - .bytes()); - } catch (MapperParsingException e) { - assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); - } - - - //Test questionable case of floats passed to ints - float invalidJsonForInteger=1.9f; - int coercedFloatValue=1; //This is what the JSON parser will do to a float - truncate not round - doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("noErrorNoCoerceField", invalidJsonForInteger) - .field("noErrorCoerceField", invalidJsonForInteger) - .field("errorDefaultCoerce", invalidJsonForInteger) - .endObject() - .bytes()); - assertThat(doc.rootDoc().getField("noErrorNoCoerceField"), nullValue()); - assertEquals(coercedFloatValue,doc.rootDoc().getField("noErrorCoerceField").numericValue().intValue()); - //Default is ignore_malformed=true and coerce=true - assertEquals(coercedFloatValue,doc.rootDoc().getField("errorDefaultCoerce").numericValue().intValue()); - - try { - defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("errorNoCoerce", invalidJsonForInteger) - .endObject() - .bytes()); - } catch (MapperParsingException e) { - assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); - } - } - } - - public void testDocValues() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("int1") - .field("type", "integer") - .endObject() - .startObject("int2") - .field("type", "integer") - .field("index", "no") - .endObject() - .startObject("double1") - .field("type", "double") - .endObject() - .startObject("double2") - .field("type", "integer") - .field("index", "no") - .endObject() - .endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("int1", "1234") - .field("double1", "1234") - .field("int2", "1234") - .field("double2", "1234") - .endObject() - .bytes()); - Document doc = parsedDoc.rootDoc(); - assertEquals(DocValuesType.SORTED_NUMERIC, LegacyStringMappingTests.docValuesType(doc, "int1")); - assertEquals(DocValuesType.SORTED_NUMERIC, LegacyStringMappingTests.docValuesType(doc, "double1")); - assertEquals(DocValuesType.NONE, LegacyStringMappingTests.docValuesType(doc, "int2")); - assertEquals(DocValuesType.NONE, LegacyStringMappingTests.docValuesType(doc, "double2")); - } - - public void testUnIndex() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - 
.startObject("properties") - .startObject("int") - .field("type", "integer") - .field("index", false) - .endObject() - .startObject("double") - .field("type", "double") - .field("index", false) - .endObject() - .endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - assertEquals("{\"type\":{\"properties\":{\"double\":{\"type\":\"double\",\"index\":false},\"int\":{\"type\":\"integer\",\"index\":false}}}}", - defaultMapper.mapping().toString()); - - ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("int", "1234") - .field("double", "1234") - .endObject() - .bytes()); - final Document doc = parsedDoc.rootDoc(); - for (IndexableField field : doc.getFields("int")) { - assertEquals(IndexOptions.NONE, field.fieldType().indexOptions()); - } - for (IndexableField field : doc.getFields("double")) { - assertEquals(IndexOptions.NONE, field.fieldType().indexOptions()); - } - } - - public void testBwCompatIndex() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("int") - .field("type", "integer") - .field("index", "no") - .endObject() - .startObject("double") - .field("type", "double") - .field("index", "not_analyzed") - .endObject() - .endObject() - .endObject().endObject().string(); - - Settings oldSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_2_0).build(); - DocumentMapper defaultMapper = createIndex("test", oldSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - assertEquals("{\"type\":{\"properties\":{\"double\":{\"type\":\"double\"},\"int\":{\"type\":\"integer\",\"index\":false}}}}", - defaultMapper.mapping().toString()); - } - - public void testDocValuesOnNested() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("nested") - .field("type", "nested") - .startObject("properties") - .startObject("int") - .field("type", "integer") - .field("doc_values", true) - .endObject() - .startObject("double") - .field("type", "double") - .field("doc_values", true) - .endObject() - .endObject() - .endObject() - .endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .startArray("nested") - .startObject() - .field("int", "1234") - .field("double", "1234") - .endObject() - .startObject() - .field("int", "-1") - .field("double", "-2") - .endObject() - .endArray() - .endObject() - .bytes()); - for (Document doc : parsedDoc.docs()) { - if (doc == parsedDoc.rootDoc()) { - continue; - } - assertEquals(DocValuesType.SORTED_NUMERIC, LegacyStringMappingTests.docValuesType(doc, "nested.int")); - assertEquals(DocValuesType.SORTED_NUMERIC, LegacyStringMappingTests.docValuesType(doc, "nested.double")); - } - } - - /** Test default precision step for autodetected numeric types */ - public void testPrecisionStepDefaultsDetected() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .field("numeric_detection", true) - 
.field("date_detection", true) - .endObject().endObject().string(); - - DocumentMapper mapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("long", "100") - .field("double", "100.0") - .field("date", "2010-01-01") - .endObject() - .bytes()); - - assertEquals(1, doc.docs().size()); - Document luceneDoc = doc.docs().get(0); - - assertPrecisionStepEquals(LegacyNumberFieldMapper.Defaults.PRECISION_STEP_64_BIT, luceneDoc.getField("long")); - assertThat(luceneDoc.getField("double").numericValue(), instanceOf(Float.class)); - assertPrecisionStepEquals(LegacyNumberFieldMapper.Defaults.PRECISION_STEP_32_BIT, luceneDoc.getField("double")); - assertPrecisionStepEquals(LegacyNumberFieldMapper.Defaults.PRECISION_STEP_64_BIT, luceneDoc.getField("date")); - } - - /** Test default precision step for numeric types */ - public void testPrecisionStepDefaultsMapped() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("int") - .field("type", "integer") - .endObject() - .startObject("float") - .field("type", "float") - .endObject() - .startObject("long") - .field("type", "long") - .endObject() - .startObject("double") - .field("type", "double") - .endObject() - .startObject("short") - .field("type", "short") - .endObject() - .startObject("byte") - .field("type", "byte") - .endObject() - .startObject("date") - .field("type", "date") - .endObject() - .startObject("ip") - .field("type", "ip") - .endObject() - - .endObject() - .endObject().endObject().string(); - - DocumentMapper mapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("int", "100") - .field("float", "100.0") - .field("long", "5000") - .field("double", "34.545") - .field("short", "1645") - .field("byte", "50") - .field("date", "2010-01-01") - .field("ip", "255.255.255.255") - .endObject() - .bytes()); - - assertEquals(1, doc.docs().size()); - Document luceneDoc = doc.docs().get(0); - - assertPrecisionStepEquals(LegacyNumberFieldMapper.Defaults.PRECISION_STEP_64_BIT, luceneDoc.getField("long")); - assertPrecisionStepEquals(LegacyNumberFieldMapper.Defaults.PRECISION_STEP_64_BIT, luceneDoc.getField("double")); - assertPrecisionStepEquals(LegacyNumberFieldMapper.Defaults.PRECISION_STEP_64_BIT, luceneDoc.getField("date")); - assertPrecisionStepEquals(LegacyNumberFieldMapper.Defaults.PRECISION_STEP_64_BIT, luceneDoc.getField("ip")); - - assertPrecisionStepEquals(LegacyNumberFieldMapper.Defaults.PRECISION_STEP_32_BIT, luceneDoc.getField("int")); - assertPrecisionStepEquals(LegacyNumberFieldMapper.Defaults.PRECISION_STEP_32_BIT, luceneDoc.getField("float")); - - assertPrecisionStepEquals(LegacyNumberFieldMapper.Defaults.PRECISION_STEP_16_BIT, luceneDoc.getField("short")); - assertPrecisionStepEquals(LegacyNumberFieldMapper.Defaults.PRECISION_STEP_8_BIT, luceneDoc.getField("byte")); - } - - /** Test precision step set to silly explicit values */ - public void testPrecisionStepExplicit() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("int") - .field("type", "integer") - .field("precision_step", 
"1") - .endObject() - .startObject("float") - .field("type", "float") - .field("precision_step", "2") - .endObject() - .startObject("long") - .field("type", "long") - .field("precision_step", "1") - .endObject() - .startObject("double") - .field("type", "double") - .field("precision_step", "2") - .endObject() - .startObject("short") - .field("type", "short") - .field("precision_step", "1") - .endObject() - .startObject("byte") - .field("type", "byte") - .field("precision_step", "2") - .endObject() - .startObject("date") - .field("type", "date") - .field("precision_step", "1") - .endObject() - .startObject("ip") - .field("type", "ip") - .field("precision_step", "2") - .endObject() - - .endObject() - .endObject().endObject().string(); - - DocumentMapper mapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("int", "100") - .field("float", "100.0") - .field("long", "5000") - .field("double", "34.545") - .field("short", "1645") - .field("byte", "50") - .field("date", "2010-01-01") - .field("ip", "255.255.255.255") - .endObject() - .bytes()); - - assertEquals(1, doc.docs().size()); - Document luceneDoc = doc.docs().get(0); - - assertPrecisionStepEquals(1, luceneDoc.getField("int")); - assertPrecisionStepEquals(2, luceneDoc.getField("float")); - assertPrecisionStepEquals(1, luceneDoc.getField("long")); - assertPrecisionStepEquals(2, luceneDoc.getField("double")); - assertPrecisionStepEquals(1, luceneDoc.getField("short")); - assertPrecisionStepEquals(2, luceneDoc.getField("byte")); - assertPrecisionStepEquals(1, luceneDoc.getField("date")); - assertPrecisionStepEquals(2, luceneDoc.getField("ip")); - - } - - /** checks precisionstep on both the fieldtype and the tokenstream */ - private static void assertPrecisionStepEquals(int expected, IndexableField field) throws IOException { - assertNotNull(field); - assertThat(field, instanceOf(Field.class)); - - // check fieldtype's precisionstep - assertEquals(expected, ((Field)field).fieldType().numericPrecisionStep()); - - // check the tokenstream actually used by the indexer - TokenStream ts = field.tokenStream(null, null); - assertThat(ts, instanceOf(LegacyNumericTokenStream.class)); - assertEquals(expected, ((LegacyNumericTokenStream)ts).getPrecisionStep()); - } - - public void testTermVectorsBackCompat() throws Exception { - for (String type : Arrays.asList("byte", "short", "integer", "long", "float", "double")) { - doTestTermVectorsBackCompat(type); - } - } - - private void doTestTermVectorsBackCompat(String type) throws Exception { - DocumentMapperParser parser = createIndex("index-" + type).mapperService().documentMapperParser(); - String mappingWithTV = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("foo") - .field("type", type) - .field("term_vector", "yes") - .endObject() - .endObject().endObject().endObject().string(); - try { - parser.parse("type", new CompressedXContent(mappingWithTV)); - fail(); - } catch (MapperParsingException e) { - assertThat(e.getMessage(), containsString("Mapping definition for [foo] has unsupported parameters: [term_vector : yes]")); - } - - Settings oldIndexSettings = Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_1_0) - .build(); - parser = createIndex("index2-" + type, oldIndexSettings).mapperService().documentMapperParser(); - 
parser.parse("type", new CompressedXContent(mappingWithTV)); // no exception - } - - public void testAnalyzerBackCompat() throws Exception { - for (String type : Arrays.asList("byte", "short", "integer", "long", "float", "double")) { - doTestAnalyzerBackCompat(type); - } - } - - private void doTestAnalyzerBackCompat(String type) throws Exception { - DocumentMapperParser parser = createIndex("index-" + type).mapperService().documentMapperParser(); - String mappingWithTV = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("foo") - .field("type", type) - .field("analyzer", "keyword") - .endObject() - .endObject().endObject().endObject().string(); - try { - parser.parse("type", new CompressedXContent(mappingWithTV)); - fail(); - } catch (MapperParsingException e) { - assertThat(e.getMessage(), containsString("Mapping definition for [foo] has unsupported parameters: [analyzer : keyword]")); - } - - Settings oldIndexSettings = Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_1_0) - .build(); - parser = createIndex("index2-" + type, oldIndexSettings).mapperService().documentMapperParser(); - parser.parse("type", new CompressedXContent(mappingWithTV)); // no exception - } - - - public void testIgnoreFielddata() throws IOException { - for (String type : Arrays.asList("byte", "short", "integer", "long", "float", "double")) { - Settings oldIndexSettings = Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_1_0) - .build(); - DocumentMapperParser parser = createIndex("index-" + type, oldIndexSettings).mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("foo") - .field("type", type) - .startObject("fielddata") - .field("loading", "eager") - .endObject() - .endObject() - .endObject().endObject().endObject().string(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - String expectedMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("foo") - .field("type", type) - .endObject() - .endObject().endObject().endObject().string(); - assertEquals(expectedMapping, mapper.mappingSource().string()); - } - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/LegacyShortFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/LegacyShortFieldTypeTests.java deleted file mode 100644 index 2e22bac6e95..00000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/LegacyShortFieldTypeTests.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.index.mapper; - -import org.junit.Before; - -public class LegacyShortFieldTypeTests extends FieldTypeTestCase { - @Override - protected MappedFieldType createDefaultFieldType() { - return new LegacyShortFieldMapper.ShortFieldType(); - } - - @Before - public void setupProperties() { - setDummyNullValue((short)10); - } - - public void testValueForSearch() { - MappedFieldType ft = createDefaultFieldType(); - // shorts are stored as ints - assertEquals(Short.valueOf((short) 3), ft.valueForDisplay(Integer.valueOf(3))); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/LegacyStringMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/LegacyStringMappingTests.java deleted file mode 100644 index a1583098292..00000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/LegacyStringMappingTests.java +++ /dev/null @@ -1,1196 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.index.DocValuesType; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.index.IndexableField; -import org.apache.lucene.index.IndexableFieldType; -import org.elasticsearch.Version; -import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.cluster.metadata.MappingMetaData; -import org.elasticsearch.common.collect.ImmutableOpenMap; -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.Mapper.BuilderContext; -import org.elasticsearch.index.mapper.ParseContext.Document; -import org.elasticsearch.index.mapper.StringFieldMapper.Builder; -import org.elasticsearch.index.mapper.StringFieldMapper.StringFieldType; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; -import org.junit.Before; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -import static java.util.Collections.emptyMap; -import static java.util.Collections.singletonList; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.containsString; -import static 
org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; - -public class LegacyStringMappingTests extends ESSingleNodeTestCase { - - @Override - protected Collection> getPlugins() { - return pluginList(InternalSettingsPlugin.class); - } - - IndexService indexService; - DocumentMapperParser parser; - - @Before - public void before() { - indexService = createIndex("test", - // we need 2.x since string is deprecated in 5.0 - Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build()); - parser = indexService.mapperService().documentMapperParser(); - } - - public void testLimit() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string").field("ignore_above", 5).endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes()); - - assertThat(doc.rootDoc().getField("field"), notNullValue()); - - doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "12345") - .endObject() - .bytes()); - - assertThat(doc.rootDoc().getField("field"), notNullValue()); - - doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "123456") - .endObject() - .bytes()); - - assertThat(doc.rootDoc().getField("field"), nullValue()); - } - - private void assertDefaultAnalyzedFieldType(IndexableFieldType fieldType) { - assertThat(fieldType.omitNorms(), equalTo(false)); - assertThat(fieldType.indexOptions(), equalTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS)); - assertThat(fieldType.storeTermVectors(), equalTo(false)); - assertThat(fieldType.storeTermVectorOffsets(), equalTo(false)); - assertThat(fieldType.storeTermVectorPositions(), equalTo(false)); - assertThat(fieldType.storeTermVectorPayloads(), equalTo(false)); - } - - private void assertEquals(IndexableFieldType ft1, IndexableFieldType ft2) { - assertEquals(ft1.tokenized(), ft2.tokenized()); - assertEquals(ft1.omitNorms(), ft2.omitNorms()); - assertEquals(ft1.indexOptions(), ft2.indexOptions()); - assertEquals(ft1.storeTermVectors(), ft2.storeTermVectors()); - assertEquals(ft1.docValuesType(), ft2.docValuesType()); - } - - private void assertParseIdemPotent(IndexableFieldType expected, DocumentMapper mapper) throws Exception { - String mapping = mapper.toXContent(XContentFactory.jsonBuilder().startObject(), new ToXContent.MapParams(emptyMap())).endObject().string(); - mapper = parser.parse("type", new CompressedXContent(mapping)); - ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "2345") - .endObject() - .bytes()); - assertEquals(expected, doc.rootDoc().getField("field").fieldType()); - } - - public void testDefaultsForAnalyzed() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string").endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", 
XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes()); - - IndexableFieldType fieldType = doc.rootDoc().getField("field").fieldType(); - assertDefaultAnalyzedFieldType(fieldType); - assertParseIdemPotent(fieldType, defaultMapper); - } - - public void testDefaultsForNotAnalyzed() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string").field("index", "not_analyzed").endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes()); - - IndexableFieldType fieldType = doc.rootDoc().getField("field").fieldType(); - assertThat(fieldType.omitNorms(), equalTo(true)); - assertThat(fieldType.indexOptions(), equalTo(IndexOptions.DOCS)); - assertThat(fieldType.storeTermVectors(), equalTo(false)); - assertThat(fieldType.storeTermVectorOffsets(), equalTo(false)); - assertThat(fieldType.storeTermVectorPositions(), equalTo(false)); - assertThat(fieldType.storeTermVectorPayloads(), equalTo(false)); - assertParseIdemPotent(fieldType, defaultMapper); - - // now test it explicitly set - - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string").field("index", "not_analyzed").startObject("norms").field("enabled", true).endObject().field("index_options", "freqs").endObject().endObject() - .endObject().endObject().string(); - - defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - - doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes()); - - fieldType = doc.rootDoc().getField("field").fieldType(); - assertThat(fieldType.omitNorms(), equalTo(false)); - assertThat(fieldType.indexOptions(), equalTo(IndexOptions.DOCS_AND_FREQS)); - assertThat(fieldType.storeTermVectors(), equalTo(false)); - assertThat(fieldType.storeTermVectorOffsets(), equalTo(false)); - assertThat(fieldType.storeTermVectorPositions(), equalTo(false)); - assertThat(fieldType.storeTermVectorPayloads(), equalTo(false)); - assertParseIdemPotent(fieldType, defaultMapper); - - // also test the deprecated omit_norms - - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string").field("index", "not_analyzed").field("omit_norms", false).endObject().endObject() - .endObject().endObject().string(); - - defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - - doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes()); - - fieldType = doc.rootDoc().getField("field").fieldType(); - assertThat(fieldType.omitNorms(), equalTo(false)); - assertParseIdemPotent(fieldType, defaultMapper); - } - - public void testSearchQuoteAnalyzerSerialization() throws Exception { - // Cases where search_quote_analyzer should not be added to the mapping. 
- String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field1") - .field("type", "string") - .field("position_increment_gap", 1000) - .endObject() - .startObject("field2") - .field("type", "string") - .field("position_increment_gap", 1000) - .field("analyzer", "standard") - .endObject() - .startObject("field3") - .field("type", "string") - .field("position_increment_gap", 1000) - .field("analyzer", "standard") - .field("search_analyzer", "simple") - .endObject() - .startObject("field4") - .field("type", "string") - .field("position_increment_gap", 1000) - .field("analyzer", "standard") - .field("search_analyzer", "simple") - .field("search_quote_analyzer", "simple") - .endObject() - .endObject() - .endObject().endObject().string(); - - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - for (String fieldName : Arrays.asList("field1", "field2", "field3", "field4")) { - Map serializedMap = getSerializedMap(fieldName, mapper); - assertFalse(fieldName, serializedMap.containsKey("search_quote_analyzer")); - } - - // Cases where search_quote_analyzer should be present. - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field") - .field("type", "string") - .field("position_increment_gap", 1000) - .field("analyzer", "standard") - .field("search_analyzer", "standard") - .field("search_quote_analyzer", "simple") - .endObject() - .endObject() - .endObject().endObject().string(); - - mapper = parser.parse("type", new CompressedXContent(mapping)); - Map serializedMap = getSerializedMap("field", mapper); - assertEquals(serializedMap.get("search_quote_analyzer"), "simple"); - } - - public void testSearchAnalyzerSerialization() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field") - .field("type", "string") - .field("analyzer", "standard") - .field("search_analyzer", "keyword") - .endObject() - .endObject().endObject().endObject().string(); - - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - assertEquals(mapping, mapper.mappingSource().toString()); - - // special case: default index analyzer - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field") - .field("type", "string") - .field("analyzer", "default") - .field("search_analyzer", "keyword") - .endObject() - .endObject().endObject().endObject().string(); - - mapper = parser.parse("type", new CompressedXContent(mapping)); - assertEquals(mapping, mapper.mappingSource().toString()); - - // special case: default search analyzer - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field") - .field("type", "string") - .field("analyzer", "keyword") - .endObject() - .endObject().endObject().endObject().string(); - - mapper = parser.parse("type", new CompressedXContent(mapping)); - assertEquals(mapping, mapper.mappingSource().toString()); - - // special case: default search analyzer - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field") - .field("type", "string") - .field("analyzer", "keyword") - .field("search_analyzer", "default") - .endObject() - .endObject().endObject().endObject().string(); - - mapper = parser.parse("type", new 
CompressedXContent(mapping)); - assertEquals(mapping, mapper.mappingSource().toString()); - - - mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field") - .field("type", "string") - .field("analyzer", "keyword") - .endObject() - .endObject().endObject().endObject().string(); - mapper = parser.parse("type", new CompressedXContent(mapping)); - - XContentBuilder builder = XContentFactory.jsonBuilder(); - builder.startObject(); - mapper.toXContent(builder, new ToXContent.MapParams(Collections.singletonMap("include_defaults", "true"))); - builder.endObject(); - - String mappingString = builder.string(); - assertTrue(mappingString.contains("analyzer")); - assertTrue(mappingString.contains("search_analyzer")); - assertTrue(mappingString.contains("search_quote_analyzer")); - } - - private Map getSerializedMap(String fieldName, DocumentMapper mapper) throws Exception { - FieldMapper fieldMapper = mapper.mappers().smartNameFieldMapper(fieldName); - XContentBuilder builder = JsonXContent.contentBuilder().startObject(); - fieldMapper.toXContent(builder, ToXContent.EMPTY_PARAMS).endObject(); - builder.close(); - - Map fieldMap; - try (XContentParser parser = JsonXContent.jsonXContent.createParser(builder.bytes())) { - fieldMap = parser.map(); - } - @SuppressWarnings("unchecked") - Map result = (Map) fieldMap.get(fieldName); - return result; - } - - public void testTermVectors() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field1") - .field("type", "string") - .field("term_vector", "no") - .endObject() - .startObject("field2") - .field("type", "string") - .field("term_vector", "yes") - .endObject() - .startObject("field3") - .field("type", "string") - .field("term_vector", "with_offsets") - .endObject() - .startObject("field4") - .field("type", "string") - .field("term_vector", "with_positions") - .endObject() - .startObject("field5") - .field("type", "string") - .field("term_vector", "with_positions_offsets") - .endObject() - .startObject("field6") - .field("type", "string") - .field("term_vector", "with_positions_offsets_payloads") - .endObject() - .endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field1", "1234") - .field("field2", "1234") - .field("field3", "1234") - .field("field4", "1234") - .field("field5", "1234") - .field("field6", "1234") - .endObject() - .bytes()); - - assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectors(), equalTo(false)); - assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectorOffsets(), equalTo(false)); - assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectorPositions(), equalTo(false)); - assertThat(doc.rootDoc().getField("field1").fieldType().storeTermVectorPayloads(), equalTo(false)); - - assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectors(), equalTo(true)); - assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectorOffsets(), equalTo(false)); - assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectorPositions(), equalTo(false)); - assertThat(doc.rootDoc().getField("field2").fieldType().storeTermVectorPayloads(), equalTo(false)); - - 
assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectors(), equalTo(true)); - assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectorOffsets(), equalTo(true)); - assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectorPositions(), equalTo(false)); - assertThat(doc.rootDoc().getField("field3").fieldType().storeTermVectorPayloads(), equalTo(false)); - - assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectors(), equalTo(true)); - assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectorOffsets(), equalTo(false)); - assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectorPositions(), equalTo(true)); - assertThat(doc.rootDoc().getField("field4").fieldType().storeTermVectorPayloads(), equalTo(false)); - - assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectors(), equalTo(true)); - assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectorOffsets(), equalTo(true)); - assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectorPositions(), equalTo(true)); - assertThat(doc.rootDoc().getField("field5").fieldType().storeTermVectorPayloads(), equalTo(false)); - - assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectors(), equalTo(true)); - assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorOffsets(), equalTo(true)); - assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorPositions(), equalTo(true)); - assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorPayloads(), equalTo(true)); - } - - public void testDocValues() throws Exception { - // doc values only work on non-analyzed content - final BuilderContext ctx = new BuilderContext(indexService.getIndexSettings().getSettings(), new ContentPath(1)); - try { - new StringFieldMapper.Builder("anything").docValues(true).build(ctx); - fail(); - } catch (Exception e) { /* OK */ } - - assertFalse(new Builder("anything").index(false).build(ctx).fieldType().hasDocValues()); - assertTrue(new Builder("anything").index(true).tokenized(false).build(ctx).fieldType().hasDocValues()); - assertFalse(new Builder("anything").index(true).tokenized(true).build(ctx).fieldType().hasDocValues()); - assertFalse(new Builder("anything").index(false).tokenized(false).docValues(false).build(ctx).fieldType().hasDocValues()); - assertTrue(new Builder("anything").index(false).docValues(true).build(ctx).fieldType().hasDocValues()); - - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("str1") - .field("type", "string") - .field("index", "no") - .endObject() - .startObject("str2") - .field("type", "string") - .field("index", "not_analyzed") - .endObject() - .startObject("str3") - .field("type", "string") - .field("index", "analyzed") - .endObject() - .startObject("str4") - .field("type", "string") - .field("index", "not_analyzed") - .field("doc_values", false) - .endObject() - .startObject("str5") - .field("type", "string") - .field("index", "no") - .field("doc_values", false) - .endObject() - .endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - - ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("str1", "1234") - .field("str2", "1234") - .field("str3", "1234") - .field("str4", "1234") - .field("str5", "1234") - .endObject() - .bytes()); - final 
Document doc = parsedDoc.rootDoc(); - assertEquals(DocValuesType.NONE, docValuesType(doc, "str1")); - assertEquals(DocValuesType.SORTED_SET, docValuesType(doc, "str2")); - assertEquals(DocValuesType.NONE, docValuesType(doc, "str3")); - assertEquals(DocValuesType.NONE, docValuesType(doc, "str4")); - assertEquals(DocValuesType.NONE, docValuesType(doc, "str5")); - - } - - public void testBwCompatDocValues() throws Exception { - Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_2_0).build(); - indexService = createIndex("test_old", oldIndexSettings); - parser = indexService.mapperService().documentMapperParser(); - // doc values only work on non-analyzed content - final BuilderContext ctx = new BuilderContext(indexService.getIndexSettings().getSettings(), new ContentPath(1)); - try { - new StringFieldMapper.Builder("anything").docValues(true).build(ctx); - fail(); - } catch (Exception e) { /* OK */ } - - assertFalse(new Builder("anything").index(false).build(ctx).fieldType().hasDocValues()); - assertTrue(new Builder("anything").index(true).tokenized(false).build(ctx).fieldType().hasDocValues()); - assertFalse(new Builder("anything").index(true).tokenized(true).build(ctx).fieldType().hasDocValues()); - assertFalse(new Builder("anything").index(false).tokenized(false).docValues(false).build(ctx).fieldType().hasDocValues()); - assertTrue(new Builder("anything").index(false).docValues(true).build(ctx).fieldType().hasDocValues()); - - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("str1") - .field("type", "string") - .field("index", "no") - .endObject() - .startObject("str2") - .field("type", "string") - .field("index", "not_analyzed") - .endObject() - .startObject("str3") - .field("type", "string") - .field("index", "analyzed") - .endObject() - .startObject("str4") - .field("type", "string") - .field("index", "not_analyzed") - .field("doc_values", false) - .endObject() - .startObject("str5") - .field("type", "string") - .field("index", "no") - .field("doc_values", true) - .endObject() - .endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - - ParsedDocument parsedDoc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("str1", "1234") - .field("str2", "1234") - .field("str3", "1234") - .field("str4", "1234") - .field("str5", "1234") - .endObject() - .bytes()); - final Document doc = parsedDoc.rootDoc(); - assertEquals(DocValuesType.NONE, docValuesType(doc, "str1")); - assertEquals(DocValuesType.SORTED_SET, docValuesType(doc, "str2")); - assertEquals(DocValuesType.NONE, docValuesType(doc, "str3")); - assertEquals(DocValuesType.NONE, docValuesType(doc, "str4")); - assertEquals(DocValuesType.SORTED_SET, docValuesType(doc, "str5")); - - } - - // TODO: this function shouldn't be necessary. 
parsing should just add a single field that is indexed and dv - public static DocValuesType docValuesType(Document document, String fieldName) { - for (IndexableField field : document.getFields(fieldName)) { - if (field.fieldType().docValuesType() != DocValuesType.NONE) { - return field.fieldType().docValuesType(); - } - } - return DocValuesType.NONE; - } - - public void testDisableNorms() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string").endObject().endObject() - .endObject().endObject().string(); - - MapperService mapperService = indexService.mapperService(); - DocumentMapper defaultMapper = mapperService.merge("type", new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE, false); - - ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes()); - - IndexableFieldType fieldType = doc.rootDoc().getField("field").fieldType(); - assertEquals(false, fieldType.omitNorms()); - - String updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string").startObject("norms").field("enabled", false).endObject() - .endObject().endObject().endObject().endObject().string(); - defaultMapper = mapperService.merge("type", new CompressedXContent(updatedMapping), MapperService.MergeReason.MAPPING_UPDATE, false); - - doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject() - .bytes()); - - fieldType = doc.rootDoc().getField("field").fieldType(); - assertEquals(true, fieldType.omitNorms()); - - updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string").startObject("norms").field("enabled", true).endObject() - .endObject().endObject().endObject().endObject().string(); - try { - mapperService.merge("type", new CompressedXContent(updatedMapping), MapperService.MergeReason.MAPPING_UPDATE, false); - fail(); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("different [norms]")); - } - } - - /** - * Test that expected exceptions are thrown when creating a new index with position_offset_gap - */ - public void testPositionOffsetGapDeprecation() throws Exception { - // test deprecation exceptions on newly created indexes - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field1") - .field("type", "string") - .field("position_increment_gap", 10) - .endObject() - .startObject("field2") - .field("type", "string") - .field("position_offset_gap", 50) - .field("analyzer", "standard") - .endObject().endObject().endObject().endObject().string(); - try { - parser.parse("type", new CompressedXContent(mapping)); - fail("Mapping definition should fail with the position_offset_gap setting"); - }catch (MapperParsingException e) { - assertEquals(e.getMessage(), "Mapping definition for [field2] has unsupported parameters: [position_offset_gap : 50]"); - } - } - - public void testFielddataLoading() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field") - .field("type", "string") - .startObject("fielddata") - .field("loading", 
"eager_global_ordinals") - .endObject() - .endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - - String expectedMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field") - .field("type", "string") - .field("eager_global_ordinals", true) - .endObject().endObject() - .endObject().endObject().string(); - - assertEquals(expectedMapping, mapper.mappingSource().toString()); - assertTrue(mapper.mappers().getMapper("field").fieldType().eagerGlobalOrdinals()); - } - - public void testFielddataFilter() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field") - .field("type", "string") - .startObject("fielddata") - .startObject("filter") - .startObject("frequency") - .field("min", 2d) - .field("min_segment_size", 1000) - .endObject() - .startObject("regex") - .field("pattern", "^#.*") - .endObject() - .endObject() - .endObject() - .endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - - String expectedMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field") - .field("type", "string") - .startObject("fielddata_frequency_filter") - .field("min", 2d) - .field("min_segment_size", 1000) - .endObject() - .endObject().endObject() - .endObject().endObject().string(); - - assertEquals(expectedMapping, mapper.mappingSource().toString()); - StringFieldType fieldType = (StringFieldType) mapper.mappers().getMapper("field").fieldType(); - assertThat(fieldType.fielddataMinFrequency(), equalTo(2d)); - assertThat(fieldType.fielddataMaxFrequency(), equalTo((double) Integer.MAX_VALUE)); - assertThat(fieldType.fielddataMinSegmentSize(), equalTo(1000)); - } - - public void testDisabledFielddata() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field") - .field("type", "string") - .startObject("fielddata") - .field("format", "disabled") - .endObject() - .endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - - String expectedMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field") - .field("type", "string") - .field("fielddata", false) - .endObject().endObject() - .endObject().endObject().string(); - - assertEquals(expectedMapping, mapper.mappingSource().toString()); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> mapper.mappers().getMapper("field").fieldType().fielddataBuilder()); - assertThat(e.getMessage(), containsString("Fielddata is disabled")); - } - - public void testNonAnalyzedFieldPositionIncrement() throws IOException { - for (String index : Arrays.asList("no", "not_analyzed")) { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field") - .field("type", "string") - .field("index", index) - .field("position_increment_gap", 10) - .endObject().endObject().endObject().endObject().string(); - - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> parser.parse("type", new CompressedXContent(mapping))); - 
assertEquals("Cannot set position_increment_gap on field [field] without positions enabled", e.getMessage()); - } - } - - public void testAnalyzedFieldPositionIncrementWithoutPositions() throws IOException { - for (String indexOptions : Arrays.asList("docs", "freqs")) { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field") - .field("type", "string") - .field("index_options", indexOptions) - .field("position_increment_gap", 10) - .endObject().endObject().endObject().endObject().string(); - - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> parser.parse("type", new CompressedXContent(mapping))); - assertEquals("Cannot set position_increment_gap on field [field] without positions enabled", e.getMessage()); - } - } - - public void testKeywordFieldAsStringWithUnsupportedField() throws IOException { - String mapping = mappingForTestField(b -> b.field("type", "keyword").field("fielddata", true)).string(); - Exception e = expectThrows(IllegalArgumentException.class, () -> parser.parse("test_type", new CompressedXContent(mapping))); - assertEquals("Automatic downgrade from [keyword] to [string] failed because parameters [fielddata] are not supported for " - + "automatic downgrades.", e.getMessage()); - } - - public void testMergeKeywordIntoString() throws IOException { - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("index", "not_analyzed"); - expectedMapping.put("fielddata", false); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("index", "not_analyzed")); - mergeMappingStep(expectedMapping, b -> b.field("type", "keyword")); - } - - public void testMergeKeywordIntoStringWithIndexFalse() throws IOException { - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("index", "no"); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("index", "no")); - mergeMappingStep(expectedMapping, b -> b.field("type", "keyword").field("index", false)); - } - - public void testMergeKeywordIntoStringWithStore() throws IOException { - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("index", "not_analyzed"); - expectedMapping.put("fielddata", false); - expectedMapping.put("store", true); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("index", "not_analyzed").field("store", true)); - mergeMappingStep(expectedMapping, b -> b.field("type", "keyword").field("store", true)); - } - - public void testMergeKeywordIntoStringWithDocValues() throws IOException { - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("index", "not_analyzed"); - expectedMapping.put("doc_values", false); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("index", "not_analyzed").field("doc_values", false)); - mergeMappingStep(expectedMapping, b -> b.field("type", "keyword").field("doc_values", false)); - } - - public void testMergeKeywordIntoStringWithNorms() throws IOException { - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("index", "not_analyzed"); - expectedMapping.put("fielddata", false); - expectedMapping.put("norms", true); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("index", "not_analyzed").field("norms", true)); - mergeMappingStep(expectedMapping, b 
-> b.field("type", "keyword").field("norms", true)); - // norms can be an array but it'll just get squashed into true/false - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("index", "not_analyzed") - .startObject("norms") - .field("enabled", true) - .field("loading", randomAsciiOfLength(5)) // Totally ignored even though it used to be eager/lazy - .endObject()); - mergeMappingStep(expectedMapping, b -> b.field("type", "keyword") - .startObject("norms") - .field("enabled", true) - .field("loading", randomAsciiOfLength(5)) - .endObject()); - } - - public void testMergeKeywordIntoStringWithBoost() throws IOException { - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("index", "not_analyzed"); - expectedMapping.put("fielddata", false); - expectedMapping.put("boost", 1.5); - expectedMapping.put("norms", true); // Implied by having a boost - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("index", "not_analyzed").field("boost", 1.5)); - mergeMappingStep(expectedMapping, b -> b.field("type", "keyword").field("boost", 1.5)); - expectedMapping.put("boost", 1.4); - mergeMappingStep(expectedMapping, b -> b.field("type", "keyword").field("boost", 1.4)); - } - - public void testMergeKeywordIntoStringWithFields() throws IOException { - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("index", "not_analyzed"); - expectedMapping.put("fielddata", false); - Map expectedFields = new HashMap<>(); - expectedMapping.put("fields", expectedFields); - Map expectedFoo = new HashMap<>(); - expectedFields.put("foo", expectedFoo); - expectedFoo.put("type", "string"); - expectedFoo.put("analyzer", "standard"); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("index", "not_analyzed") - .startObject("fields") - .startObject("foo") - .field("type", "string") - .field("analyzer", "standard") - .endObject() - .endObject()); - mergeMappingStep(expectedMapping, b -> b.field("type", "keyword") - .startObject("fields") - .startObject("foo") - .field("type", "string") - .field("analyzer", "standard") - .endObject() - .endObject()); - - Map expectedBar = new HashMap<>(); - expectedFields.put("bar", expectedBar); - expectedBar.put("type", "string"); - expectedBar.put("analyzer", "whitespace"); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("index", "not_analyzed") - .startObject("fields") - .startObject("foo") - .field("type", "string") - .field("analyzer", "standard") - .endObject() - .startObject("bar") - .field("type", "string") - .field("analyzer", "whitespace") - .endObject() - .endObject()); - mergeMappingStep(expectedMapping, b -> b.field("type", "keyword") - .startObject("fields") - .startObject("foo") - .field("type", "string") - .field("analyzer", "standard") - .endObject() - .startObject("bar") - .field("type", "string") - .field("analyzer", "whitespace") - .endObject() - .endObject()); - } - - public void testMergeKeywordIntoStringWithCopyTo() throws IOException { - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("index", "not_analyzed"); - expectedMapping.put("fielddata", false); - expectedMapping.put("copy_to", singletonList("another_field")); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("index", "not_analyzed").field("copy_to", "another_field")); - mergeMappingStep(expectedMapping, b -> b.field("type", "keyword").field("copy_to", 
"another_field")); - } - - public void testMergeKeywordIntoStringWithIncludeInAll() throws IOException { - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("index", "not_analyzed"); - expectedMapping.put("fielddata", false); - expectedMapping.put("include_in_all", false); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("index", "not_analyzed").field("include_in_all", false)); - mergeMappingStep(expectedMapping, b -> b.field("type", "keyword").field("include_in_all", false)); - } - - public void testMergeKeywordIntoStringWithIgnoreAbove() throws IOException { - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("index", "not_analyzed"); - expectedMapping.put("fielddata", false); - expectedMapping.put("ignore_above", 128); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("index", "not_analyzed").field("ignore_above", 128)); - mergeMappingStep(expectedMapping, b -> b.field("type", "keyword").field("ignore_above", 128)); - } - - public void testMergeKeywordIntoStringWithIndexOptions() throws IOException { - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("index", "not_analyzed"); - expectedMapping.put("fielddata", false); - expectedMapping.put("index_options", "freqs"); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("index", "not_analyzed").field("index_options", "freqs")); - mergeMappingStep(expectedMapping, b -> b.field("type", "keyword").field("index_options", "freqs")); - } - - public void testMergeKeywordIntoStringWithSimilarity() throws IOException { - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("index", "not_analyzed"); - expectedMapping.put("fielddata", false); - expectedMapping.put("similarity", "BM25"); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("index", "not_analyzed").field("similarity", "BM25")); - mergeMappingStep(expectedMapping, b -> b.field("type", "keyword").field("similarity", "BM25")); - } - - public void testTextFieldAsStringWithUnsupportedField() throws IOException { - String mapping = mappingForTestField(b -> b.field("type", "text").field("null_value", "kitten")).string(); - Exception e = expectThrows(IllegalArgumentException.class, () -> parser.parse("test_type", new CompressedXContent(mapping))); - assertEquals("Automatic downgrade from [text] to [string] failed because parameters [null_value] are not supported for " - + "automatic downgrades.", e.getMessage()); - } - - public void testMergeTextIntoString() throws IOException { - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - mergeMappingStep(expectedMapping, b -> b.field("type", "string")); - mergeMappingStep(expectedMapping, b -> b.field("type", "text").field("fielddata", true)); - } - - public void testMergeTextIntoStringWithStore() throws IOException { - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("store", true); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("store", true)); - mergeMappingStep(expectedMapping, b -> b.field("type", "text").field("store", true).field("fielddata", true)); - } - - public void testMergeTextIntoStringWithDocValues() throws IOException { - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - mergeMappingStep(expectedMapping, b 
-> b.field("type", "string").field("doc_values", false)); - mergeMappingStep(expectedMapping, b -> b.field("type", "text").field("doc_values", false).field("fielddata", true)); - } - - public void testMergeTextIntoStringWithNorms() throws IOException { - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("norms", false); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("norms", false)); - mergeMappingStep(expectedMapping, b -> b.field("type", "text").field("norms", false).field("fielddata", true)); - } - - public void testMergeTextIntoStringWithBoost() throws IOException { - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("boost", 1.5); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("boost", 1.5)); - mergeMappingStep(expectedMapping, b -> b.field("type", "text").field("boost", 1.5).field("fielddata", true)); - expectedMapping.put("boost", 1.4); - mergeMappingStep(expectedMapping, b -> b.field("type", "text").field("boost", 1.4).field("fielddata", true)); - } - - public void testMergeTextIntoStringWithFields() throws IOException { - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - Map expectedFields = new HashMap<>(); - expectedMapping.put("fields", expectedFields); - Map expectedFoo = new HashMap<>(); - expectedFields.put("foo", expectedFoo); - expectedFoo.put("type", "string"); - expectedFoo.put("analyzer", "standard"); - mergeMappingStep(expectedMapping, b -> b.field("type", "string") - .startObject("fields") - .startObject("foo") - .field("type", "string") - .field("analyzer", "standard") - .endObject() - .endObject()); - mergeMappingStep(expectedMapping, b -> b.field("type", "text").field("fielddata", true) - .startObject("fields") - .startObject("foo") - .field("type", "string") - .field("analyzer", "standard") - .endObject() - .endObject()); - - Map expectedBar = new HashMap<>(); - expectedFields.put("bar", expectedBar); - expectedBar.put("type", "string"); - expectedBar.put("analyzer", "whitespace"); - mergeMappingStep(expectedMapping, b -> b.field("type", "string") - .startObject("fields") - .startObject("foo") - .field("type", "string") - .field("analyzer", "standard") - .endObject() - .startObject("bar") - .field("type", "string") - .field("analyzer", "whitespace") - .endObject() - .endObject()); - mergeMappingStep(expectedMapping, b -> b.field("type", "text").field("fielddata", true) - .startObject("fields") - .startObject("foo") - .field("type", "string") - .field("analyzer", "standard") - .endObject() - .startObject("bar") - .field("type", "string") - .field("analyzer", "whitespace") - .endObject() - .endObject()); - } - - public void testMergeTextIntoStringWithCopyTo() throws IOException { - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("copy_to", singletonList("another_field")); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("copy_to", "another_field")); - mergeMappingStep(expectedMapping, b -> b.field("type", "text").field("copy_to", "another_field").field("fielddata", true)); - } - - public void testMergeTextIntoStringWithFileddataDisabled() throws IOException { - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("fielddata", false); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("fielddata", false)); - 
mergeMappingStep(expectedMapping, b -> b.field("type", "text")); - } - - public void testMergeTextIntoStringWithEagerGlobalOrdinals() throws IOException { - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("eager_global_ordinals", true); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").startObject("fielddata") - .field("format", "pagedbytes") - .field("loading", "eager_global_ordinals") - .endObject()); - mergeMappingStep(expectedMapping, b -> b.field("type", "text").field("fielddata", true).field("eager_global_ordinals", true)); - } - - public void testMergeTextIntoStringWithFielddataFrequencyFilter() throws IOException { - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - Map fielddataFrequencyFilter = new HashMap<>(); - expectedMapping.put("fielddata_frequency_filter", fielddataFrequencyFilter); - fielddataFrequencyFilter.put("min", 0.001); - fielddataFrequencyFilter.put("max", 0.1); - fielddataFrequencyFilter.put("min_segment_size", 100); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").startObject("fielddata") - .field("format", "pagedbytes") - .startObject("filter") - .startObject("frequency") - .field("min", 0.001) - .field("max", 0.1) - .field("min_segment_size", 100) - .endObject() - .endObject() - .endObject()); - mergeMappingStep(expectedMapping, b -> b.field("type", "text").field("fielddata", true) - .startObject("fielddata_frequency_filter") - .field("min", 0.001) - .field("max", 0.1) - .field("min_segment_size", 100) - .endObject()); - } - - public void testMergeTextIntoStringWithIncludeInAll() throws IOException { - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("include_in_all", false); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("include_in_all", false)); - mergeMappingStep(expectedMapping, b -> b.field("type", "text").field("include_in_all", false).field("fielddata", true)); - } - - public void testMergeTextIntoStringWithSearchQuoteAnayzer() throws IOException { - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("analyzer", "standard"); - expectedMapping.put("search_analyzer", "whitespace"); - expectedMapping.put("search_quote_analyzer", "keyword"); - mergeMappingStep(expectedMapping, b -> b - .field("type", "string") - .field("analyzer", "standard") - .field("search_analyzer", "whitespace") - .field("search_quote_analyzer", "keyword")); - mergeMappingStep(expectedMapping, b -> b - .field("type", "text") - .field("analyzer", "standard") - .field("search_analyzer", "whitespace") - .field("search_quote_analyzer", "keyword") - .field("fielddata", true)); - } - - public void testMergeTextIntoStringWithIndexOptions() throws IOException { - String indexOptions = randomIndexOptions(); - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - if (false == "positions".equals(indexOptions)) { - expectedMapping.put("index_options", indexOptions); - } - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("index_options", indexOptions)); - mergeMappingStep(expectedMapping, b -> b.field("type", "text").field("index_options", indexOptions).field("fielddata", true)); - } - - public void testMergeTextIntoStringWithPositionIncrementGap() throws IOException { - int positionIncrementGap = between(0, 10000); - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - 
expectedMapping.put("position_increment_gap", positionIncrementGap); - mergeMappingStep(expectedMapping, b -> b - .field("type", "string") - .field("position_increment_gap", positionIncrementGap)); - mergeMappingStep(expectedMapping, b -> b - .field("type", "text") - .field("position_increment_gap", positionIncrementGap) - .field("fielddata", true)); - } - - public void testMergeStringIntoStringWithSimilarity() throws IOException { - Map expectedMapping = new HashMap<>(); - expectedMapping.put("type", "string"); - expectedMapping.put("similarity", "BM25"); - mergeMappingStep(expectedMapping, b -> b.field("type", "string").field("similarity", "BM25")); - mergeMappingStep(expectedMapping, b -> b.field("type", "text").field("similarity", "BM25").field("fielddata", true)); - } - - private interface FieldBuilder { - void populateMappingForField(XContentBuilder b) throws IOException; - } - private void mergeMappingStep(Map expectedMapping, FieldBuilder fieldBuilder) throws IOException { - XContentBuilder b = mappingForTestField(fieldBuilder); - if (logger.isInfoEnabled()) { - logger.info("--> Updating mapping to {}", b.string()); - } - assertAcked(client().admin().indices().preparePutMapping("test").setType("test_type").setSource(b)); - GetMappingsResponse response = client().admin().indices().prepareGetMappings("test").get(); - ImmutableOpenMap index = response.getMappings().get("test"); - assertNotNull("mapping for index not found", index); - MappingMetaData type = index.get("test_type"); - assertNotNull("mapping for type not found", type); - Map properties = (Map) type.sourceAsMap().get("properties"); - assertEquals(expectedMapping, properties.get("test_field")); - } - - private XContentBuilder mappingForTestField(FieldBuilder fieldBuilder) throws IOException { - XContentBuilder b = JsonXContent.contentBuilder(); - b.startObject(); { - b.startObject("test_type"); { - b.startObject("properties"); { - b.startObject("test_field"); { - fieldBuilder.populateMappingForField(b); - } - b.endObject(); - } - b.endObject(); - } - b.endObject(); - } - return b.endObject(); - } - - private String randomIndexOptions() { - IndexOptions options = randomValueOtherThan(IndexOptions.NONE, () -> randomFrom(IndexOptions.values())); - switch (options) { - case DOCS: - return "docs"; - case DOCS_AND_FREQS: - return "freqs"; - case DOCS_AND_FREQS_AND_POSITIONS: - return "positions"; - case DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS: - return "offsets"; - default: - throw new IllegalArgumentException("Unknown options [" + options + "]"); - } - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/LegacyTokenCountFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/LegacyTokenCountFieldMapperTests.java deleted file mode 100644 index 125c6fbc830..00000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/LegacyTokenCountFieldMapperTests.java +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.analysis.CannedTokenStream; -import org.apache.lucene.analysis.MockTokenizer; -import org.apache.lucene.analysis.Token; -import org.apache.lucene.analysis.TokenStream; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.DocumentMapperParser; -import org.elasticsearch.index.mapper.LegacyTokenCountFieldMapper; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; - -import static org.hamcrest.Matchers.equalTo; - -/** - * Test for {@link LegacyTokenCountFieldMapper}. - */ -public class LegacyTokenCountFieldMapperTests extends ESSingleNodeTestCase { - - private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build(); - - @Override - protected Collection> getPlugins() { - return pluginList(InternalSettingsPlugin.class); - } - - public void testMerge() throws IOException { - String stage1Mapping = XContentFactory.jsonBuilder().startObject() - .startObject("person") - .startObject("properties") - .startObject("tc") - .field("type", "token_count") - .field("analyzer", "keyword") - .endObject() - .endObject() - .endObject().endObject().string(); - MapperService mapperService = createIndex("test", BW_SETTINGS).mapperService(); - DocumentMapper stage1 = mapperService.merge("person", new CompressedXContent(stage1Mapping), MapperService.MergeReason.MAPPING_UPDATE, false); - - String stage2Mapping = XContentFactory.jsonBuilder().startObject() - .startObject("person") - .startObject("properties") - .startObject("tc") - .field("type", "token_count") - .field("analyzer", "standard") - .endObject() - .endObject() - .endObject().endObject().string(); - DocumentMapper stage2 = mapperService.merge("person", new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE, false); - - // previous mapper has not been modified - assertThat(((LegacyTokenCountFieldMapper) stage1.mappers().smartNameFieldMapper("tc")).analyzer(), equalTo("keyword")); - // but the new one has the change - assertThat(((LegacyTokenCountFieldMapper) stage2.mappers().smartNameFieldMapper("tc")).analyzer(), equalTo("standard")); - } - - public void testCountPositions() throws IOException { - // We're looking to make sure that we: - Token t1 = new Token(); // Don't count tokens without an increment - t1.setPositionIncrement(0); - Token t2 = new Token(); - t2.setPositionIncrement(1); // Count normal tokens with one increment - Token t3 = new Token(); - 
t3.setPositionIncrement(2); // Count funny tokens with more than one increment - int finalTokenIncrement = 4; // Count the final token increment on the rare token streams that have them - Token[] tokens = new Token[] {t1, t2, t3}; - Collections.shuffle(Arrays.asList(tokens), random()); - final TokenStream tokenStream = new CannedTokenStream(finalTokenIncrement, 0, tokens); - // TODO: we have no CannedAnalyzer? - Analyzer analyzer = new Analyzer() { - @Override - public TokenStreamComponents createComponents(String fieldName) { - return new TokenStreamComponents(new MockTokenizer(), tokenStream); - } - }; - assertThat(LegacyTokenCountFieldMapper.countPositions(analyzer, "", ""), equalTo(7)); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java b/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java index 4eaf58a602f..a6270dfc953 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/MapperServiceTests.java @@ -119,7 +119,7 @@ public class MapperServiceTests extends ESSingleNodeTestCase { Function<String, String> mapping = type -> { try { return XContentFactory.jsonBuilder().startObject().startObject(type).startObject("properties") - .startObject("field1").field("type", "string") + .startObject("field1").field("type", "keyword") .endObject().endObject().endObject().endObject().string(); } catch (IOException e) { throw new UncheckedIOException(e); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/MultiFieldCopyToMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/MultiFieldCopyToMapperTests.java index 8ef5bffba50..5a5e2ddb509 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/MultiFieldCopyToMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/MultiFieldCopyToMapperTests.java @@ -20,19 +20,13 @@ package org.elasticsearch.index.mapper; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.MapperTestUtils; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.VersionUtils; import java.io.IOException; -import java.util.ArrayList; -import java.util.List; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.core.IsEqual.equalTo; @@ -41,23 +35,15 @@ public class MultiFieldCopyToMapperTests extends ESTestCase { public void testExceptionForCopyToInMultiFields() throws IOException { XContentBuilder mapping = createMappinmgWithCopyToInMultiField(); - Tuple<List<Version>, List<Version>> versionsWithAndWithoutExpectedExceptions = versionsWithAndWithoutExpectedExceptions(); // first check that for newer versions we throw an exception if copy_to is found within a multi field - Version indexVersion = randomFrom(versionsWithAndWithoutExpectedExceptions.v1()); - MapperService mapperService = MapperTestUtils.newMapperService(createTempDir(), Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, indexVersion).build()); + MapperService mapperService = MapperTestUtils.newMapperService(createTempDir(), Settings.EMPTY); try { mapperService.parse("type", new CompressedXContent(mapping.string()), true); fail("Parsing should throw an exception because the mapping contains a copy_to in a multi
field"); } catch (MapperParsingException e) { assertThat(e.getMessage(), equalTo("copy_to in multi fields is not allowed. Found the copy_to in field [c] which is within a multi field.")); } - - // now test that with an older version the parsing just works - indexVersion = randomFrom(versionsWithAndWithoutExpectedExceptions.v2()); - mapperService = MapperTestUtils.newMapperService(createTempDir(), Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, indexVersion).build()); - DocumentMapper documentMapper = mapperService.parse("type", new CompressedXContent(mapping.string()), true); - assertFalse(documentMapper.mapping().toString().contains("copy_to")); } private static XContentBuilder createMappinmgWithCopyToInMultiField() throws IOException { @@ -83,20 +69,4 @@ public class MultiFieldCopyToMapperTests extends ESTestCase { return mapping; } - // returns a tuple where - // v1 is a list of versions for which we expect an exception when a copy_to in multi fields is found and - // v2 is older versions where we throw no exception and we just log a warning - private static Tuple, List> versionsWithAndWithoutExpectedExceptions() { - List versionsWithException = new ArrayList<>(); - List versionsWithoutException = new ArrayList<>(); - for (Version version : VersionUtils.allReleasedVersions()) { - if (version.after(Version.V_2_1_0) || - (version.after(Version.V_2_0_1) && version.before(Version.V_2_1_0))) { - versionsWithException.add(version); - } else { - versionsWithoutException.add(version); - } - } - return new Tuple<>(versionsWithException, versionsWithoutException); - } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java index 5de43f5958a..f639b9c2041 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/NumberFieldMapperTests.java @@ -21,58 +21,23 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.DocumentMapperParser; -import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.VersionUtils; -import org.junit.Before; import java.io.IOException; import java.util.Arrays; -import java.util.Collection; import java.util.HashSet; -import java.util.Set; -import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom; import static org.hamcrest.Matchers.containsString; -public class NumberFieldMapperTests extends ESSingleNodeTestCase { +public class NumberFieldMapperTests extends AbstractNumericFieldMapperTestCase { - private static final Set TYPES = new HashSet<>(Arrays.asList("byte", "short", 
"integer", "long", "float", "double")); - - IndexService indexService; - DocumentMapperParser parser; - - @Before - public void before() { - indexService = createIndex("test"); - parser = indexService.mapperService().documentMapperParser(); - } - - public void testDefaults() throws Exception { - for (String type : TYPES) { - doTestDefaults(type); - } + @Override + protected void setTypeList() { + TYPES = new HashSet<>(Arrays.asList("byte", "short", "integer", "long", "float", "double")); } @Override - protected Collection> getPlugins() { - return pluginList(InternalSettingsPlugin.class); - } - public void doTestDefaults(String type) throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", type).endObject().endObject() @@ -99,12 +64,7 @@ public class NumberFieldMapperTests extends ESSingleNodeTestCase { assertFalse(dvField.fieldType().stored()); } - public void testNotIndexed() throws Exception { - for (String type : TYPES) { - doTestNotIndexed(type); - } - } - + @Override public void doTestNotIndexed(String type) throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", type).field("index", false).endObject().endObject() @@ -126,12 +86,7 @@ public class NumberFieldMapperTests extends ESSingleNodeTestCase { assertEquals(DocValuesType.SORTED_NUMERIC, dvField.fieldType().docValuesType()); } - public void testNoDocValues() throws Exception { - for (String type : TYPES) { - doTestNoDocValues(type); - } - } - + @Override public void doTestNoDocValues(String type) throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", type).field("doc_values", false).endObject().endObject() @@ -154,12 +109,7 @@ public class NumberFieldMapperTests extends ESSingleNodeTestCase { assertEquals(123, pointField.numericValue().doubleValue(), 0d); } - public void testStore() throws Exception { - for (String type : TYPES) { - doTestStore(type); - } - } - + @Override public void doTestStore(String type) throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", type).field("store", true).endObject().endObject() @@ -187,12 +137,7 @@ public class NumberFieldMapperTests extends ESSingleNodeTestCase { assertEquals(123, storedField.numericValue().doubleValue(), 0d); } - public void testCoerce() throws Exception { - for (String type : TYPES) { - doTestCoerce(type); - } - } - + @Override public void doTestCoerce(String type) throws IOException { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", type).endObject().endObject() @@ -239,7 +184,7 @@ public class NumberFieldMapperTests extends ESSingleNodeTestCase { } } - public void doTestIgnoreMalformed(String type) throws IOException { + private void doTestIgnoreMalformed(String type) throws IOException { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", type).endObject().endObject() .endObject().endObject().string(); @@ -272,12 +217,7 @@ public class NumberFieldMapperTests extends ESSingleNodeTestCase { assertEquals(0, fields.length); } - public void testIncludeInAll() throws Exception { - 
for (String type : TYPES) { - doTestIncludeInAll(type); - } - } - + @Override public void doTestIncludeInAll(String type) throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", type).endObject().endObject() @@ -333,13 +273,8 @@ public class NumberFieldMapperTests extends ESSingleNodeTestCase { } } - public void testNullValue() throws IOException { - for (String type : TYPES) { - doTestNullValue(type); - } - } - - private void doTestNullValue(String type) throws IOException { + @Override + protected void doTestNullValue(String type) throws IOException { String mapping = XContentFactory.jsonBuilder().startObject() .startObject("type") .startObject("properties") @@ -406,19 +341,5 @@ public class NumberFieldMapperTests extends ESSingleNodeTestCase { ); assertThat(e.getMessage(), containsString("name cannot be empty string")); } - - // before 5.x - Version oldVersion = VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_5); - Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, oldVersion).build(); - indexService = createIndex("test_old", oldIndexSettings); - parser = indexService.mapperService().documentMapperParser(); - for (String type : TYPES) { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("").field("type", type).endObject().endObject() - .endObject().endObject().string(); - - DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - assertEquals(mapping, defaultMapper.mappingSource().string()); - } } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java index 68959ccc684..e974a02943b 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java @@ -207,23 +207,6 @@ public class ObjectMapperTests extends ESSingleNodeTestCase { createIndex("test").mapperService().documentMapperParser().parse("", new CompressedXContent(mapping)); }); assertThat(e.getMessage(), containsString("name cannot be empty string")); - - // empty name allowed in index created before 5.0 - Version oldVersion = VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_5); - Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, oldVersion).build(); - DocumentMapperParser parser = createIndex("test_old", oldIndexSettings).mapperService().documentMapperParser(); - - DocumentMapper defaultMapper = parser.parse("", new CompressedXContent(mapping)); - String downgradedMapping = XContentFactory.jsonBuilder().startObject() - .startObject("") - .startObject("properties") - .startObject("name") - .field("type", "string") - .field("fielddata", false) - .endObject() - .endObject() - .endObject().endObject().string(); - assertEquals(downgradedMapping, defaultMapper.mappingSource().string()); } @Override diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ParentFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/ParentFieldMapperTests.java index 9f026c59922..9ee9ed16bd6 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/ParentFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/ParentFieldMapperTests.java @@ -19,10 +19,7 @@ 
package org.elasticsearch.index.mapper; import org.apache.lucene.analysis.standard.StandardAnalyzer; -import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; @@ -46,8 +43,6 @@ import java.util.Set; import static java.util.Collections.emptyList; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; public class ParentFieldMapperTests extends ESSingleNodeTestCase { @@ -130,41 +125,4 @@ public class ParentFieldMapperTests extends ESSingleNodeTestCase { return numFieldWithParentPrefix; } - public void testPost2Dot0LazyLoading() { - ParentFieldMapper.Builder builder = new ParentFieldMapper.Builder("child"); - builder.type("parent"); - builder.eagerGlobalOrdinals(false); - - ParentFieldMapper parentFieldMapper = builder.build(new Mapper.BuilderContext(post2Dot0IndexSettings(), new ContentPath(0))); - - assertThat(parentFieldMapper.getParentJoinFieldType().name(), equalTo("_parent#child")); - assertThat(parentFieldMapper.getParentJoinFieldType().hasDocValues(), is(true)); - assertThat(parentFieldMapper.getParentJoinFieldType().docValuesType(), equalTo(DocValuesType.SORTED)); - - assertThat(parentFieldMapper.fieldType().name(), equalTo("_parent#parent")); - assertThat(parentFieldMapper.fieldType().eagerGlobalOrdinals(), equalTo(false)); - assertThat(parentFieldMapper.fieldType().hasDocValues(), is(true)); - assertThat(parentFieldMapper.fieldType().docValuesType(), equalTo(DocValuesType.SORTED)); - } - - public void testPost2Dot0EagerLoading() { - ParentFieldMapper.Builder builder = new ParentFieldMapper.Builder("child"); - builder.type("parent"); - builder.eagerGlobalOrdinals(true); - - ParentFieldMapper parentFieldMapper = builder.build(new Mapper.BuilderContext(post2Dot0IndexSettings(), new ContentPath(0))); - - assertThat(parentFieldMapper.getParentJoinFieldType().name(), equalTo("_parent#child")); - assertThat(parentFieldMapper.getParentJoinFieldType().hasDocValues(), is(true)); - assertThat(parentFieldMapper.getParentJoinFieldType().docValuesType(), equalTo(DocValuesType.SORTED)); - - assertThat(parentFieldMapper.fieldType().name(), equalTo("_parent#parent")); - assertThat(parentFieldMapper.fieldType().eagerGlobalOrdinals(), equalTo(true)); - assertThat(parentFieldMapper.fieldType().hasDocValues(), is(true)); - assertThat(parentFieldMapper.fieldType().docValuesType(), equalTo(DocValuesType.SORTED)); - } - - private static Settings post2Dot0IndexSettings() { - return Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_1_0).build(); - } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java new file mode 100644 index 00000000000..532be6a9b6c --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/mapper/RangeFieldMapperTests.java @@ -0,0 +1,370 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.index.mapper; + +import org.apache.lucene.index.IndexableField; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; + +import java.io.IOException; +import java.util.Arrays; +import java.util.HashSet; + +import static org.elasticsearch.index.query.RangeQueryBuilder.GT_FIELD; +import static org.elasticsearch.index.query.RangeQueryBuilder.GTE_FIELD; +import static org.elasticsearch.index.query.RangeQueryBuilder.LT_FIELD; +import static org.elasticsearch.index.query.RangeQueryBuilder.LTE_FIELD; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.containsString; + +public class RangeFieldMapperTests extends AbstractNumericFieldMapperTestCase { + private static String FROM_DATE = "2016-10-31"; + private static String TO_DATE = "2016-11-01 20:00:00"; + private static int FROM = 5; + private static String FROM_STR = FROM + ""; + private static int TO = 10; + private static String TO_STR = TO + ""; + private static String DATE_FORMAT = "yyyy-MM-dd HH:mm:ss||yyyy-MM-dd||epoch_millis"; + + @Override + protected void setTypeList() { + TYPES = new HashSet<>(Arrays.asList("date_range", "float_range", "double_range", "integer_range", "long_range")); + } + + private Object getFrom(String type) { + if (type.equals("date_range")) { + return FROM_DATE; + } + return random().nextBoolean() ? FROM : FROM_STR; + } + + private String getFromField() { + return random().nextBoolean() ? GT_FIELD.getPreferredName() : GTE_FIELD.getPreferredName(); + } + + private String getToField() { + return random().nextBoolean() ? LT_FIELD.getPreferredName() : LTE_FIELD.getPreferredName(); + } + + private Object getTo(String type) { + if (type.equals("date_range")) { + return TO_DATE; + } + return random().nextBoolean() ? 
TO : TO_STR; + } + + private Number getMax(String type) { + if (type.equals("date_range") || type.equals("long_range")) { + return Long.MAX_VALUE; + } else if (type.equals("integer_range")) { + return Integer.MAX_VALUE; + } else if (type.equals("float_range")) { + return Float.POSITIVE_INFINITY; + } + return Double.POSITIVE_INFINITY; + } + + @Override + public void doTestDefaults(String type) throws Exception { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", type); + if (type.equals("date_range")) { + mapping = mapping.field("format", DATE_FORMAT); + } + mapping = mapping.endObject().endObject().endObject().endObject(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string())); + assertEquals(mapping.string(), mapper.mappingSource().toString()); + + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .startObject("field") + .field(getFromField(), getFrom(type)) + .field(getToField(), getTo(type)) + .endObject() + .endObject().bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(1, fields.length); + IndexableField pointField = fields[0]; + assertEquals(2, pointField.fieldType().pointDimensionCount()); + assertFalse(pointField.fieldType().stored()); + } + + @Override + protected void doTestNotIndexed(String type) throws Exception { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", type).field("index", false); + if (type.equals("date_range")) { + mapping = mapping.field("format", DATE_FORMAT); + } + mapping = mapping.endObject().endObject().endObject().endObject(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string())); + assertEquals(mapping.string(), mapper.mappingSource().toString()); + + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .startObject("field") + .field(getFromField(), getFrom(type)) + .field(getToField(), getTo(type)) + .endObject() + .endObject().bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(0, fields.length); + } + + @Override + protected void doTestNoDocValues(String type) throws Exception { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", type).field("doc_values", false); + if (type.equals("date_range")) { + mapping = mapping.field("format", DATE_FORMAT); + } + mapping = mapping.endObject().endObject().endObject().endObject(); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string())); + assertEquals(mapping.string(), mapper.mappingSource().toString()); + + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .startObject("field") + .field(getFromField(), getFrom(type)) + .field(getToField(), getTo(type)) + .endObject() + .endObject().bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(1, fields.length); + IndexableField pointField = fields[0]; + assertEquals(2, pointField.fieldType().pointDimensionCount()); + } + + @Override + protected void doTestStore(String type) throws Exception { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + 
.startObject("properties").startObject("field").field("type", type).field("store", true); + if (type.equals("date_range")) { + mapping = mapping.field("format", DATE_FORMAT); + } + mapping = mapping.endObject().endObject().endObject().endObject(); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string())); + assertEquals(mapping.string(), mapper.mappingSource().toString()); + + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .startObject("field") + .field(getFromField(), getFrom(type)) + .field(getToField(), getTo(type)) + .endObject() + .endObject().bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(2, fields.length); + IndexableField pointField = fields[0]; + assertEquals(2, pointField.fieldType().pointDimensionCount()); + IndexableField storedField = fields[1]; + assertTrue(storedField.fieldType().stored()); + assertThat(storedField.stringValue(), containsString(type.equals("date_range") ? "1477872000000" : "5")); + } + + @Override + public void doTestCoerce(String type) throws IOException { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", type); + if (type.equals("date_range")) { + mapping = mapping.field("format", DATE_FORMAT); + } + mapping = mapping.endObject().endObject().endObject().endObject(); + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string())); + + assertEquals(mapping.string(), mapper.mappingSource().toString()); + + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .startObject("field") + .field(getFromField(), getFrom(type)) + .field(getToField(), getTo(type)) + .endObject() + .endObject().bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(1, fields.length); + IndexableField pointField = fields[0]; + assertEquals(2, pointField.fieldType().pointDimensionCount()); + + mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", type).field("coerce", false).endObject().endObject() + .endObject().endObject(); + DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping.string())); + + assertEquals(mapping.string(), mapper2.mappingSource().toString()); + + ThrowingRunnable runnable = () -> mapper2.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .startObject("field") + .field(getFromField(), "5.2") + .field(getToField(), "10") + .endObject() + .endObject().bytes()); + MapperParsingException e = expectThrows(MapperParsingException.class, runnable); + assertThat(e.getCause().getMessage(), anyOf(containsString("passed as String"), containsString("failed to parse date"))); + } + + @Override + protected void doTestIncludeInAll(String type) throws Exception { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", type); + if (type.equals("date_range")) { + mapping = mapping.field("format", DATE_FORMAT); + } + mapping = mapping.endObject().endObject().endObject().endObject(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string())); + + assertEquals(mapping.string(), mapper.mappingSource().toString()); + + ParsedDocument doc = mapper.parse("test", "type", "1", 
XContentFactory.jsonBuilder() + .startObject() + .startObject("field") + .field(getFromField(), getFrom(type)) + .field(getToField(), getTo(type)) + .endObject() + .endObject().bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("_all"); + assertEquals(1, fields.length); + + assertThat(fields[0].stringValue(), containsString(type.equals("date_range") ? "1477872000000" : "5")); + + mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", type); + if (type.equals("date_range")) { + mapping = mapping.field("format", DATE_FORMAT); + } + mapping = mapping.field("include_in_all", false).endObject().endObject() + .endObject().endObject(); + + mapper = parser.parse("type", new CompressedXContent(mapping.string())); + + assertEquals(mapping.string(), mapper.mappingSource().toString()); + + doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .startObject("field") + .field(getFromField(), getFrom(type)) + .field(getToField(), getTo(type)) + .endObject() + .endObject().bytes()); + + fields = doc.rootDoc().getFields("_all"); + assertEquals(0, fields.length); + } + + @Override + protected void doTestNullValue(String type) throws IOException { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", type).field("store", true); + if (type.equals("date_range")) { + mapping = mapping.field("format", DATE_FORMAT); + } + mapping = mapping.endObject().endObject().endObject().endObject(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string())); + assertEquals(mapping.string(), mapper.mappingSource().toString()); + + // test null value for min and max + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .startObject("field") + .nullField(getFromField()) + .nullField(getToField()) + .endObject() + .endObject().bytes()); + assertEquals(2, doc.rootDoc().getFields("field").length); + IndexableField[] fields = doc.rootDoc().getFields("field"); + IndexableField storedField = fields[1]; + assertThat(storedField.stringValue(), containsString(type.equals("date_range") ? Long.MAX_VALUE+"" : getMax(type)+"")); + + // test null max value + doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .startObject("field") + .field(getFromField(), getFrom(type)) + .nullField(getToField()) + .endObject() + .endObject().bytes()); + + fields = doc.rootDoc().getFields("field"); + assertEquals(2, fields.length); + IndexableField pointField = fields[0]; + assertEquals(2, pointField.fieldType().pointDimensionCount()); + assertFalse(pointField.fieldType().stored()); + storedField = fields[1]; + assertTrue(storedField.fieldType().stored()); + assertThat(storedField.stringValue(), containsString(type.equals("date_range") ? 
"1477872000000" : "5")); + assertThat(storedField.stringValue(), containsString(getMax(type) + "")); + } + + public void testNoBounds() throws Exception { + for (String type : TYPES) { + doTestNoBounds(type); + } + } + + public void doTestNoBounds(String type) throws IOException { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", type).field("store", true); + if (type.equals("date_range")) { + mapping = mapping.field("format", DATE_FORMAT); + } + mapping = mapping.endObject().endObject().endObject().endObject(); + + DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string())); + assertEquals(mapping.string(), mapper.mappingSource().toString()); + + // test no bounds specified + ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder() + .startObject() + .startObject("field") + .endObject() + .endObject().bytes()); + + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(2, fields.length); + IndexableField pointField = fields[0]; + assertEquals(2, pointField.fieldType().pointDimensionCount()); + assertFalse(pointField.fieldType().stored()); + IndexableField storedField = fields[1]; + assertTrue(storedField.fieldType().stored()); + assertThat(storedField.stringValue(), containsString(type.equals("date_range") ? Long.MAX_VALUE+"" : getMax(type)+"")); + assertThat(storedField.stringValue(), containsString(getMax(type) + "")); + } + + public void testIllegalArguments() throws Exception { + XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("field").field("type", RangeFieldMapper.RangeType.INTEGER.name) + .field("format", DATE_FORMAT).endObject().endObject().endObject().endObject(); + + ThrowingRunnable runnable = () -> parser.parse("type", new CompressedXContent(mapping.string())); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, runnable); + assertThat(e.getMessage(), containsString("should not define a dateTimeFormatter")); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java new file mode 100644 index 00000000000..b4bdea30c30 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java @@ -0,0 +1,185 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.index.mapper; + +import com.carrotsearch.randomizedtesting.generators.RandomPicks; +import org.apache.lucene.document.DoubleRangeField; +import org.apache.lucene.document.FloatRangeField; +import org.apache.lucene.document.IntRangeField; +import org.apache.lucene.document.LongRangeField; +import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.search.Query; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.geo.ShapeRelation; +import org.elasticsearch.common.joda.Joda; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.mapper.RangeFieldMapper.RangeType; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.test.IndexSettingsModule; +import org.joda.time.DateTime; +import org.junit.Before; + +import java.util.Locale; + +public class RangeFieldTypeTests extends FieldTypeTestCase { + RangeType type; + protected static String FIELDNAME = "field"; + protected static int DISTANCE = 10; + private static long nowInMillis; + + @Before + public void setupProperties() { + type = RandomPicks.randomFrom(random(), RangeType.values()); + nowInMillis = randomPositiveLong(); + if (type == RangeType.DATE) { + addModifier(new Modifier("format", true) { + @Override + public void modify(MappedFieldType ft) { + ((RangeFieldMapper.RangeFieldType) ft).setDateTimeFormatter(Joda.forPattern("basic_week_date", Locale.ROOT)); + } + }); + addModifier(new Modifier("locale", true) { + @Override + public void modify(MappedFieldType ft) { + ((RangeFieldMapper.RangeFieldType) ft).setDateTimeFormatter(Joda.forPattern("date_optional_time", Locale.CANADA)); + } + }); + } + } + + @Override + protected RangeFieldMapper.RangeFieldType createDefaultFieldType() { + return new RangeFieldMapper.RangeFieldType(type); + } + + public void testRangeQuery() throws Exception { + Settings indexSettings = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(randomAsciiOfLengthBetween(1, 10), indexSettings); + QueryShardContext context = new QueryShardContext(0, idxSettings, null, null, null, null, null, null, null, null, null, + () -> nowInMillis); + RangeFieldMapper.RangeFieldType ft = new RangeFieldMapper.RangeFieldType(type); + ft.setName(FIELDNAME); + ft.setIndexOptions(IndexOptions.DOCS); + + ShapeRelation relation = RandomPicks.randomFrom(random(), ShapeRelation.values()); + boolean includeLower = random().nextBoolean(); + boolean includeUpper = random().nextBoolean(); + Object from = nextFrom(); + Object to = nextTo(from); + + assertEquals(getExpectedRangeQuery(relation, from, to, includeLower, includeUpper), + ft.rangeQuery(from, to, includeLower, includeUpper, relation, context)); + } + + private Query getExpectedRangeQuery(ShapeRelation relation, Object from, Object to, boolean includeLower, boolean includeUpper) { + switch (type) { + case DATE: + return getDateRangeQuery(relation, (DateTime)from, (DateTime)to, includeLower, includeUpper); + case INTEGER: + return getIntRangeQuery(relation, (int)from, (int)to, includeLower, includeUpper); + case LONG: + return getLongRangeQuery(relation, (long)from, (long)to, includeLower, includeUpper); + case DOUBLE: + return getDoubleRangeQuery(relation, (double)from, (double)to, includeLower, includeUpper); + default: + return getFloatRangeQuery(relation, 
(float)from, (float)to, includeLower, includeUpper); + } + } + + private Query getDateRangeQuery(ShapeRelation relation, DateTime from, DateTime to, boolean includeLower, boolean includeUpper) { + return getLongRangeQuery(relation, from.getMillis(), to.getMillis(), includeLower, includeUpper); + } + + private Query getIntRangeQuery(ShapeRelation relation, int from, int to, boolean includeLower, boolean includeUpper) { + int[] lower = new int[] {from + (includeLower ? 0 : 1)}; + int[] upper = new int[] {to - (includeUpper ? 0 : 1)}; + if (relation == ShapeRelation.WITHIN) { + return IntRangeField.newWithinQuery(FIELDNAME, lower, upper); + } else if (relation == ShapeRelation.CONTAINS) { + return IntRangeField.newContainsQuery(FIELDNAME, lower, upper); + } + return IntRangeField.newIntersectsQuery(FIELDNAME, lower, upper); + } + + private Query getLongRangeQuery(ShapeRelation relation, long from, long to, boolean includeLower, boolean includeUpper) { + long[] lower = new long[] {from + (includeLower ? 0 : 1)}; + long[] upper = new long[] {to - (includeUpper ? 0 : 1)}; + if (relation == ShapeRelation.WITHIN) { + return LongRangeField.newWithinQuery(FIELDNAME, lower, upper); + } else if (relation == ShapeRelation.CONTAINS) { + return LongRangeField.newContainsQuery(FIELDNAME, lower, upper); + } + return LongRangeField.newIntersectsQuery(FIELDNAME, lower, upper); + } + + private Query getFloatRangeQuery(ShapeRelation relation, float from, float to, boolean includeLower, boolean includeUpper) { + float[] lower = new float[] {includeLower ? from : Math.nextUp(from)}; + float[] upper = new float[] {includeUpper ? to : Math.nextDown(to)}; + if (relation == ShapeRelation.WITHIN) { + return FloatRangeField.newWithinQuery(FIELDNAME, lower, upper); + } else if (relation == ShapeRelation.CONTAINS) { + return FloatRangeField.newContainsQuery(FIELDNAME, lower, upper); + } + return FloatRangeField.newIntersectsQuery(FIELDNAME, lower, upper); + } + + private Query getDoubleRangeQuery(ShapeRelation relation, double from, double to, boolean includeLower, boolean includeUpper) { + double[] lower = new double[] {includeLower ? from : Math.nextUp(from)}; + double[] upper = new double[] {includeUpper ? 
to : Math.nextDown(to)}; + if (relation == ShapeRelation.WITHIN) { + return DoubleRangeField.newWithinQuery(FIELDNAME, lower, upper); + } else if (relation == ShapeRelation.CONTAINS) { + return DoubleRangeField.newContainsQuery(FIELDNAME, lower, upper); + } + return DoubleRangeField.newIntersectsQuery(FIELDNAME, lower, upper); + } + + private Object nextFrom() { + switch (type) { + case INTEGER: + return (int)(random().nextInt() * 0.5 - DISTANCE); + case DATE: + return DateTime.now(); + case LONG: + return (long)(random().nextLong() * 0.5 - DISTANCE); + case FLOAT: + return (float)(random().nextFloat() * 0.5 - DISTANCE); + default: + return random().nextDouble() * 0.5 - DISTANCE; + } + } + + private Object nextTo(Object from) { + switch (type) { + case INTEGER: + return (Integer)from + DISTANCE; + case DATE: + return DateTime.now().plusDays(DISTANCE); + case LONG: + return (Long)from + DISTANCE; + case DOUBLE: + return (Double)from + DISTANCE; + default: + return (Float)from + DISTANCE; + } + } +} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapperTests.java index 3556cea23ad..c172217aa6c 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapperTests.java @@ -21,29 +21,17 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.DocumentMapperParser; -import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.VersionUtils; import org.junit.Before; import java.io.IOException; import java.util.Collection; -import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom; import static org.hamcrest.Matchers.containsString; public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase { @@ -52,7 +40,7 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase { DocumentMapperParser parser; @Before - public void before() { + public void setup() { indexService = createIndex("test"); parser = indexService.mapperService().documentMapperParser(); } @@ -365,14 +353,5 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase { () -> parser.parse("type", new CompressedXContent(mapping)) ); assertThat(e.getMessage(), containsString("name cannot be empty string")); - - // before 5.x - Version oldVersion = VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_5); - Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, oldVersion).build(); - indexService = createIndex("test_old", oldIndexSettings); - 
parser = indexService.mapperService().documentMapperParser(); - - DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - assertEquals(mapping, defaultMapper.mappingSource().toString()); } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java index c6f9615623c..8aa3d25aebe 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java @@ -68,18 +68,6 @@ public class SourceFieldMapperTests extends ESSingleNodeTestCase { assertThat(XContentFactory.xContentType(doc.source()), equalTo(XContentType.SMILE)); } - public void testFormatBackCompat() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_source").field("format", "json").endObject() - .endObject().endObject().string(); - Settings settings = Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_2_2_0)) - .build(); - - DocumentMapperParser parser = createIndex("test", settings).mapperService().documentMapperParser(); - parser.parse("type", new CompressedXContent(mapping)); // no exception - } - public void testIncludes() throws Exception { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("_source").array("includes", new String[]{"path1*"}).endObject() diff --git a/core/src/test/java/org/elasticsearch/index/mapper/StringFieldMapperPositionIncrementGapTests.java b/core/src/test/java/org/elasticsearch/index/mapper/StringFieldMapperPositionIncrementGapTests.java deleted file mode 100644 index 6a68c537da1..00000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/StringFieldMapperPositionIncrementGapTests.java +++ /dev/null @@ -1,182 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.mapper; - -import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.Version; -import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collection; - -import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; -import static org.elasticsearch.index.query.QueryBuilders.matchPhraseQuery; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.hamcrest.Matchers.containsString; - -/** - * Tests that position_increment_gap is read from the mapper and applies as - * expected in queries. - */ -public class StringFieldMapperPositionIncrementGapTests extends ESSingleNodeTestCase { - - @Override - protected Collection> getPlugins() { - return pluginList(InternalSettingsPlugin.class); - } - - /** - * The default position_increment_gap should be large enough that most - * "sensible" queries phrase slops won't match across values. - */ - public void testDefault() throws IOException { - assertGapIsOneHundred(client(), "test", "test"); - } - - /** - * Asserts that the post-2.0 default is being applied. - */ - public static void assertGapIsOneHundred(Client client, String indexName, String type) throws IOException { - testGap(client, indexName, type, 100); - - // No match across gap using default slop with default positionIncrementGap - assertHitCount(client.prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two")).get(), 0); - - // Nor with small-ish values - assertHitCount(client.prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(5)).get(), 0); - assertHitCount(client.prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(50)).get(), 0); - - // But huge-ish values still match - assertHitCount(client.prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(500)).get(), 1); - } - - public void testZero() throws IOException { - setupGapInMapping(0); - assertGapIsZero(client(), "test", "test"); - } - - /** - * Asserts that the pre-2.0 default has been applied or explicitly - * configured. - */ - public static void assertGapIsZero(Client client, String indexName, String type) throws IOException { - testGap(client, indexName, type, 0); - /* - * Phrases match across different values using default slop with pre-2.0 default - * position_increment_gap. 
- */ - assertHitCount(client.prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two")).get(), 1); - } - - public void testLargerThanDefault() throws IOException { - setupGapInMapping(10000); - testGap(client(), "test", "test", 10000); - } - - public void testSmallerThanDefault() throws IOException { - setupGapInMapping(2); - testGap(client(), "test", "test", 2); - } - - public void testNegativeIsError() throws IOException { - try { - setupGapInMapping(-1); - fail("Expected an error"); - } catch (MapperParsingException e) { - assertThat(ExceptionsHelper.detailedMessage(e), containsString("positions_increment_gap less than 0 aren't allowed")); - } - } - - /** - * Tests that the default actually defaults to the position_increment_gap - * configured in the analyzer. This behavior is very old and a little - * strange but not worth breaking some thought. - */ - public void testDefaultDefaultsToAnalyzer() throws IOException { - Settings settings = Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0) - .put("analysis.analyzer.gappy.type", "custom") - .put("analysis.analyzer.gappy.tokenizer", "standard") - .put("analysis.analyzer.gappy.position_increment_gap", "2") - .build(); - setupAnalyzer(settings, "gappy"); - testGap(client(), "test", "test", 2); - } - - /** - * Build an index named "test" with a field named "string" with the provided - * positionIncrementGap that uses the standard analyzer. - */ - private void setupGapInMapping(int positionIncrementGap) throws IOException { - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("properties").startObject("string"); - mapping.field("type", "string"); - mapping.field("position_increment_gap", positionIncrementGap); - mapping.endObject().endObject().endObject(); - client().admin().indices().prepareCreate("test") - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build()) - .addMapping("test", mapping) - .get(); - } - - /** - * Build an index named "test" with the provided settings and and a field - * named "string" that uses the specified analyzer and default - * position_increment_gap. 
- */ - private void setupAnalyzer(Settings settings, String analyzer) throws IOException { - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("properties").startObject("string"); - mapping.field("type", "string"); - mapping.field("analyzer", analyzer); - mapping.endObject().endObject().endObject(); - client().admin().indices().prepareCreate("test") - .addMapping("test", mapping) - .setSettings(settings) - .get(); - } - - private static void testGap(Client client, String indexName, String type, int positionIncrementGap) throws IOException { - client.prepareIndex(indexName, type, "position_gap_test").setSource("string", Arrays.asList("one", "two three")) - .setRefreshPolicy(IMMEDIATE).get(); - - // Baseline - phrase query finds matches in the same field value - assertHitCount(client.prepareSearch(indexName).setQuery(matchPhraseQuery("string", "two three")).get(), 1); - - if (positionIncrementGap > 0) { - // No match across gaps when slop < position gap - assertHitCount( - client.prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(positionIncrementGap - 1)).get(), - 0); - } - - // Match across gaps when slop >= position gap - assertHitCount(client.prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(positionIncrementGap)).get(), 1); - assertHitCount(client.prepareSearch(indexName).setQuery(matchPhraseQuery("string", "one two").slop(positionIncrementGap + 1)).get(), - 1); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/StringFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/StringFieldTypeTests.java deleted file mode 100644 index 558253c463d..00000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/StringFieldTypeTests.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.index.mapper; - -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.StringFieldMapper; -import org.junit.Before; - -public class StringFieldTypeTests extends FieldTypeTestCase { - @Override - protected MappedFieldType createDefaultFieldType() { - return new StringFieldMapper.StringFieldType(); - } - @Before - public void setupProperties() { - addModifier(new Modifier("fielddata", true) { - @Override - public void modify(MappedFieldType ft) { - StringFieldMapper.StringFieldType tft = (StringFieldMapper.StringFieldType)ft; - tft.setFielddata(tft.fielddata() == false); - } - }); - addModifier(new Modifier("fielddata_frequency_filter.min", true) { - @Override - public void modify(MappedFieldType ft) { - StringFieldMapper.StringFieldType tft = (StringFieldMapper.StringFieldType)ft; - tft.setFielddataMinFrequency(3); - } - }); - addModifier(new Modifier("fielddata_frequency_filter.max", true) { - @Override - public void modify(MappedFieldType ft) { - StringFieldMapper.StringFieldType tft = (StringFieldMapper.StringFieldType)ft; - tft.setFielddataMaxFrequency(0.2); - } - }); - addModifier(new Modifier("fielddata_frequency_filter.min_segment_size", true) { - @Override - public void modify(MappedFieldType ft) { - StringFieldMapper.StringFieldType tft = (StringFieldMapper.StringFieldType)ft; - tft.setFielddataMinSegmentSize(1000); - } - }); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/StringMappingUpgradeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/StringMappingUpgradeTests.java deleted file mode 100644 index 311bf0205ed..00000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/StringMappingUpgradeTests.java +++ /dev/null @@ -1,446 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.mapper; - -import com.carrotsearch.randomizedtesting.generators.RandomPicks; - -import org.apache.lucene.index.IndexOptions; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.DocumentMapperParser; -import org.elasticsearch.index.mapper.FieldMapper; -import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.Mapper; -import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.index.mapper.StringFieldMapper; -import org.elasticsearch.index.mapper.TextFieldMapper; -import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.instanceOf; - -public class StringMappingUpgradeTests extends ESSingleNodeTestCase { - - @Override - protected Collection> getPlugins() { - return pluginList(InternalSettingsPlugin.class); - } - - public void testUpgradeDefaults() throws IOException { - IndexService indexService = createIndex("test"); - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string").endObject().endObject() - .endObject().endObject().string(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - FieldMapper field = mapper.mappers().getMapper("field"); - assertThat(field, instanceOf(TextFieldMapper.class)); - } - - public void testUpgradeAnalyzedString() throws IOException { - IndexService indexService = createIndex("test"); - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string").field("index", "analyzed").endObject().endObject() - .endObject().endObject().string(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - FieldMapper field = mapper.mappers().getMapper("field"); - assertThat(field, instanceOf(TextFieldMapper.class)); - } - - public void testUpgradeNotAnalyzedString() throws IOException { - IndexService indexService = createIndex("test"); - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string") - .field("index", "not_analyzed").endObject().endObject() - .endObject().endObject().string(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - FieldMapper field = 
mapper.mappers().getMapper("field"); - assertThat(field, instanceOf(KeywordFieldMapper.class)); - } - - public void testUpgradeNotIndexedString() throws IOException { - IndexService indexService = createIndex("test"); - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string").field("index", "no").endObject().endObject() - .endObject().endObject().string(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - FieldMapper field = mapper.mappers().getMapper("field"); - assertThat(field, instanceOf(KeywordFieldMapper.class)); - assertEquals(IndexOptions.NONE, field.fieldType().indexOptions()); - } - - public void testUpgradeIndexOptions() throws IOException { - IndexService indexService = createIndex("test"); - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string") - .field("index_options", "offsets").endObject().endObject() - .endObject().endObject().string(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - FieldMapper field = mapper.mappers().getMapper("field"); - assertThat(field, instanceOf(TextFieldMapper.class)); - assertEquals(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS, field.fieldType().indexOptions()); - } - - public void testUpgradePositionGap() throws IOException { - IndexService indexService = createIndex("test"); - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string") - .field("position_increment_gap", 42).endObject().endObject() - .endObject().endObject().string(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - FieldMapper field = mapper.mappers().getMapper("field"); - assertThat(field, instanceOf(TextFieldMapper.class)); - assertEquals(42, field.fieldType().indexAnalyzer().getPositionIncrementGap("field")); - } - - public void testIllegalIndexValue() throws IOException { - IndexService indexService = createIndex("test"); - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field") - .field("type", "string") - .field("index", false) - .endObject() - .endObject() .endObject().endObject().string(); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> parser.parse("type", new CompressedXContent(mapping))); - assertThat(e.getMessage(), - containsString("Can't parse [index] value [false] for field [field], expected [no], [not_analyzed] or [analyzed]")); - } - - public void testNotSupportedUpgrade() throws IOException { - IndexService indexService = createIndex("test"); - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string") - .field("index", "not_analyzed").field("analyzer", "keyword").endObject().endObject() - .endObject().endObject().string(); - 
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> parser.parse("type", new CompressedXContent(mapping))); - assertThat(e.getMessage(), containsString("The [string] type is removed in 5.0")); - } - - public void testUpgradeFielddataSettings() throws IOException { - IndexService indexService = createIndex("test"); - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String format = randomFrom("paged_bytes", "disabled"); - String loading = randomFrom("lazy", "eager", "eager_global_ordinals"); - boolean keyword = random().nextBoolean(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("field") - .field("type", "string") - .field("index", keyword ? "not_analyzed" : "analyzed") - .startObject("fielddata") - .field("format", format) - .field("loading", loading) - .startObject("filter") - .startObject("frequency") - .field("min", 3) - .endObject() - .endObject() - .endObject() - .endObject() - .endObject() - .endObject().endObject().string(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - FieldMapper field = mapper.mappers().getMapper("field"); - if (keyword) { - assertThat(field, instanceOf(KeywordFieldMapper.class)); - } else { - assertThat(field, instanceOf(TextFieldMapper.class)); - TextFieldType fieldType = (TextFieldType) field.fieldType(); - assertEquals("disabled".equals(format) == false, fieldType.fielddata()); - assertEquals(3, fieldType.fielddataMinFrequency(), 0d); - assertEquals(Integer.MAX_VALUE, fieldType.fielddataMaxFrequency(), 0d); - } - assertEquals("eager_global_ordinals".equals(loading), field.fieldType().eagerGlobalOrdinals()); - } - - public void testUpgradeIgnoreAbove() throws IOException { - IndexService indexService = createIndex("test"); - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string") - .field("index", "not_analyzed").field("ignore_above", 200).endObject().endObject() - .endObject().endObject().string(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - FieldMapper field = mapper.mappers().getMapper("field"); - assertThat(field, instanceOf(KeywordFieldMapper.class)); - assertEquals(200, ((KeywordFieldMapper) field).ignoreAbove()); - } - - public void testUpgradeAnalyzer() throws IOException { - IndexService indexService = createIndex("test"); - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string") - .field("analyzer", "standard") - .field("search_analyzer", "whitespace") - .field("search_quote_analyzer", "keyword").endObject().endObject() - .endObject().endObject().string(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - FieldMapper field = mapper.mappers().getMapper("field"); - assertThat(field, instanceOf(TextFieldMapper.class)); - assertEquals("standard", field.fieldType().indexAnalyzer().name()); - assertEquals("whitespace", field.fieldType().searchAnalyzer().name()); - assertEquals("keyword", field.fieldType().searchQuoteAnalyzer().name()); - } - - public void testUpgradeTextIncludeInAll() throws IOException { - IndexService indexService = 
createIndex("test"); - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string") - .field("include_in_all", false).endObject().endObject() - .endObject().endObject().string(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - FieldMapper field = mapper.mappers().getMapper("field"); - assertThat(field, instanceOf(TextFieldMapper.class)); - assertFalse(((TextFieldMapper) field).includeInAll()); - } - - public void testUpgradeKeywordIncludeInAll() throws IOException { - IndexService indexService = createIndex("test"); - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string") - .field("index", "not_analyzed").field("include_in_all", true).endObject().endObject() - .endObject().endObject().string(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - FieldMapper field = mapper.mappers().getMapper("field"); - assertThat(field, instanceOf(KeywordFieldMapper.class)); - assertTrue(((KeywordFieldMapper) field).includeInAll()); - } - - public void testUpgradeRandomMapping() throws IOException { - final int iters = 20; - for (int i = 0; i < iters; ++i) { - doTestUpgradeRandomMapping(i); - } - } - - private void doTestUpgradeRandomMapping(int iter) throws IOException { - IndexService indexService; - boolean oldIndex = randomBoolean(); - String indexName = "test" + iter; - if (oldIndex) { - Settings settings = Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0) - .build(); - indexService = createIndex(indexName, settings); - } else { - indexService = createIndex(indexName); - } - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", "string"); - boolean keyword = randomBoolean(); - boolean hasNorms = keyword == false; - boolean shouldUpgrade = true; - if (keyword) { - mapping.field("index", randomBoolean() ? 
"not_analyzed" : "no"); - } else if (randomBoolean()) { - mapping.field("index", "analyzed"); - } - if (randomBoolean()) { - mapping.field("store", RandomPicks.randomFrom(random(), Arrays.asList("yes", "no", true, false))); - } - if (keyword && randomBoolean()) { - mapping.field("doc_values", randomBoolean()); - } - if (keyword == false && randomBoolean()) { - mapping.field("analyzer", "keyword"); - } - if (randomBoolean()) { - hasNorms = randomBoolean(); - if (randomBoolean()) { - mapping.field("omit_norms", hasNorms == false); - } else { - mapping.field("norms", Collections.singletonMap("enabled", hasNorms)); - } - } - if (randomBoolean()) { - Map fielddata = new HashMap<>(); - if (randomBoolean()) { - fielddata.put("format", randomFrom("paged_bytes", "disabled")); - } - if (randomBoolean()) { - fielddata.put("loading", randomFrom("lazy", "eager", "eager_global_ordinals")); - } - if (randomBoolean()) { - Map frequencyFilter = new HashMap<>(); - frequencyFilter.put("min", 10); - frequencyFilter.put("max", 1000); - frequencyFilter.put("min_segment_size", 10000); - } - } - if (randomBoolean()) { - mapping.startObject("fields").startObject("raw").field("type", "keyword").endObject().endObject(); - } - if (randomBoolean()) { - mapping.field("copy_to", "bar"); - } - if (randomBoolean()) { - // this option is not upgraded automatically - if (keyword) { - mapping.field("index_options", "docs"); - } else { - mapping.field("ignore_above", 30); - } - shouldUpgrade = false; - } - mapping.endObject().endObject().endObject().endObject(); - - if (oldIndex == false && shouldUpgrade == false) { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> parser.parse("type", new CompressedXContent(mapping.string()))); - assertThat(e.getMessage(), containsString("The [string] type is removed in 5.0")); - } else { - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string())); - FieldMapper field = mapper.mappers().getMapper("field"); - if (oldIndex) { - assertThat(field, instanceOf(StringFieldMapper.class)); - } else if (keyword) { - assertThat(field, instanceOf(KeywordFieldMapper.class)); - } else { - assertThat(field, instanceOf(TextFieldMapper.class)); - } - if (field.fieldType().indexOptions() != IndexOptions.NONE) { - assertEquals(hasNorms, field.fieldType().omitNorms() == false); - } - } - } - - public void testUpgradeTemplateWithDynamicType() throws IOException { - IndexService indexService = createIndex("test"); - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startArray("dynamic_templates") - .startObject() - .startObject("my_template") - .field("match_mapping_type", "string") - .startObject("mapping") - .field("store", true) - .endObject() - .endObject() - .endObject() - .endArray() - .endObject().endObject().string(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - BytesReference source = XContentFactory.jsonBuilder().startObject().field("foo", "bar").endObject().bytes(); - ParsedDocument doc = mapper.parse("test", "type", "id", source); - Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo"); - assertThat(fooMapper, instanceOf(TextFieldMapper.class)); - assertTrue(((TextFieldMapper) fooMapper).fieldType().stored()); - } - - public void testUpgradeTemplateWithDynamicType2() throws IOException { - IndexService indexService = createIndex("test"); - DocumentMapperParser parser = 
indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startArray("dynamic_templates") - .startObject() - .startObject("my_template") - .field("match_mapping_type", "string") - .startObject("mapping") - .field("type", "{dynamic_type}") - .field("store", true) - .endObject() - .endObject() - .endObject() - .endArray() - .endObject().endObject().string(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - BytesReference source = XContentFactory.jsonBuilder().startObject().field("foo", "bar").endObject().bytes(); - ParsedDocument doc = mapper.parse("test", "type", "id", source); - Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo"); - assertThat(fooMapper, instanceOf(TextFieldMapper.class)); - assertTrue(((TextFieldMapper) fooMapper).fieldType().stored()); - } - - public void testUpgradeTemplateWithDynamicTypeKeyword() throws IOException { - IndexService indexService = createIndex("test"); - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startArray("dynamic_templates") - .startObject() - .startObject("my_template") - .field("match_mapping_type", "string") - .startObject("mapping") - .field("index", "not_analyzed") - .endObject() - .endObject() - .endObject() - .endArray() - .endObject().endObject().string(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - BytesReference source = XContentFactory.jsonBuilder().startObject().field("foo", "bar").endObject().bytes(); - ParsedDocument doc = mapper.parse("test", "type", "id", source); - Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo"); - assertThat(fooMapper, instanceOf(KeywordFieldMapper.class)); - } - - public void testUpgradeTemplateWithDynamicTypeKeyword2() throws IOException { - IndexService indexService = createIndex("test"); - DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startArray("dynamic_templates") - .startObject() - .startObject("my_template") - .field("match_mapping_type", "string") - .startObject("mapping") - .field("type", "{dynamic_type}") - .field("index", "not_analyzed") - .endObject() - .endObject() - .endObject() - .endArray() - .endObject().endObject().string(); - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - BytesReference source = XContentFactory.jsonBuilder().startObject().field("foo", "bar").endObject().bytes(); - ParsedDocument doc = mapper.parse("test", "type", "id", source); - Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo"); - assertThat(fooMapper, instanceOf(KeywordFieldMapper.class)); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/TTLFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/TTLFieldMapperTests.java deleted file mode 100644 index bf51be3c2d4..00000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/TTLFieldMapperTests.java +++ /dev/null @@ -1,322 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.index.IndexOptions; -import org.elasticsearch.Version; -import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.MapperService.MergeReason; -import org.elasticsearch.index.mapper.ParsedDocument; -import org.elasticsearch.index.mapper.SourceToParse; -import org.elasticsearch.index.mapper.TTLFieldMapper; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; - -import java.io.IOException; -import java.util.Collection; - -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.startsWith; - -public class TTLFieldMapperTests extends ESSingleNodeTestCase { - - private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build(); - - @Override - protected Collection> getPlugins() { - return pluginList(InternalSettingsPlugin.class); - } - - public void testRejectedOn5x() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject() - .startObject("type") - .startObject("_ttl") - .field("enabled", true) - .endObject() - .endObject().endObject().string(); - IndexService index = createIndex("test"); - IllegalArgumentException expected = expectThrows(IllegalArgumentException.class, - () -> index.mapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, false)); - assertThat(expected.getMessage(), startsWith("[_ttl] is removed")); - } - - public void testSimpleDisabled() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - BytesReference source = XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .endObject() - .bytes(); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", 
"1", source).ttl(Long.MAX_VALUE)); - - assertThat(doc.rootDoc().getField("_ttl"), equalTo(null)); - } - - public void testEnabled() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_ttl").field("enabled", "yes").endObject() - .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - BytesReference source = XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .endObject() - .bytes(); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source).ttl(Long.MAX_VALUE)); - - assertThat(doc.rootDoc().getField("_ttl").fieldType().stored(), equalTo(true)); - assertNotSame(IndexOptions.NONE, doc.rootDoc().getField("_ttl").fieldType().indexOptions()); - assertThat(doc.rootDoc().getField("_ttl").tokenStream(docMapper.mappers().indexAnalyzer(), null), notNullValue()); - } - - public void testDefaultValues() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - assertThat(docMapper.TTLFieldMapper().enabled(), equalTo(TTLFieldMapper.Defaults.ENABLED_STATE.enabled)); - assertThat(docMapper.TTLFieldMapper().fieldType().stored(), equalTo(TTLFieldMapper.Defaults.TTL_FIELD_TYPE.stored())); - assertThat(docMapper.TTLFieldMapper().fieldType().indexOptions(), equalTo(TTLFieldMapper.Defaults.TTL_FIELD_TYPE.indexOptions())); - } - - public void testThatEnablingTTLFieldOnMergeWorks() throws Exception { - String mappingWithoutTtl = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").field("field").startObject().field("type", "text").endObject().endObject() - .endObject().endObject().string(); - - String mappingWithTtl = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_ttl") - .field("enabled", "yes") - .endObject() - .startObject("properties").field("field").startObject().field("type", "text").endObject().endObject() - .endObject().endObject().string(); - - MapperService mapperService = createIndex("test", BW_SETTINGS).mapperService(); - DocumentMapper mapperWithoutTtl = mapperService.merge("type", new CompressedXContent(mappingWithoutTtl), MapperService.MergeReason.MAPPING_UPDATE, false); - DocumentMapper mapperWithTtl = mapperService.merge("type", new CompressedXContent(mappingWithTtl), MapperService.MergeReason.MAPPING_UPDATE, false); - - assertThat(mapperWithoutTtl.TTLFieldMapper().enabled(), equalTo(false)); - assertThat(mapperWithTtl.TTLFieldMapper().enabled(), equalTo(true)); - } - - public void testThatChangingTTLKeepsMapperEnabled() throws Exception { - String mappingWithTtl = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_ttl") - .field("enabled", "yes") - .endObject() - .startObject("properties").field("field").startObject().field("type", "text").endObject().endObject() - .endObject().endObject().string(); - - String updatedMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_ttl") - .field("default", "7d") - .endObject() - .startObject("properties").field("field").startObject().field("type", "text").endObject().endObject() - .endObject().endObject().string(); - - MapperService mapperService 
= createIndex("test", BW_SETTINGS).mapperService(); - DocumentMapper initialMapper = mapperService.merge("type", new CompressedXContent(mappingWithTtl), MapperService.MergeReason.MAPPING_UPDATE, false); - DocumentMapper updatedMapper = mapperService.merge("type", new CompressedXContent(updatedMapping), MapperService.MergeReason.MAPPING_UPDATE, false); - - assertThat(initialMapper.TTLFieldMapper().enabled(), equalTo(true)); - assertThat(updatedMapper.TTLFieldMapper().enabled(), equalTo(true)); - } - - public void testThatDisablingTTLReportsConflict() throws Exception { - String mappingWithTtl = getMappingWithTtlEnabled().string(); - String mappingWithTtlDisabled = getMappingWithTtlDisabled().string(); - MapperService mapperService = createIndex("test", BW_SETTINGS).mapperService(); - DocumentMapper initialMapper = mapperService.merge("type", new CompressedXContent(mappingWithTtl), MapperService.MergeReason.MAPPING_UPDATE, false); - - try { - mapperService.merge("type", new CompressedXContent(mappingWithTtlDisabled), MapperService.MergeReason.MAPPING_UPDATE, false); - fail(); - } catch (IllegalArgumentException e) { - // expected - } - - assertThat(initialMapper.TTLFieldMapper().enabled(), equalTo(true)); - } - - public void testThatDisablingTTLReportsConflictOnCluster() throws Exception { - String mappingWithTtl = getMappingWithTtlEnabled().string(); - String mappingWithTtlDisabled = getMappingWithTtlDisabled().string(); - assertAcked(client().admin().indices().prepareCreate("testindex").setSettings(BW_SETTINGS).addMapping("type", mappingWithTtl)); - GetMappingsResponse mappingsBeforeUpdateResponse = client().admin().indices().prepareGetMappings("testindex").addTypes("type").get(); - try { - client().admin().indices().preparePutMapping("testindex").setSource(mappingWithTtlDisabled).setType("type").get(); - fail(); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("_ttl cannot be disabled once it was enabled.")); - } - GetMappingsResponse mappingsAfterUpdateResponse = client().admin().indices().prepareGetMappings("testindex").addTypes("type").get(); - assertThat(mappingsBeforeUpdateResponse.getMappings().get("testindex").get("type").source(), equalTo(mappingsAfterUpdateResponse.getMappings().get("testindex").get("type").source())); - } - - public void testThatEnablingTTLAfterFirstDisablingWorks() throws Exception { - String mappingWithTtl = getMappingWithTtlEnabled().string(); - String withTtlDisabled = getMappingWithTtlDisabled().string(); - assertAcked(client().admin().indices().prepareCreate("testindex").setSettings(BW_SETTINGS).addMapping("type", withTtlDisabled)); - GetMappingsResponse mappingsAfterUpdateResponse = client().admin().indices().prepareGetMappings("testindex").addTypes("type").get(); - assertThat(mappingsAfterUpdateResponse.getMappings().get("testindex").get("type").sourceAsMap().get("_ttl").toString(), equalTo("{enabled=false}")); - client().admin().indices().preparePutMapping("testindex").setSource(mappingWithTtl).setType("type").get(); - mappingsAfterUpdateResponse = client().admin().indices().prepareGetMappings("testindex").addTypes("type").get(); - assertThat(mappingsAfterUpdateResponse.getMappings().get("testindex").get("type").sourceAsMap().get("_ttl").toString(), equalTo("{enabled=true}")); - } - - public void testNoConflictIfNothingSetAndDisabledLater() throws Exception { - IndexService indexService = createIndex("testindex", BW_SETTINGS, "type"); - XContentBuilder mappingWithTtlDisabled = getMappingWithTtlDisabled("7d"); - 
indexService.mapperService().merge("type", new CompressedXContent(mappingWithTtlDisabled.string()), MapperService.MergeReason.MAPPING_UPDATE, false); - } - - public void testNoConflictIfNothingSetAndEnabledLater() throws Exception { - IndexService indexService = createIndex("testindex", BW_SETTINGS, "type"); - XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d"); - indexService.mapperService().merge("type", new CompressedXContent(mappingWithTtlEnabled.string()), MapperService.MergeReason.MAPPING_UPDATE, false); - } - - public void testMergeWithOnlyDefaultSet() throws Exception { - XContentBuilder mappingWithTtlEnabled = getMappingWithTtlEnabled("7d"); - IndexService indexService = createIndex("testindex", BW_SETTINGS, "type", mappingWithTtlEnabled); - XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m"); - indexService.mapperService().merge("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), - MapperService.MergeReason.MAPPING_UPDATE, false); - CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); - assertEquals(JsonXContent.contentBuilder().startObject() - .startObject("type") - .startObject("_ttl") - .field("enabled", true) - .field("default", 360000) - .endObject() - .startObject("properties") - .startObject("field") - .field("type", "string") - .field("fielddata", false) - .endObject() - .endObject() - .endObject().endObject().string(), - mappingAfterMerge.string()); - } - - public void testMergeWithOnlyDefaultSetTtlDisabled() throws Exception { - XContentBuilder mappingWithTtlEnabled = getMappingWithTtlDisabled("7d"); - IndexService indexService = createIndex("testindex", BW_SETTINGS, "type", mappingWithTtlEnabled); - CompressedXContent mappingAfterCreation = indexService.mapperService().documentMapper("type").mappingSource(); - assertEquals(JsonXContent.contentBuilder().startObject() - .startObject("type") - .startObject("_ttl") - .field("enabled", false) - .endObject() - .startObject("properties") - .startObject("field") - .field("type", "string") - .field("fielddata", false) - .endObject() - .endObject() - .endObject().endObject().string(), - mappingAfterCreation.string()); - XContentBuilder mappingWithOnlyDefaultSet = getMappingWithOnlyTtlDefaultSet("6m"); - indexService.mapperService().merge("type", new CompressedXContent(mappingWithOnlyDefaultSet.string()), - MapperService.MergeReason.MAPPING_UPDATE, false); - CompressedXContent mappingAfterMerge = indexService.mapperService().documentMapper("type").mappingSource(); - assertEquals(JsonXContent.contentBuilder().startObject() - .startObject("type") - .startObject("_ttl") - .field("enabled", false) - .endObject() - .startObject("properties") - .startObject("field") - .field("type", "string") - .field("fielddata", false) - .endObject() - .endObject() - .endObject().endObject().string(), - mappingAfterMerge.string()); - } - - public void testIncludeInObjectNotAllowed() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_ttl").field("enabled", true).endObject() - .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - try { - docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject().field("_ttl", "2d").endObject().bytes()); - fail("Expected failure to parse metadata field"); - } catch 
(MapperParsingException e) { - assertTrue(e.getMessage(), e.getMessage().contains("Field [_ttl] is a metadata field and cannot be added inside a document")); - } - } - - private org.elasticsearch.common.xcontent.XContentBuilder getMappingWithTtlEnabled() throws IOException { - return getMappingWithTtlEnabled(null); - } - - private org.elasticsearch.common.xcontent.XContentBuilder getMappingWithTtlDisabled() throws IOException { - return getMappingWithTtlDisabled(null); - } - - private org.elasticsearch.common.xcontent.XContentBuilder getMappingWithTtlEnabled(String defaultValue) throws IOException { - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_ttl") - .field("enabled", true); - if (defaultValue != null) { - mapping.field("default", defaultValue); - } - return mapping.endObject() - .startObject("properties").field("field").startObject().field("type", "text").endObject().endObject() - .endObject().endObject(); - } - - private org.elasticsearch.common.xcontent.XContentBuilder getMappingWithTtlDisabled(String defaultValue) throws IOException { - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_ttl") - .field("enabled", false); - if (defaultValue != null) { - mapping.field("default", defaultValue); - } - return mapping.endObject() - .startObject("properties").field("field").startObject().field("type", "text").endObject().endObject() - .endObject().endObject(); - } - - private org.elasticsearch.common.xcontent.XContentBuilder getMappingWithOnlyTtlDefaultSet(String defaultValue) throws IOException { - return XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_ttl").field("default", defaultValue).endObject() - .startObject("properties").field("field").startObject().field("type", "text").endObject().endObject() - .endObject().endObject(); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java index 846d2c56669..e234beb7904 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java @@ -28,10 +28,7 @@ import org.apache.lucene.index.PostingsEnum; import org.apache.lucene.index.Term; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -43,7 +40,6 @@ import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.VersionUtils; import org.junit.Before; import java.io.IOException; @@ -53,7 +49,6 @@ import java.util.Collections; import java.util.HashMap; import java.util.Map; -import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -63,7 +58,7 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase { DocumentMapperParser parser; @Before - public 
void before() { + public void setup() { indexService = createIndex("test"); parser = indexService.mapperService().documentMapperParser(); } @@ -573,23 +568,5 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase { () -> parser.parse("type", new CompressedXContent(mapping)) ); assertThat(e.getMessage(), containsString("name cannot be empty string")); - - // empty name allowed in index created before 5.0 - Version oldVersion = VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_5); - Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, oldVersion).build(); - indexService = createIndex("test_old", oldIndexSettings); - parser = indexService.mapperService().documentMapperParser(); - - DocumentMapper defaultMapper = parser.parse("type", new CompressedXContent(mapping)); - String downgradedMapping = XContentFactory.jsonBuilder().startObject() - .startObject("type") - .startObject("properties") - .startObject("") - .field("type", "string") - .field("fielddata", false) - .endObject() - .endObject() - .endObject().endObject().string(); - assertEquals(downgradedMapping, defaultMapper.mappingSource().string()); } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/TimestampFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/TimestampFieldMapperTests.java deleted file mode 100644 index 6b156fa36e1..00000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/TimestampFieldMapperTests.java +++ /dev/null @@ -1,459 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.mapper; - -import org.apache.lucene.index.IndexOptions; -import org.elasticsearch.Version; -import org.elasticsearch.action.TimestampParsingException; -import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.cluster.metadata.MappingMetaData; -import org.elasticsearch.cluster.metadata.MetaData; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.joda.Joda; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.MapperService.MergeReason; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.VersionUtils; - -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.util.Arrays; -import java.util.Collection; -import java.util.LinkedHashMap; - -import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.lessThanOrEqualTo; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.startsWith; - -public class TimestampFieldMapperTests extends ESSingleNodeTestCase { - - private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build(); - - @Override - protected Collection<Class<? extends Plugin>> getPlugins() { - return pluginList(InternalSettingsPlugin.class); - } - - public void testRejectedOn5x() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject() - .startObject("type") - .startObject("_timestamp") - .field("enabled", true) - .endObject() - .endObject().endObject().string(); - IndexService index = createIndex("test"); - IllegalArgumentException expected = expectThrows(IllegalArgumentException.class, - () -> index.mapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, false)); - assertThat(expected.getMessage(), startsWith("[_timestamp] is removed")); - } - - public void testSimpleDisabled() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - BytesReference source = XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .endObject() - .bytes(); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source).timestamp(1)); - - assertThat(doc.rootDoc().getField("_timestamp"), equalTo(null)); - } - - public void testEnabled() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") -
.startObject("_timestamp").field("enabled", "yes").endObject() - .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - BytesReference source = XContentFactory.jsonBuilder() - .startObject() - .field("field", "value") - .endObject() - .bytes(); - ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source).timestamp(1)); - - assertThat(doc.rootDoc().getField("_timestamp").fieldType().stored(), equalTo(true)); - assertNotSame(IndexOptions.NONE, doc.rootDoc().getField("_timestamp").fieldType().indexOptions()); - assertThat(doc.rootDoc().getField("_timestamp").tokenStream(docMapper.mappers().indexAnalyzer(), null), notNullValue()); - } - - public void testDefaultValues() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0_beta1, Version.V_5_0_0_alpha3); - for (String mapping : Arrays.asList( - XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(), - XContentFactory.jsonBuilder().startObject().startObject("type").startObject("_timestamp").endObject().endObject().endObject().string())) { - DocumentMapper docMapper = createIndex("test", Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build()).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - assertThat(docMapper.timestampFieldMapper().enabled(), equalTo(TimestampFieldMapper.Defaults.ENABLED.enabled)); - assertThat(docMapper.timestampFieldMapper().fieldType().stored(), equalTo(version.onOrAfter(Version.V_2_0_0_beta1))); - assertThat(docMapper.timestampFieldMapper().fieldType().indexOptions(), equalTo(TimestampFieldMapper.Defaults.FIELD_TYPE.indexOptions())); - assertThat(docMapper.timestampFieldMapper().fieldType().hasDocValues(), equalTo(version.onOrAfter(Version.V_2_0_0_beta1))); - assertThat(docMapper.timestampFieldMapper().fieldType().dateTimeFormatter().format(), equalTo(TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT)); - assertAcked(client().admin().indices().prepareDelete("test").execute().get()); - } - } - - public void testThatDisablingDuringMergeIsWorking() throws Exception { - String enabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp").field("enabled", true).endObject() - .endObject().endObject().string(); - MapperService mapperService = createIndex("test", BW_SETTINGS).mapperService(); - DocumentMapper enabledMapper = mapperService.merge("type", new CompressedXContent(enabledMapping), MapperService.MergeReason.MAPPING_UPDATE, false); - - String disabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp").field("enabled", false).endObject() - .endObject().endObject().string(); - DocumentMapper disabledMapper = mapperService.merge("type", new CompressedXContent(disabledMapping), MapperService.MergeReason.MAPPING_UPDATE, false); - - assertThat(enabledMapper.timestampFieldMapper().enabled(), is(true)); - assertThat(disabledMapper.timestampFieldMapper().enabled(), is(false)); - } - - // Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null] - public void testTimestampMissingDefaultToEpochValue() throws Exception { - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp") - .field("enabled", "yes") - .field("default", "1970-01-01") - 
.field("format", "YYYY-MM-dd") - .endObject() - .endObject().endObject(); - XContentBuilder doc = XContentFactory.jsonBuilder() - .startObject() - .field("foo", "bar") - .endObject(); - - DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string())); - MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData(); - - MappingMetaData mappingMetaData = new MappingMetaData(docMapper); - - IndexRequest request = new IndexRequest("test", "type", "1").source(doc); - request.process(mappingMetaData, true, "test"); - assertThat(request.timestamp(), notNullValue()); - assertThat(request.timestamp(), is(MappingMetaData.Timestamp.parseStringTimestamp("1970-01-01", Joda.forPattern("YYYY-MM-dd")))); - } - - // Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null] - public void testTimestampMissingNowDefaultValue() throws Exception { - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp") - .field("enabled", "yes") - .field("default", "now") - .field("format", "YYYY-MM-dd") - .endObject() - .endObject().endObject(); - XContentBuilder doc = XContentFactory.jsonBuilder() - .startObject() - .field("foo", "bar") - .endObject(); - - MetaData metaData = MetaData.builder().build(); - DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string())); - - MappingMetaData mappingMetaData = new MappingMetaData(docMapper); - - IndexRequest request = new IndexRequest("test", "type", "1").source(doc); - request.process(mappingMetaData, true, "test"); - assertThat(request.timestamp(), notNullValue()); - - // We should have less than one minute (probably some ms) - long delay = System.currentTimeMillis() - Long.parseLong(request.timestamp()); - assertThat(delay, lessThanOrEqualTo(60000L)); - } - - // Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null] - public void testPathMissingWithForcedNullDefaultShouldFail() throws Exception { - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp") - .field("enabled", "yes") - .field("path", "timestamp") - .field("default", (String) null) - .endObject() - .endObject().endObject(); - TimestampParsingException e = expectThrows(TimestampParsingException.class, () -> createIndex("test", BW_SETTINGS).mapperService() - .documentMapperParser().parse("type", new CompressedXContent(mapping.string()))); - assertThat(e.getDetailedMessage(), containsString("default timestamp can not be set to null")); - } - - // Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null] - public void testTimestampMissingWithForcedNullDefaultShouldFail() throws Exception { - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp") - .field("enabled", "yes") - .field("default", (String) null) - .endObject() - .endObject().endObject(); - - TimestampParsingException e = expectThrows(TimestampParsingException.class, () -> createIndex("test", BW_SETTINGS).mapperService() - .documentMapperParser().parse("type", new CompressedXContent(mapping.string()))); - assertThat(e.getDetailedMessage(), containsString("default timestamp can not be set to null")); - } - - // Issue 4718: was throwing a TimestampParsingException: failed to 
parse timestamp [null] - public void testTimestampDefaultAndIgnore() throws Exception { - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp") - .field("enabled", "yes") - .field("default", "1971-12-26") - .field("ignore_missing", false) - .endObject() - .endObject().endObject(); - - TimestampParsingException e = expectThrows(TimestampParsingException.class, () -> createIndex("test", BW_SETTINGS).mapperService() - .documentMapperParser().parse("type", new CompressedXContent(mapping.string()))); - assertThat(e.getDetailedMessage(), containsString("default timestamp can not be set with ignore_missing set to false")); - } - - // Issue 4718: was throwing a TimestampParsingException: failed to parse timestamp [null] - public void testTimestampMissingShouldNotFail() throws Exception { - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp") - .field("enabled", "yes") - .endObject() - .endObject().endObject(); - XContentBuilder doc = XContentFactory.jsonBuilder() - .startObject() - .field("foo", "bar") - .endObject(); - - MetaData metaData = MetaData.builder().build(); - DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping.string())); - - MappingMetaData mappingMetaData = new MappingMetaData(docMapper); - - IndexRequest request = new IndexRequest("test", "type", "1").source(doc); - request.process(mappingMetaData, true, "test"); - - assertThat(request.timestamp(), notNullValue()); - - // We should have less than one minute (probably some ms) - long delay = System.currentTimeMillis() - Long.parseLong(request.timestamp()); - assertThat(delay, lessThanOrEqualTo(60000L)); - } - - public void testDefaultTimestampStream() throws IOException { - // Testing null value for default timestamp - { - MappingMetaData.Timestamp timestamp = new MappingMetaData.Timestamp(true, - TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT, null, null); - MappingMetaData expected = new MappingMetaData("type", new CompressedXContent("{}".getBytes(StandardCharsets.UTF_8)), - new MappingMetaData.Routing(false), timestamp, false); - - BytesStreamOutput out = new BytesStreamOutput(); - expected.writeTo(out); - out.close(); - BytesReference bytes = out.bytes(); - - MappingMetaData metaData = MappingMetaData.PROTO.readFrom(bytes.streamInput()); - - assertThat(metaData, is(expected)); - } - - // Testing "now" value for default timestamp - { - MappingMetaData.Timestamp timestamp = new MappingMetaData.Timestamp(true, - TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT, "now", null); - MappingMetaData expected = new MappingMetaData("type", new CompressedXContent("{}".getBytes(StandardCharsets.UTF_8)), - new MappingMetaData.Routing(false), timestamp, false); - - BytesStreamOutput out = new BytesStreamOutput(); - expected.writeTo(out); - out.close(); - BytesReference bytes = out.bytes(); - - MappingMetaData metaData = MappingMetaData.PROTO.readFrom(bytes.streamInput()); - - assertThat(metaData, is(expected)); - } - - // Testing "ignore_missing" value for default timestamp - { - MappingMetaData.Timestamp timestamp = new MappingMetaData.Timestamp(true, - TimestampFieldMapper.DEFAULT_DATE_TIME_FORMAT, "now", false); - MappingMetaData expected = new MappingMetaData("type", new CompressedXContent("{}".getBytes(StandardCharsets.UTF_8)), - new MappingMetaData.Routing(false), timestamp, false); - - BytesStreamOutput out = new 
BytesStreamOutput(); - expected.writeTo(out); - out.close(); - BytesReference bytes = out.bytes(); - - MappingMetaData metaData = MappingMetaData.PROTO.readFrom(bytes.streamInput()); - - assertThat(metaData, is(expected)); - } - } - - public void testParsingNotDefaultTwiceDoesNotChangeMapping() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp") - .field("enabled", true) - .field("default", "1970-01-01") - .endObject().endObject().endObject().string(); - DocumentMapperParser parser = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser(); - - DocumentMapper docMapper = parser.parse("type", new CompressedXContent(mapping)); - docMapper = parser.parse("type", docMapper.mappingSource()); - assertThat(docMapper.mappingSource().string(), equalTo(mapping)); - } - - /** - * Test for issue #9223 - */ - public void testInitMappers() throws IOException { - String mapping = XContentFactory.jsonBuilder().startObject() - .startObject("type") - .startObject("_timestamp") - .field("enabled", true) - .field("default", (String) null) - .endObject() - .endObject().endObject().string(); - // This was causing a NPE - new MappingMetaData(new CompressedXContent(mapping)); - } - - void assertConflict(MapperService mapperService, String type, String mapping1, String mapping2, String conflict) throws IOException { - mapperService.merge("type", new CompressedXContent(mapping1), MapperService.MergeReason.MAPPING_UPDATE, false); - try { - mapperService.merge("type", new CompressedXContent(mapping2), MapperService.MergeReason.MAPPING_UPDATE, false); - assertNull(conflict); - } catch (IllegalArgumentException e) { - assertNotNull(conflict); - assertThat(e.getMessage(), containsString(conflict)); - } - } - - public void testIncludeInObjectNotAllowed() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp").field("enabled", true).field("default", "1970").field("format", "YYYY").endObject() - .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - try { - docMapper.parse("test", "type", "1", XContentFactory.jsonBuilder() - .startObject().field("_timestamp", 2000000).endObject().bytes()); - fail("Expected failure to parse metadata field"); - } catch (MapperParsingException e) { - assertTrue(e.getMessage(), e.getMessage().contains("Field [_timestamp] is a metadata field and cannot be added inside a document")); - } - } - - public void testThatEpochCanBeIgnoredWithCustomFormat() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp").field("enabled", true).field("format", "yyyyMMddHH").endObject() - .endObject().endObject().string(); - DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData(); - - XContentBuilder doc = XContentFactory.jsonBuilder().startObject().endObject(); - IndexRequest request = new IndexRequest("test", "type", "1").source(doc).timestamp("2015060210"); - MappingMetaData mappingMetaData = new MappingMetaData(docMapper); - request.process(mappingMetaData, true, "test"); - - assertThat(request.timestamp(), is("1433239200000")); - } - - public void 
testThatIndicesAfter2_0DontSupportUnixTimestampsInAnyDateFormat() throws Exception { - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp").field("enabled", true).field("format", "dateOptionalTime").endObject() - .endObject().endObject().string(); - BytesReference source = XContentFactory.jsonBuilder().startObject().field("field", "value").endObject().bytes(); - // test with 2.x - DocumentMapper currentMapper = createIndex("new-index", BW_SETTINGS).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - - // this works with 2.x - IndexRequest request = new IndexRequest("new-index", "type", "1").source(source).timestamp("1970-01-01"); - request.process(new MappingMetaData(currentMapper), true, "new-index"); - - // this fails with 2.x - request = new IndexRequest("new-index", "type", "1").source(source).timestamp("1234567890"); - try { - request.process(new MappingMetaData(currentMapper), true, "new-index"); - } catch (Exception e) { - assertThat(e.getCause(), instanceOf(IllegalArgumentException.class)); - assertThat(e.getMessage(), containsString("failed to parse timestamp [1234567890]")); - } - } - - public void testSizeTimestampIndexParsing() throws IOException { - IndexService indexService = createIndex("test", BW_SETTINGS); - String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/update/default_mapping_with_disabled_root_types.json"); - DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedXContent(mapping), true); - assertThat(documentMapper.mappingSource().string(), equalTo(mapping)); - documentMapper = indexService.mapperService().parse("type", new CompressedXContent(documentMapper.mappingSource().string()), true); - assertThat(documentMapper.mappingSource().string(), equalTo(mapping)); - } - - public void testDefaultApplied() throws IOException { - createIndex("test1", BW_SETTINGS); - createIndex("test2", BW_SETTINGS); - XContentBuilder defaultMapping = XContentFactory.jsonBuilder().startObject() - .startObject(MapperService.DEFAULT_MAPPING).startObject("_timestamp").field("enabled", true).endObject().endObject() - .endObject(); - client().admin().indices().preparePutMapping().setType(MapperService.DEFAULT_MAPPING).setSource(defaultMapping).get(); - XContentBuilder typeMapping = XContentFactory.jsonBuilder().startObject() - .startObject("type").startObject("_all").field("enabled", false).endObject().endObject() - .endObject(); - client().admin().indices().preparePutMapping("test1").setType("type").setSource(typeMapping).get(); - client().admin().indices().preparePutMapping("test1", "test2").setType("type").setSource(typeMapping).get(); - - GetMappingsResponse response = client().admin().indices().prepareGetMappings("test2").get(); - assertNotNull(response.getMappings().get("test2").get("type").getSourceAsMap().get("_all")); - assertFalse((Boolean) ((LinkedHashMap) response.getMappings().get("test2").get("type").getSourceAsMap().get("_all")).get("enabled")); - assertNotNull(response.getMappings().get("test2").get("type").getSourceAsMap().get("_timestamp")); - assertTrue((Boolean)((LinkedHashMap)response.getMappings().get("test2").get("type").getSourceAsMap().get("_timestamp")).get("enabled")); - } - - public void testTimestampParsing() throws IOException { - IndexService indexService = createIndex("test", BW_SETTINGS); - XContentBuilder indexMapping = XContentFactory.jsonBuilder(); - boolean enabled = randomBoolean(); - 
indexMapping.startObject() - .startObject("type") - .startObject("_timestamp") - .field("enabled", enabled) - .endObject() - .endObject() - .endObject(); - DocumentMapper documentMapper = indexService.mapperService().parse("type", new CompressedXContent(indexMapping.string()), true); - assertThat(documentMapper.timestampFieldMapper().enabled(), equalTo(enabled)); - assertTrue(documentMapper.timestampFieldMapper().fieldType().stored()); - assertTrue(documentMapper.timestampFieldMapper().fieldType().hasDocValues()); - documentMapper = indexService.mapperService().parse("type", new CompressedXContent(documentMapper.mappingSource().string()), true); - assertThat(documentMapper.timestampFieldMapper().enabled(), equalTo(enabled)); - assertTrue(documentMapper.timestampFieldMapper().fieldType().hasDocValues()); - assertTrue(documentMapper.timestampFieldMapper().fieldType().stored()); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/TimestampFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/TimestampFieldTypeTests.java deleted file mode 100644 index 53c0c89f8c3..00000000000 --- a/core/src/test/java/org/elasticsearch/index/mapper/TimestampFieldTypeTests.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.index.mapper; - -public class TimestampFieldTypeTests extends LegacyDateFieldTypeTests { - @Override - protected MappedFieldType createDefaultFieldType() { - return new TimestampFieldMapper.TimestampFieldType(); - } - - @Override - public void testValueForSearch() { - MappedFieldType ft = createDefaultFieldType(); - String date = "2015-10-12T12:09:55.000Z"; - long instant = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parser().parseDateTime(date).getMillis(); - assertEquals(instant, ft.valueForDisplay(instant)); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/mapper/TokenCountFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/TokenCountFieldMapperTests.java index 835295def4f..02128a4254a 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/TokenCountFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/TokenCountFieldMapperTests.java @@ -125,14 +125,5 @@ public class TokenCountFieldMapperTests extends ESSingleNodeTestCase { () -> parser.parse("type", new CompressedXContent(mapping)) ); assertThat(e.getMessage(), containsString("name cannot be empty string")); - - // empty name allowed in index created before 5.0 - Version oldVersion = VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_5); - Settings oldIndexSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, oldVersion).build(); - indexService = createIndex("test_old", oldIndexSettings); - DocumentMapperParser parser2x = indexService.mapperService().documentMapperParser(); - - DocumentMapper defaultMapper = parser2x.parse("type", new CompressedXContent(mapping)); - assertEquals(mapping, defaultMapper.mappingSource().string()); } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/TypeFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/TypeFieldMapperTests.java index 3573cfd8b60..6fade26ca02 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/TypeFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/TypeFieldMapperTests.java @@ -19,15 +19,9 @@ package org.elasticsearch.index.mapper; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData; -import org.elasticsearch.index.fielddata.plain.PagedBytesIndexFieldData; -import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.TypeFieldMapper; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.InternalSettingsPlugin; @@ -52,14 +46,4 @@ public class TypeFieldMapperTests extends ESSingleNodeTestCase { assertThat(typeMapper.fieldType().fielddataBuilder(), instanceOf(DocValuesIndexFieldData.Builder.class)); } - public void testDocValuesPre21() throws Exception { - // between 2.0 and 2.1, doc values was disabled for _type - String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string(); - Settings bwcSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_0_0_beta1.id).build(); - - DocumentMapper docMapper = createIndex("test", bwcSettings).mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); - 
TypeFieldMapper typeMapper = docMapper.metadataMapper(TypeFieldMapper.class); - assertFalse(typeMapper.fieldType().hasDocValues()); - assertThat(typeMapper.fieldType().fielddataBuilder(), instanceOf(PagedBytesIndexFieldData.Builder.class)); - } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/UpdateMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/UpdateMappingTests.java index 73cf070cba7..7aec1ecd0bb 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/UpdateMappingTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/UpdateMappingTests.java @@ -235,30 +235,6 @@ public class UpdateMappingTests extends ESSingleNodeTestCase { } } - public void testReuseMetaFieldBackCompat() throws IOException { - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("_id").field("type", "text").endObject() - .endObject().endObject().endObject(); - // the logic is different for 2.x indices since they record some meta mappers (including _id) - // in the root object - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_1_0).build(); - MapperService mapperService = createIndex("test", settings).mapperService(); - - try { - mapperService.merge("type", new CompressedXContent(mapping.string()), MapperService.MergeReason.MAPPING_UPDATE, false); - fail(); - } catch (IllegalArgumentException e) { - assertTrue(e.getMessage().contains("Field [_id] is defined twice in [type]")); - } - - try { - mapperService.merge("type", new CompressedXContent(mapping.string()), MapperService.MergeReason.MAPPING_UPDATE, false); - fail(); - } catch (IllegalArgumentException e) { - assertTrue(e.getMessage().contains("Field [_id] is defined twice in [type]")); - } - } - public void testRejectFieldDefinedTwice() throws IOException { String mapping1 = XContentFactory.jsonBuilder().startObject() .startObject("type1") diff --git a/core/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java index 71be96c1688..49266ebe9fd 100644 --- a/core/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/GeoBoundingBoxQueryBuilderTests.java @@ -22,16 +22,13 @@ package org.elasticsearch.index.query; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreQuery; -import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.spatial.geopoint.search.GeoPointInBBoxQuery; import org.elasticsearch.Version; -import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.search.geo.LegacyInMemoryGeoBoundingBoxQuery; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; import org.elasticsearch.test.geo.RandomShapeGenerator; @@ -43,7 +40,6 @@ import java.io.IOException; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.notNullValue; -import static org.hamcrest.Matchers.closeTo; import static 
org.hamcrest.Matchers.equalTo; public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase { @@ -224,31 +220,6 @@ public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase 0); - GeoBoundingBoxQueryBuilder qb = createTestQueryBuilder(); - if (getCurrentTypes().length != 0 && "mapped_geo".equals(qb.fieldName())) { - // only execute this test if we are running on a valid geo field - qb.setCorners(200, 200, qb.bottomRight().getLat(), qb.bottomRight().getLon()); - qb.setValidationMethod(GeoValidationMethod.COERCE); - Query query = qb.toQuery(createShardContext()); - if (query instanceof ConstantScoreQuery) { - ConstantScoreQuery result = (ConstantScoreQuery) query; - BooleanQuery bboxFilter = (BooleanQuery) result.getQuery(); - for (BooleanClause clause : bboxFilter.clauses()) { - LegacyNumericRangeQuery boundary = (LegacyNumericRangeQuery) clause.getQuery(); - if (boundary.getMax() != null) { - assertTrue("If defined, non of the maximum range values should be larger than 180", - boundary.getMax().intValue() <= 180); - } - } - } else { - assertTrue("memory queries should result in LegacyInMemoryGeoBoundingBoxQuery", - query instanceof LegacyInMemoryGeoBoundingBoxQuery); - } - } - } - public void testStrictnessDefault() { assertFalse("Someone changed the default for coordinate validation - were the docs changed as well?", GeoValidationMethod.DEFAULT_LENIENT_PARSING); @@ -261,18 +232,6 @@ public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase parseQuery(deprecatedJson)); - assertEquals("Deprecated field [geo_bbox] used, expected [geo_bounding_box] instead", e.getMessage()); } public void testFromJsonCoerceFails() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java index 3373623b6e9..6c92fde6843 100644 --- a/core/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/GeoDistanceQueryBuilderTests.java @@ -21,15 +21,10 @@ package org.elasticsearch.index.query; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.spatial.geopoint.search.GeoPointDistanceQuery; -import org.elasticsearch.Version; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.geo.GeoDistance; import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.unit.DistanceUnit; -import org.elasticsearch.index.mapper.LatLonPointFieldMapper; -import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; import org.elasticsearch.test.geo.RandomShapeGenerator; @@ -40,8 +35,6 @@ import java.io.IOException; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.notNullValue; -import static org.hamcrest.Matchers.closeTo; -import static org.hamcrest.Matchers.equalTo; public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase { @@ -131,48 +124,7 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase 0); Query parsedQuery = parseQuery(query).toQuery(createShardContext()); - Version version = createShardContext().indexVersionCreated(); - if (version.before(Version.V_2_2_0)) { - 
GeoDistanceRangeQuery q = (GeoDistanceRangeQuery) parsedQuery; - assertThat(q.fieldName(), equalTo(GEO_POINT_FIELD_NAME)); - assertThat(q.lat(), closeTo(lat, 1E-5D)); - assertThat(q.lon(), closeTo(lon, 1E-5D)); - assertThat(q.minInclusiveDistance(), equalTo(Double.NEGATIVE_INFINITY)); - assertThat(q.maxInclusiveDistance(), closeTo(distanceUnit.convert(distance, DistanceUnit.MILES), 1E-5D)); - } else if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - GeoPointDistanceQuery q = (GeoPointDistanceQuery) parsedQuery; - assertThat(q.getField(), equalTo(GEO_POINT_FIELD_NAME)); - assertThat(q.getCenterLat(), closeTo(lat, 1E-5D)); - assertThat(q.getCenterLon(), closeTo(lon, 1E-5D)); - assertThat(q.getRadiusMeters(), closeTo(distanceUnit.convert(distance, DistanceUnit.MILES), 1E-5D)); - } + // TODO: what can we check? } public void testFromJson() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/index/query/GeoDistanceRangeQueryTests.java b/core/src/test/java/org/elasticsearch/index/query/GeoDistanceRangeQueryTests.java deleted file mode 100644 index e2c04834e36..00000000000 --- a/core/src/test/java/org/elasticsearch/index/query/GeoDistanceRangeQueryTests.java +++ /dev/null @@ -1,397 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.query; - -import org.apache.lucene.search.MatchNoDocsQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.spatial.geopoint.search.XGeoPointDistanceRangeQuery; -import org.apache.lucene.util.NumericUtils; -import org.elasticsearch.Version; -import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.geo.GeoDistance; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.geo.GeoUtils; -import org.elasticsearch.common.unit.DistanceUnit; -import org.elasticsearch.index.mapper.LatLonPointFieldMapper; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery; -import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.test.AbstractQueryTestCase; -import org.elasticsearch.test.geo.RandomGeoGenerator; - -import java.io.IOException; - -import static org.hamcrest.CoreMatchers.containsString; -import static org.hamcrest.CoreMatchers.instanceOf; -import static org.hamcrest.CoreMatchers.notNullValue; -import static org.hamcrest.Matchers.closeTo; -import static org.hamcrest.Matchers.equalTo; - -public class GeoDistanceRangeQueryTests extends AbstractQueryTestCase<GeoDistanceRangeQueryBuilder> { - - @Override - protected GeoDistanceRangeQueryBuilder doCreateTestQueryBuilder() { - GeoDistanceRangeQueryBuilder builder; - GeoPoint randomPoint = RandomGeoGenerator.randomPointIn(random(), -180.0, -89.9, 180.0, 89.9); - if (randomBoolean()) { - builder = new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, randomPoint.geohash()); - } else { - if (randomBoolean()) { - builder = new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, randomPoint); - } else { - builder = new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, randomPoint.lat(), randomPoint.lon()); - } - } - GeoPoint point = builder.point(); - final double maxRadius = GeoUtils.maxRadialDistanceMeters(point.lat(), point.lon()); - final int fromValueMeters = randomInt((int)(maxRadius*0.5)); - final int toValueMeters = randomIntBetween(fromValueMeters + 1, (int)maxRadius); - DistanceUnit fromToUnits = randomFrom(DistanceUnit.values()); - final String fromToUnitsStr = fromToUnits.toString(); - final double fromValue = DistanceUnit.convert(fromValueMeters, DistanceUnit.DEFAULT, fromToUnits); - final double toValue = DistanceUnit.convert(toValueMeters, DistanceUnit.DEFAULT, fromToUnits); - - if (randomBoolean()) { - int branch = randomInt(2); - fromToUnits = DistanceUnit.DEFAULT; - switch (branch) { - case 0: - builder.from(fromValueMeters); - break; - case 1: - builder.to(toValueMeters); - break; - case 2: - builder.from(fromValueMeters); - builder.to(toValueMeters); - break; - } - } else { - int branch = randomInt(2); - switch (branch) { - case 0: - builder.from(fromValue + fromToUnitsStr); - break; - case 1: - builder.to(toValue + fromToUnitsStr); - break; - case 2: - builder.from(fromValue + fromToUnitsStr); - builder.to(toValue + fromToUnitsStr); - break; - } - } - if (randomBoolean()) { - builder.includeLower(randomBoolean()); - } - if (randomBoolean()) { - builder.includeUpper(randomBoolean()); - } - if (randomBoolean()) { - builder.geoDistance(randomFrom(GeoDistance.values())); - } - builder.unit(fromToUnits); - if (randomBoolean()) { - builder.setValidationMethod(randomFrom(GeoValidationMethod.values())); - } - - if (randomBoolean()) { - builder.ignoreUnmapped(randomBoolean()); - } - return builder; - } - - @Override - protected void 
doAssertLuceneQuery(GeoDistanceRangeQueryBuilder queryBuilder, Query query, SearchContext context) - throws IOException { - Version version = context.getQueryShardContext().indexVersionCreated(); - if (version.before(Version.V_2_2_0)) { - assertLegacyQuery(queryBuilder, query); - } else { - assertGeoPointQuery(queryBuilder, query); - } - } - - private void assertLegacyQuery(GeoDistanceRangeQueryBuilder queryBuilder, Query query) throws IOException { - assertThat(query, instanceOf(GeoDistanceRangeQuery.class)); - GeoDistanceRangeQuery geoQuery = (GeoDistanceRangeQuery) query; - assertThat(geoQuery.fieldName(), equalTo(queryBuilder.fieldName())); - if (queryBuilder.point() != null) { - GeoPoint expectedPoint = new GeoPoint(queryBuilder.point()); - if (GeoValidationMethod.isCoerce(queryBuilder.getValidationMethod())) { - GeoUtils.normalizePoint(expectedPoint, true, true); - } - assertThat(geoQuery.lat(), equalTo(expectedPoint.lat())); - assertThat(geoQuery.lon(), equalTo(expectedPoint.lon())); - } - assertThat(geoQuery.geoDistance(), equalTo(queryBuilder.geoDistance())); - if (queryBuilder.from() != null && queryBuilder.from() instanceof Number) { - double fromValue = ((Number) queryBuilder.from()).doubleValue(); - if (queryBuilder.unit() != null) { - fromValue = queryBuilder.unit().toMeters(fromValue); - } - if (queryBuilder.geoDistance() != null) { - fromValue = queryBuilder.geoDistance().normalize(fromValue, DistanceUnit.DEFAULT); - } - double fromSlop = Math.abs(fromValue) / 1000; - if (queryBuilder.includeLower() == false) { - fromSlop = NumericUtils.sortableLongToDouble((NumericUtils.doubleToSortableLong(Math.abs(fromValue)) + 1L)) / 1000.0; - } - assertThat(geoQuery.minInclusiveDistance(), closeTo(fromValue, fromSlop)); - } - if (queryBuilder.to() != null && queryBuilder.to() instanceof Number) { - double toValue = ((Number) queryBuilder.to()).doubleValue(); - if (queryBuilder.unit() != null) { - toValue = queryBuilder.unit().toMeters(toValue); - } - if (queryBuilder.geoDistance() != null) { - toValue = queryBuilder.geoDistance().normalize(toValue, DistanceUnit.DEFAULT); - } - double toSlop = Math.abs(toValue) / 1000; - if (queryBuilder.includeUpper() == false) { - toSlop = NumericUtils.sortableLongToDouble((NumericUtils.doubleToSortableLong(Math.abs(toValue)) - 1L)) / 1000.0; - } - assertThat(geoQuery.maxInclusiveDistance(), closeTo(toValue, toSlop)); - } - } - - private void assertGeoPointQuery(GeoDistanceRangeQueryBuilder queryBuilder, Query query) throws IOException { - assertThat(query, instanceOf(XGeoPointDistanceRangeQuery.class)); - XGeoPointDistanceRangeQuery geoQuery = (XGeoPointDistanceRangeQuery) query; - assertThat(geoQuery.getField(), equalTo(queryBuilder.fieldName())); - if (queryBuilder.point() != null) { - GeoPoint expectedPoint = new GeoPoint(queryBuilder.point()); - GeoUtils.normalizePoint(expectedPoint); - assertThat(geoQuery.getCenterLat(), equalTo(expectedPoint.lat())); - assertThat(geoQuery.getCenterLon(), equalTo(expectedPoint.lon())); - } - if (queryBuilder.from() != null && queryBuilder.from() instanceof Number) { - double fromValue = ((Number) queryBuilder.from()).doubleValue(); - if (queryBuilder.unit() != null) { - fromValue = queryBuilder.unit().toMeters(fromValue); - } - assertThat(geoQuery.getMinRadiusMeters(), closeTo(fromValue, 1E-5)); - } - if (queryBuilder.to() != null && queryBuilder.to() instanceof Number) { - double toValue = ((Number) queryBuilder.to()).doubleValue(); - if (queryBuilder.unit() != null) { - toValue = 
queryBuilder.unit().toMeters(toValue); - } - assertThat(geoQuery.getMaxRadiusMeters(), closeTo(toValue, 1E-5)); - } - } - - /** - * Overridden here to ensure the test is only run if at least one type is - * present in the mappings. Geo queries do not execute if the field is not - * explicitly mapped - */ - @Override - public void testToQuery() throws IOException { - assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); - if (createShardContext().indexVersionCreated().before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - super.testToQuery(); - } - } - - public void testNullFieldName() { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> new GeoDistanceRangeQueryBuilder(null, new GeoPoint())); - assertEquals("fieldName must not be null", e.getMessage()); - e = expectThrows(IllegalArgumentException.class, - () -> new GeoDistanceRangeQueryBuilder("", new GeoPoint())); - assertEquals("fieldName must not be null", e.getMessage()); - } - - public void testNoPoint() { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, (GeoPoint) null)); - assertEquals("point must not be null", e.getMessage()); - e = expectThrows(IllegalArgumentException.class, - () -> new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, (String) null)); - assertEquals("point must not be null", e.getMessage()); - } - - public void testInvalidFrom() { - GeoDistanceRangeQueryBuilder builder = new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, new GeoPoint()); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.from((String) null)); - assertEquals("[from] must not be null", e.getMessage()); - e = expectThrows(IllegalArgumentException.class, () -> builder.from((Number) null)); - assertEquals("[from] must not be null", e.getMessage()); - } - - public void testInvalidTo() { - GeoDistanceRangeQueryBuilder builder = new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, new GeoPoint()); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.to((String) null)); - assertEquals("[to] must not be null", e.getMessage()); - e = expectThrows(IllegalArgumentException.class, () -> builder.to((Number) null)); - assertEquals("[to] must not be null", e.getMessage()); - } - - public void testInvalidGeoDistance() { - GeoDistanceRangeQueryBuilder builder = new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, new GeoPoint()); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.geoDistance(null)); - assertEquals("geoDistance calculation mode must not be null", e.getMessage()); - } - - public void testInvalidDistanceUnit() { - GeoDistanceRangeQueryBuilder builder = new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, new GeoPoint()); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.unit(null)); - assertEquals("distance unit must not be null", e.getMessage()); - } - - public void testNestedRangeQuery() throws IOException { - // geo distance range queries are no longer supported in 5.0 they are replaced by using aggregations or sort - if (createShardContext().indexVersionCreated().onOrAfter(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - return; - } - - // create a nested geo_point type with a subfield named "geohash" (explicit testing for ISSUE #15179) - MapperService mapperService = createShardContext().getMapperService(); - String 
nestedMapping = - "{\"nested_doc\" : {\"properties\" : {" + - "\"locations\": {\"properties\": {" + - "\"geohash\": {\"type\": \"geo_point\"}}," + - "\"type\": \"nested\"}" + - "}}}"; - mapperService.merge("nested_doc", new CompressedXContent(nestedMapping), MapperService.MergeReason.MAPPING_UPDATE, false); - - // create a range query on the nested locations.geohash sub-field - String queryJson = - "{\n" + - " \"nested\": {\n" + - " \"path\": \"locations\",\n" + - " \"query\": {\n" + - " \"geo_distance_range\": {\n" + - " \"from\": \"0.0km\",\n" + - " \"to\" : \"200.0km\",\n" + - " \"locations.geohash\": \"s7ws01wyd7ws\"\n" + - " }\n" + - " }\n" + - " }\n" + - "}\n"; - NestedQueryBuilder builder = (NestedQueryBuilder) parseQuery(queryJson); - QueryShardContext context = createShardContext(); - builder.toQuery(context); - } - - public void testFromJson() throws IOException { - String json = - "{\n" + - " \"geo_distance_range\" : {\n" + - " \"pin.location\" : [ -70.0, 40.0 ],\n" + - " \"from\" : \"200km\",\n" + - " \"to\" : \"400km\",\n" + - " \"include_lower\" : true,\n" + - " \"include_upper\" : true,\n" + - " \"unit\" : \"m\",\n" + - " \"distance_type\" : \"sloppy_arc\",\n" + - " \"validation_method\" : \"STRICT\",\n" + - " \"ignore_unmapped\" : false,\n" + - " \"boost\" : 1.0\n" + - " }\n" + - "}"; - GeoDistanceRangeQueryBuilder parsed = (GeoDistanceRangeQueryBuilder) parseQuery(json); - checkGeneratedJson(json, parsed); - assertEquals(json, -70.0, parsed.point().lon(), 0.0001); - } - - public void testFromJsonOptimizeBboxFails() throws IOException { - String json = - "{\n" + - " \"geo_distance_range\" : {\n" + - " \"pin.location\" : [ -70.0, 40.0 ],\n" + - " \"from\" : \"200km\",\n" + - " \"to\" : \"400km\",\n" + - " \"include_lower\" : true,\n" + - " \"include_upper\" : true,\n" + - " \"unit\" : \"m\",\n" + - " \"distance_type\" : \"sloppy_arc\",\n" + - " \"optimize_bbox\" : \"memory\",\n" + - " \"ignore_unmapped\" : false,\n" + - " \"boost\" : 1.0\n" + - " }\n" + - "}"; - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parseQuery(json)); - assertTrue(e.getMessage().startsWith("Deprecated field ")); - } - - public void testFromJsonCoerceFails() throws IOException { - String json = - "{\n" + - " \"geo_distance_range\" : {\n" + - " \"pin.location\" : [ -70.0, 40.0 ],\n" + - " \"from\" : \"200km\",\n" + - " \"to\" : \"400km\",\n" + - " \"include_lower\" : true,\n" + - " \"include_upper\" : true,\n" + - " \"unit\" : \"m\",\n" + - " \"distance_type\" : \"sloppy_arc\",\n" + - " \"coerce\" : true,\n" + - " \"ignore_unmapped\" : false,\n" + - " \"boost\" : 1.0\n" + - " }\n" + - "}"; - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parseQuery(json)); - assertTrue(e.getMessage().startsWith("Deprecated field ")); - } - - public void testFromJsonIgnoreMalformedFails() throws IOException { - String json = - "{\n" + - " \"geo_distance_range\" : {\n" + - " \"pin.location\" : [ -70.0, 40.0 ],\n" + - " \"from\" : \"200km\",\n" + - " \"to\" : \"400km\",\n" + - " \"include_lower\" : true,\n" + - " \"include_upper\" : true,\n" + - " \"unit\" : \"m\",\n" + - " \"distance_type\" : \"sloppy_arc\",\n" + - " \"ignore_malformed\" : true,\n" + - " \"ignore_unmapped\" : false,\n" + - " \"boost\" : 1.0\n" + - " }\n" + - "}"; - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> parseQuery(json)); - assertTrue(e.getMessage().startsWith("Deprecated field ")); - } - - @Override - public void testMustRewrite() throws 
IOException { - assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); - if (createShardContext().indexVersionCreated().before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - super.testMustRewrite(); - } - } - - public void testIgnoreUnmapped() throws IOException { - final GeoDistanceRangeQueryBuilder queryBuilder = new GeoDistanceRangeQueryBuilder("unmapped", new GeoPoint(0.0, 0.0)).from("20m"); - queryBuilder.ignoreUnmapped(true); - Query query = queryBuilder.toQuery(createShardContext()); - assertThat(query, notNullValue()); - assertThat(query, instanceOf(MatchNoDocsQuery.class)); - - final GeoDistanceRangeQueryBuilder failingQueryBuilder = new GeoDistanceRangeQueryBuilder("unmapped", new GeoPoint(0.0, 0.0)) - .from("20m"); - failingQueryBuilder.ignoreUnmapped(false); - QueryShardException e = expectThrows(QueryShardException.class, () -> failingQueryBuilder.toQuery(createShardContext())); - assertThat(e.getMessage(), containsString("failed to find geo_point field [unmapped]")); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java index 97505a6ee8e..b77ff3bbdef 100644 --- a/core/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/GeoPolygonQueryBuilderTests.java @@ -22,16 +22,11 @@ package org.elasticsearch.index.query; import com.vividsolutions.jts.geom.Coordinate; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.spatial.geopoint.search.GeoPointInPolygonQuery; -import org.elasticsearch.Version; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.mapper.LatLonPointFieldMapper; -import org.elasticsearch.index.search.geo.GeoPolygonQuery; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; import org.elasticsearch.test.geo.RandomShapeGenerator; @@ -47,8 +42,6 @@ import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.notNullValue; -import static org.hamcrest.Matchers.closeTo; -import static org.hamcrest.Matchers.equalTo; public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase { @Override @@ -67,55 +60,9 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase queryBuilderPoints = queryBuilder.points(); - GeoPoint[] queryPoints = geoQuery.points(); - assertThat(queryPoints.length, equalTo(queryBuilderPoints.size())); - if (GeoValidationMethod.isCoerce(queryBuilder.getValidationMethod())) { - for (int i = 0; i < queryBuilderPoints.size(); i++) { - GeoPoint queryBuilderPoint = queryBuilderPoints.get(i); - GeoPoint pointCopy = new GeoPoint(queryBuilderPoint); - GeoUtils.normalizePoint(pointCopy, true, true); - assertThat(queryPoints[i], equalTo(pointCopy)); - } - } else { - for (int i = 0; i < queryBuilderPoints.size(); i++) { - assertThat(queryPoints[i], equalTo(queryBuilderPoints.get(i))); - } - } - } - - private void 
assertGeoPointQuery(GeoPolygonQueryBuilder queryBuilder, Query query) { - assertThat(query, instanceOf(GeoPointInPolygonQuery.class)); - GeoPointInPolygonQuery geoQuery = (GeoPointInPolygonQuery) query; - assertThat(geoQuery.getField(), equalTo(queryBuilder.fieldName())); - List queryBuilderPoints = queryBuilder.points(); - assertEquals(1, geoQuery.getPolygons().length); - double[] lats = geoQuery.getPolygons()[0].getPolyLats(); - double[] lons = geoQuery.getPolygons()[0].getPolyLons(); - assertThat(lats.length, equalTo(queryBuilderPoints.size())); - assertThat(lons.length, equalTo(queryBuilderPoints.size())); - for (int i=0; i < queryBuilderPoints.size(); ++i) { - final GeoPoint queryBuilderPoint = queryBuilderPoints.get(i); - final GeoPoint pointCopy = new GeoPoint(queryBuilderPoint); - GeoUtils.normalizePoint(pointCopy); - assertThat(lats[i], closeTo(pointCopy.getLat(), 1E-5D)); - assertThat(lons[i], closeTo(pointCopy.getLon(), 1E-5D)); - } - } - /** * Overridden here to ensure the test is only run if at least one type is * present in the mappings. Geo queries do not execute if the field is not @@ -124,9 +71,7 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase 0); - if (createShardContext().indexVersionCreated().before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - super.testToQuery(); - } + super.testToQuery(); } private static List randomPolygon() { @@ -287,38 +232,9 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase { - - @Override - protected Builder doCreateTestQueryBuilder() { - GeohashCellQuery.Builder builder = new Builder(GEO_POINT_FIELD_NAME, randomGeohash(1, 12)); - if (randomBoolean()) { - builder.neighbors(randomBoolean()); - } - if (randomBoolean()) { - if (randomBoolean()) { - builder.precision(randomIntBetween(1, 12)); - } else { - builder.precision(randomIntBetween(1, 1000000) + randomFrom(DistanceUnit.values()).toString()); - } - } - if (randomBoolean()) { - builder.ignoreUnmapped(randomBoolean()); - } - return builder; - } - - @Override - protected void doAssertLuceneQuery(Builder queryBuilder, Query query, SearchContext context) throws IOException { - if (queryBuilder.neighbors()) { - assertThat(query, instanceOf(TermsQuery.class)); - } else { - assertThat(query, instanceOf(TermQuery.class)); - TermQuery termQuery = (TermQuery) query; - Term term = termQuery.getTerm(); - assertThat(term.field(), equalTo(queryBuilder.fieldName() + "." + GeoPointFieldMapper.Names.GEOHASH)); - String geohash = queryBuilder.geohash(); - if (queryBuilder.precision() != null) { - int len = Math.min(queryBuilder.precision(), geohash.length()); - geohash = geohash.substring(0, len); - } - assertThat(term.text(), equalTo(geohash)); - } - } - - /** - * Overridden here to ensure the test is only run if at least one type is - * present in the mappings. 
Geo queries do not execute if the field is not - * explicitly mapped - */ - @Override - public void testToQuery() throws IOException { - assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); - Version version = createShardContext().indexVersionCreated(); - if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - super.testToQuery(); - } - } - - public void testNullField() { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new Builder(null, new GeoPoint())); - assertEquals("fieldName must not be null", e.getMessage()); - e = expectThrows(IllegalArgumentException.class, () -> new Builder("", new GeoPoint())); - assertEquals("fieldName must not be null", e.getMessage()); - } - - public void testNullGeoPoint() { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new Builder(GEO_POINT_FIELD_NAME, (GeoPoint) null)); - assertEquals("geohash or point must be defined", e.getMessage()); - e = expectThrows(IllegalArgumentException.class, () -> new Builder(GEO_POINT_FIELD_NAME, "")); - assertEquals("geohash or point must be defined", e.getMessage()); - } - - public void testInvalidPrecision() { - GeohashCellQuery.Builder builder = new Builder(GEO_POINT_FIELD_NAME, new GeoPoint()); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.precision(-1)); - assertThat(e.getMessage(), containsString("precision must be greater than 0")); - } - - public void testLocationParsing() throws IOException { - Point point = RandomShapeGenerator.xRandomPoint(random()); - Builder pointTestBuilder = new GeohashCellQuery.Builder("pin", new GeoPoint(point.getY(), point.getX())); - String pointTest1 = "{\"geohash_cell\": {\"pin\": {\"lat\": " + point.getY() + ",\"lon\": " + point.getX() + "}}}"; - assertParsedQuery(pointTest1, pointTestBuilder); - String pointTest2 = "{\"geohash_cell\": {\"pin\": \"" + point.getY() + "," + point.getX() + "\"}}"; - assertParsedQuery(pointTest2, pointTestBuilder); - String pointTest3 = "{\"geohash_cell\": {\"pin\": [" + point.getX() + "," + point.getY() + "]}}"; - assertParsedQuery(pointTest3, pointTestBuilder); - } - - public void testFromJson() throws IOException { - String json = - "{\n" + - " \"geohash_cell\" : {\n" + - " \"neighbors\" : true,\n" + - " \"precision\" : 3,\n" + - " \"pin\" : \"t4mk70fgk067\",\n" + - " \"ignore_unmapped\" : false,\n" + - " \"boost\" : 1.0\n" + - " }\n" + - "}"; - GeohashCellQuery.Builder parsed = (GeohashCellQuery.Builder) parseQuery(json); - checkGeneratedJson(json, parsed); - assertEquals(json, 3, parsed.precision().intValue()); - } - - @Override - public void testMustRewrite() throws IOException { - assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); - Version version = createShardContext().indexVersionCreated(); - if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - super.testMustRewrite(); - } - } - - public void testIgnoreUnmapped() throws IOException { - final GeohashCellQuery.Builder queryBuilder = new GeohashCellQuery.Builder("unmapped", "c"); - queryBuilder.ignoreUnmapped(true); - Query query = queryBuilder.toQuery(createShardContext()); - assertThat(query, notNullValue()); - assertThat(query, instanceOf(MatchNoDocsQuery.class)); - - final GeohashCellQuery.Builder failingQueryBuilder = new GeohashCellQuery.Builder("unmapped", "c"); - failingQueryBuilder.ignoreUnmapped(false); - QueryShardException e = 
expectThrows(QueryShardException.class, () -> failingQueryBuilder.toQuery(createShardContext())); - assertThat(e.getMessage(), containsString("failed to parse [" + GeohashCellQuery.NAME + "] query. missing [" - + BaseGeoPointFieldMapper.CONTENT_TYPE + "] field [unmapped]")); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/query/IndicesQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/IndicesQueryBuilderTests.java deleted file mode 100644 index 3b31d17d9ba..00000000000 --- a/core/src/test/java/org/elasticsearch/index/query/IndicesQueryBuilderTests.java +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.query; - -import org.apache.lucene.search.Query; -import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.test.AbstractQueryTestCase; -import org.junit.After; - -import java.io.IOException; - -public class IndicesQueryBuilderTests extends AbstractQueryTestCase { - - /** - * All tests create deprecation warnings when an new {@link IndicesQueryBuilder} is created. - * Instead of having to check them once in every single test, this is done here after each test is run - */ - @After - void checkWarningHeaders() throws IOException { - checkWarningHeaders("indices query is deprecated. 
Instead search on the '_index' field"); - } - - @Override - protected IndicesQueryBuilder doCreateTestQueryBuilder() { - String[] indices; - if (randomBoolean()) { - indices = new String[]{getIndex().getName()}; - } else { - indices = generateRandomStringArray(5, 10, false, false); - } - IndicesQueryBuilder query = new IndicesQueryBuilder(RandomQueryBuilder.createQuery(random()), indices); - - switch (randomInt(2)) { - case 0: - query.noMatchQuery(RandomQueryBuilder.createQuery(random())); - break; - case 1: - query.noMatchQuery(randomFrom(QueryBuilders.matchAllQuery(), new MatchNoneQueryBuilder())); - break; - default: - // do not set noMatchQuery - } - return query; - } - - @Override - protected void doAssertLuceneQuery(IndicesQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException { - Query expected; - if (queryBuilder.indices().length == 1 && getIndex().getName().equals(queryBuilder.indices()[0])) { - expected = queryBuilder.innerQuery().toQuery(context.getQueryShardContext()); - } else { - expected = queryBuilder.noMatchQuery().toQuery(context.getQueryShardContext()); - } - assertEquals(expected, query); - } - - public void testIllegalArguments() { - expectThrows(IllegalArgumentException.class, () -> new IndicesQueryBuilder(null, "index")); - - expectThrows(IllegalArgumentException.class, () -> new IndicesQueryBuilder(new MatchAllQueryBuilder(), (String[]) null)); - expectThrows(IllegalArgumentException.class, () -> new IndicesQueryBuilder(new MatchAllQueryBuilder(), new String[0])); - - IndicesQueryBuilder indicesQueryBuilder = new IndicesQueryBuilder(new MatchAllQueryBuilder(), "index"); - expectThrows(IllegalArgumentException.class, () -> indicesQueryBuilder.noMatchQuery((QueryBuilder) null)); - expectThrows(IllegalArgumentException.class, () -> indicesQueryBuilder.noMatchQuery((String) null)); - } - - public void testFromJson() throws IOException { - String json = - "{\n" + - " \"indices\" : {\n" + - " \"indices\" : [ \"index1\", \"index2\" ],\n" + - " \"query\" : {\n" + - " \"term\" : {\n" + - " \"tag\" : {\n" + - " \"value\" : \"wow\",\n" + - " \"boost\" : 1.0\n" + - " }\n" + - " }\n" + - " },\n" + - " \"no_match_query\" : {\n" + - " \"term\" : {\n" + - " \"tag\" : {\n" + - " \"value\" : \"kow\",\n" + - " \"boost\" : 1.0\n" + - " }\n" + - " }\n" + - " },\n" + - " \"boost\" : 1.0\n" + - " }\n" + - "}"; - IndicesQueryBuilder parsed = (IndicesQueryBuilder) parseQuery(json); - checkGeneratedJson(json, parsed); - assertEquals(json, 2, parsed.indices().length); - assertEquals(json, "kow", ((TermQueryBuilder) parsed.noMatchQuery()).value()); - assertEquals(json, "wow", ((TermQueryBuilder) parsed.innerQuery()).value()); - } -} diff --git a/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java index 00d41aa754e..a32eafd850c 100644 --- a/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java @@ -363,34 +363,6 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase parseQuery(json, ParseFieldMatcher.STRICT)); - assertThat(e.getMessage(), containsString("Deprecated field [" + type + "] used, expected [match] instead")); - } - public void testFuzzinessOnNonStringField() throws Exception { assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0); MatchQueryBuilder query = new MatchQueryBuilder(INT_FIELD_NAME, 42); 
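
The deleted IndicesQueryBuilderTests above verified the deprecation warning "indices query is deprecated. Instead search on the '_index' field", which is the only migration hint this change carries. Below is a minimal sketch of that suggested rewrite, not part of the patch: it reuses the index names and term values from the deleted testFromJson fixture (index1/index2, tag:wow with a tag:kow fallback), and the class and method names are illustrative only.

import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;

class IndicesQueryMigrationSketch {
    // Apply tag:wow on index1/index2 and tag:kow everywhere else,
    // mirroring the removed indices/no_match_query pair with terms on _index.
    static QueryBuilder replacement() {
        return QueryBuilders.boolQuery()
            .should(QueryBuilders.boolQuery()
                .filter(QueryBuilders.termsQuery("_index", "index1", "index2"))
                .must(QueryBuilders.termQuery("tag", "wow")))
            .should(QueryBuilders.boolQuery()
                .mustNot(QueryBuilders.termsQuery("_index", "index1", "index2"))
                .must(QueryBuilders.termQuery("tag", "kow")));
    }
}

Because the two should clauses are made mutually exclusive by the _index filter and mustNot, exactly one branch applies per index, which is the behaviour the removed builder provided through innerQuery() and noMatchQuery().
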
diff --git a/core/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java index c0900de4de1..fc4c640dd45 100644 --- a/core/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java @@ -339,40 +339,5 @@ public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase parseQuery(deprecatedJson)); - assertEquals("Deprecated field [mlt] used, expected [more_like_this] instead", e.getMessage()); - - checkWarningHeaders("Deprecated field [mlt] used, expected [more_like_this] instead"); } } diff --git a/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java index 05d461e1278..988f6f5c4ba 100644 --- a/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java @@ -25,10 +25,8 @@ import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.search.join.ToParentBlockJoinQuery; -import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.index.mapper.LatLonPointFieldMapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.search.fetch.subphase.InnerHitsContext; import org.elasticsearch.search.internal.SearchContext; @@ -51,9 +49,6 @@ public class NestedQueryBuilderTests extends AbstractQueryTestCase points = new ArrayList(); points.add(new GeoPoint(40, -70)); @@ -193,13 +180,6 @@ public class QueryDSLDocumentationTests extends ESTestCase { .indexedShapePath("location"); } - public void testGeoHashCell() { - geoHashCellQuery("pin.location", - new GeoPoint(13.4080, 52.5186)) - .neighbors(true) - .precision(3); - } - public void testHasChild() { hasChildQuery( "blog_tag", @@ -221,18 +201,6 @@ public class QueryDSLDocumentationTests extends ESTestCase { idsQuery().addIds("1", "4", "100"); } - public void testIndices() { - indicesQuery( - termQuery("tag", "wow"), - "index1", "index2" - ).noMatchQuery(termQuery("tag", "kow")); - - indicesQuery( - termQuery("tag", "wow"), - "index1", "index2" - ).noMatchQuery("all"); - } - public void testMatchAll() { matchAllQuery(); } diff --git a/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java index 893c2ec4e56..687d44e6cbf 100644 --- a/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java @@ -150,7 +150,10 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase builder.toQuery(createShardContext())); + assertEquals(exc.getMessage(), + "it is disallowed to disable [split_on_whitespace] if [auto_generate_phrase_queries] is activated"); + } } diff --git a/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java index 60c17d88e65..09627d00d76 100644 --- a/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java +++ 
b/core/src/test/java/org/elasticsearch/index/query/RangeQueryBuilderTests.java @@ -19,18 +19,18 @@ package org.elasticsearch.index.query; +import com.carrotsearch.randomizedtesting.generators.RandomPicks; import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.LongPoint; -import org.apache.lucene.search.LegacyNumericRangeQuery; import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermRangeQuery; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.index.mapper.DateFieldMapper; -import org.elasticsearch.index.mapper.LegacyDateFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType.Relation; import org.elasticsearch.index.mapper.MapperService; @@ -39,6 +39,7 @@ import org.elasticsearch.test.AbstractQueryTestCase; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.chrono.ISOChronology; +import org.locationtech.spatial4j.shape.SpatialRelation; import java.io.IOException; import java.util.HashMap; @@ -46,11 +47,8 @@ import java.util.Map; import static org.elasticsearch.index.query.QueryBuilders.rangeQuery; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.sameInstance; public class RangeQueryBuilderTests extends AbstractQueryTestCase { @@ -62,13 +60,13 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase 0); Query parsedQuery = rangeQuery(INT_FIELD_NAME).from(23).to(54).includeLower(true).includeUpper(false).toQuery(createShardContext()); // since age is automatically registered in data, we encode it as numeric - assertThat(parsedQuery, either(instanceOf(LegacyNumericRangeQuery.class)).or(instanceOf(PointRangeQuery.class))); - if (parsedQuery instanceof LegacyNumericRangeQuery) { - LegacyNumericRangeQuery rangeQuery = (LegacyNumericRangeQuery) parsedQuery; - assertThat(rangeQuery.getField(), equalTo(INT_FIELD_NAME)); - assertThat(rangeQuery.getMin().intValue(), equalTo(23)); - assertThat(rangeQuery.getMax().intValue(), equalTo(54)); - assertThat(rangeQuery.includesMin(), equalTo(true)); - assertThat(rangeQuery.includesMax(), equalTo(false)); - } else { - assertEquals(IntPoint.newRangeQuery(INT_FIELD_NAME, 23, 53), parsedQuery); - } + assertThat(parsedQuery, instanceOf(PointRangeQuery.class)); + assertEquals(IntPoint.newRangeQuery(INT_FIELD_NAME, 23, 53), parsedQuery); } public void testDateRangeQueryFormat() throws IOException { @@ -290,22 +258,12 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase 0); - long startDate = System.currentTimeMillis(); String query = "{\n" + " \"range\" : {\n" + " \"" + DATE_FIELD_NAME + "\" : {\n" + @@ -391,21 +324,8 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase booleanTerms = new ArrayList<>(); - for (BooleanClause booleanClause : booleanQuery) { - assertThat(booleanClause.getOccur(), equalTo(BooleanClause.Occur.SHOULD)); - assertThat(booleanClause.getQuery(), instanceOf(TermQuery.class)); - Term term = ((TermQuery) 
booleanClause.getQuery()).getTerm(); - booleanTerms.add(term); - } - CollectionUtil.timSort(booleanTerms); - List expectedTerms = new ArrayList<>(); - for (Object term : terms) { - if (term != null) { // terms lookup filters this out - expectedTerms.add(new Term(queryBuilder.fieldName(), term.toString())); - } - } - CollectionUtil.timSort(expectedTerms); - assertEquals(expectedTerms + " vs. " + booleanTerms, expectedTerms.size(), booleanTerms.size()); - assertEquals(expectedTerms + " vs. " + booleanTerms, expectedTerms, booleanTerms); + TermsQuery expected = new TermsQuery(queryBuilder.fieldName(), + terms.stream().filter(Objects::nonNull).map(Object::toString).map(BytesRef::new).collect(Collectors.toList())); + assertEquals(expected, query); } } @@ -213,7 +206,7 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase values = copy.values(); - assertEquals(Arrays.asList(1, 3, 4), values); + assertEquals(Arrays.asList(1L, 3L, 4L), values); } { TermsQueryBuilder builder = new TermsQueryBuilder("foo", new double[]{1, 3, 4}); @@ -255,19 +248,6 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase parseQuery(deprecatedJson)); - assertEquals("Deprecated field [in] used, expected [terms] instead", e.getMessage()); } @Override @@ -296,5 +276,27 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase list = Arrays.asList(); + assertSame(Collections.emptyList(), TermsQueryBuilder.convert(list)); + assertEquals(list, TermsQueryBuilder.convertBack(TermsQueryBuilder.convert(list))); + + list = Arrays.asList("abc"); + assertEquals(Arrays.asList(new BytesRef("abc")), TermsQueryBuilder.convert(list)); + assertEquals(list, TermsQueryBuilder.convertBack(TermsQueryBuilder.convert(list))); + + list = Arrays.asList("abc", new BytesRef("def")); + assertEquals(Arrays.asList(new BytesRef("abc"), new BytesRef("def")), TermsQueryBuilder.convert(list)); + assertEquals(Arrays.asList("abc", "def"), TermsQueryBuilder.convertBack(TermsQueryBuilder.convert(list))); + + list = Arrays.asList(5, 42L); + assertEquals(Arrays.asList(5L, 42L), TermsQueryBuilder.convert(list)); + assertEquals(Arrays.asList(5L, 42L), TermsQueryBuilder.convertBack(TermsQueryBuilder.convert(list))); + + list = Arrays.asList(5, 42d); + assertEquals(Arrays.asList(5, 42d), TermsQueryBuilder.convert(list)); + assertEquals(Arrays.asList(5, 42d), TermsQueryBuilder.convertBack(TermsQueryBuilder.convert(list))); + } } diff --git a/core/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java index d7537bfd71b..e272accd5e6 100644 --- a/core/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/WildcardQueryBuilderTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.query; +import org.apache.lucene.index.Term; import org.apache.lucene.search.Query; import org.apache.lucene.search.WildcardQuery; import org.elasticsearch.common.ParsingException; @@ -125,4 +126,14 @@ public class WildcardQueryBuilderTests extends AbstractQueryTestCase parseQuery(shortJson)); assertEquals("[wildcard] query doesn't support multiple fields, found [user1] and [user2]", e.getMessage()); } + + public void testWithMetaDataField() throws IOException { + QueryShardContext context = createShardContext(); + for (String field : new String[]{"_type", "_all"}) { + WildcardQueryBuilder wildcardQueryBuilder = new WildcardQueryBuilder(field, "toto"); + Query query = 
wildcardQueryBuilder.toQuery(context); + Query expected = new WildcardQuery(new Term(field, "toto")); + assertEquals(expected, query); + } + } } diff --git a/core/src/test/java/org/elasticsearch/index/search/MatchQueryIT.java b/core/src/test/java/org/elasticsearch/index/search/MatchQueryIT.java new file mode 100644 index 00000000000..f6fbc3410ac --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/search/MatchQueryIT.java @@ -0,0 +1,154 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.search; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoSearchHits; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; + +import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; +import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.query.Operator; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.test.ESIntegTestCase; +import org.junit.Before; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.ExecutionException; + +public class MatchQueryIT extends ESIntegTestCase { + private static final String INDEX = "test"; + + /** + * Test setup. 
+ */ + @Before + public void setUp() throws Exception { + super.setUp(); + CreateIndexRequestBuilder builder = prepareCreate(INDEX).setSettings( + Settings.builder() + .put(indexSettings()) + .put("index.analysis.filter.syns.type", "synonym") + .putArray("index.analysis.filter.syns.synonyms", "wtf, what the fudge", "foo, bar baz") + .put("index.analysis.analyzer.lower_syns.type", "custom") + .put("index.analysis.analyzer.lower_syns.tokenizer", "standard") + .putArray("index.analysis.analyzer.lower_syns.filter", "lowercase", "syns") + .put("index.analysis.filter.graphsyns.type", "synonym_graph") + .putArray("index.analysis.filter.graphsyns.synonyms", "wtf, what the fudge", "foo, bar baz") + .put("index.analysis.analyzer.lower_graphsyns.type", "custom") + .put("index.analysis.analyzer.lower_graphsyns.tokenizer", "standard") + .putArray("index.analysis.analyzer.lower_graphsyns.filter", "lowercase", "graphsyns") + ); + + assertAcked(builder.addMapping(INDEX, createMapping())); + ensureGreen(); + + List builders = new ArrayList<>(); + builders.add(client().prepareIndex("test", "test", "1").setSource("field", "say wtf happened foo")); + builders.add(client().prepareIndex("test", "test", "2").setSource("field", "bar baz what the fudge man")); + builders.add(client().prepareIndex("test", "test", "3").setSource("field", "wtf")); + builders.add(client().prepareIndex("test", "test", "4").setSource("field", "what is the name for fudge")); + builders.add(client().prepareIndex("test", "test", "5").setSource("field", "bar two three")); + builders.add(client().prepareIndex("test", "test", "6").setSource("field", "bar baz two three")); + + indexRandom(true, false, builders); + } + + /** + * Setup the index mappings for the test index. + * + * @return the json builder with the index mappings + * @throws IOException on error creating mapping json + */ + private XContentBuilder createMapping() throws IOException { + return XContentFactory.jsonBuilder() + .startObject() + .startObject(INDEX) + .startObject("properties") + .startObject("field") + .field("type", "text") + .endObject() + .endObject() + .endObject() + .endObject(); + } + + public void testSimpleMultiTermPhrase() throws ExecutionException, InterruptedException { + // first search using regular synonym field using phrase + SearchResponse searchResponse = client().prepareSearch(INDEX) + .setQuery(QueryBuilders.matchPhraseQuery("field", "foo two three").analyzer("lower_syns")).get(); + + // because foo -> "bar baz" where "foo" and "bar" at position 0, "baz" and "two" at position 1. 
+ // "bar two three", "bar baz three", "foo two three", "foo baz three" + assertHitCount(searchResponse, 1L); + assertSearchHits(searchResponse, "5"); // we should not match this but we do + + // same query using graph should find correct result + searchResponse = client().prepareSearch(INDEX).setQuery(QueryBuilders.matchPhraseQuery("field", "foo two three") + .analyzer("lower_graphsyns")).get(); + + assertHitCount(searchResponse, 1L); + assertSearchHits(searchResponse, "6"); + } + + public void testSimpleMultiTermAnd() throws ExecutionException, InterruptedException { + // first search using regular synonym field using phrase + SearchResponse searchResponse = client().prepareSearch(INDEX).setQuery(QueryBuilders.matchQuery("field", "say what the fudge") + .operator(Operator.AND).analyzer("lower_syns")).get(); + + // 0 = say, 1 = OR(wtf, what), 2 = the, 3 = fudge + // "the" and "fudge" are required here, even though they were part of the synonym which is also expanded + assertNoSearchHits(searchResponse); + + // same query using graph should find correct result + searchResponse = client().prepareSearch(INDEX).setQuery(QueryBuilders.matchQuery("field", "say what the fudge") + .operator(Operator.AND).analyzer("lower_graphsyns")).get(); + + assertHitCount(searchResponse, 1L); + assertSearchHits(searchResponse, "1"); + } + + public void testMinShouldMatch() throws ExecutionException, InterruptedException { + // no min should match + SearchResponse searchResponse = client().prepareSearch(INDEX).setQuery(QueryBuilders.matchQuery("field", "three what the fudge foo") + .operator(Operator.OR).analyzer("lower_graphsyns")).get(); + + assertHitCount(searchResponse, 6L); + assertSearchHits(searchResponse, "1", "2", "3", "4", "5", "6"); + + // same query, with min_should_match of 2 + searchResponse = client().prepareSearch(INDEX).setQuery(QueryBuilders.matchQuery("field", "three what the fudge foo") + .operator(Operator.OR).analyzer("lower_graphsyns").minimumShouldMatch("80%")).get(); + + // three wtf foo = 2 terms, match #1 + // three wtf bar baz = 3 terms, match #6 + // three what the fudge foo = 4 terms, no match + // three what the fudge bar baz = 4 terms, match #2 + assertHitCount(searchResponse, 3L); + assertSearchHits(searchResponse, "1", "2", "6"); + } +} diff --git a/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java b/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java index 7d7b7a4cd6e..992667a5056 100644 --- a/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java +++ b/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java @@ -28,10 +28,12 @@ import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.DisjunctionMaxQuery; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.search.SynonymQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.mapper.MapperService; @@ -55,7 +57,12 @@ public class MultiMatchQueryTests extends ESSingleNodeTestCase { @Before public void setup() throws IOException { - IndexService indexService = createIndex("test"); + Settings settings = Settings.builder() + 
.put("index.analysis.filter.syns.type","synonym") + .putArray("index.analysis.filter.syns.synonyms","quick,fast") + .put("index.analysis.analyzer.syns.tokenizer","standard") + .put("index.analysis.analyzer.syns.filter","syns").build(); + IndexService indexService = createIndex("test", settings); MapperService mapperService = indexService.mapperService(); String mapping = "{\n" + " \"person\":{\n" + @@ -63,10 +70,12 @@ public class MultiMatchQueryTests extends ESSingleNodeTestCase { " \"name\":{\n" + " \"properties\":{\n" + " \"first\": {\n" + - " \"type\":\"text\"\n" + + " \"type\":\"text\",\n" + + " \"analyzer\":\"syns\"\n" + " }," + " \"last\": {\n" + - " \"type\":\"text\"\n" + + " \"type\":\"text\",\n" + + " \"analyzer\":\"syns\"\n" + " }" + " }" + " }\n" + @@ -176,4 +185,34 @@ public class MultiMatchQueryTests extends ESSingleNodeTestCase { assertThat(parsedQuery, instanceOf(MultiPhrasePrefixQuery.class)); assertThat(parsedQuery.toString(), equalTo("_all:\"foo*\"")); } + + public void testMultiMatchCrossFieldsWithSynonyms() throws IOException { + QueryShardContext queryShardContext = indexService.newQueryShardContext( + randomInt(20), null, () -> { throw new UnsupportedOperationException(); }); + + // check that synonym query is used for a single field + Query parsedQuery = + multiMatchQuery("quick").field("name.first") + .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).toQuery(queryShardContext); + Term[] terms = new Term[2]; + terms[0] = new Term("name.first", "quick"); + terms[1] = new Term("name.first", "fast"); + Query expectedQuery = new SynonymQuery(terms); + assertThat(parsedQuery, equalTo(expectedQuery)); + + // check that blended term query is used for multiple fields + parsedQuery = + multiMatchQuery("quick").field("name.first").field("name.last") + .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).toQuery(queryShardContext); + terms = new Term[4]; + terms[0] = new Term("name.first", "quick"); + terms[1] = new Term("name.first", "fast"); + terms[2] = new Term("name.last", "quick"); + terms[3] = new Term("name.last", "fast"); + float[] boosts = new float[4]; + Arrays.fill(boosts, 1.0f); + expectedQuery = BlendedTermQuery.dismaxBlendedQuery(terms, boosts, 1.0f); + assertThat(parsedQuery, equalTo(expectedQuery)); + + } } diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java index e75261326bb..d32ae8e03ed 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java @@ -56,7 +56,6 @@ import org.elasticsearch.index.mapper.Mapping; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.UidFieldMapper; -import org.elasticsearch.index.mapper.internal.SeqNoFieldMapper; import org.elasticsearch.index.seqno.SequenceNumbersService; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.indices.IndicesService; @@ -100,14 +99,14 @@ public class IndexShardIT extends ESSingleNodeTestCase { return pluginList(InternalSettingsPlugin.class); } - private ParsedDocument testParsedDocument(String uid, String id, String type, String routing, long seqNo, long timestamp, long ttl, + private ParsedDocument testParsedDocument(String uid, String id, String type, String routing, long seqNo, ParseContext.Document document, BytesReference source, Mapping mappingUpdate) { Field uidField = new Field("_uid", uid, 
UidFieldMapper.Defaults.FIELD_TYPE); Field seqNoField = new NumericDocValuesField("_seq_no", seqNo); Field versionField = new NumericDocValuesField("_version", 0); document.add(uidField); document.add(versionField); - return new ParsedDocument(versionField, seqNoField, id, type, routing, timestamp, ttl, Collections.singletonList(document), source, + return new ParsedDocument(versionField, seqNoField, id, type, routing, Collections.singletonList(document), source, mappingUpdate); } @@ -318,7 +317,7 @@ public class IndexShardIT extends ESSingleNodeTestCase { assertFalse(shard.shouldFlush()); client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder() .put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), - new ByteSizeValue(133 /* size of the operation + header&footer*/, ByteSizeUnit.BYTES)).build()).get(); + new ByteSizeValue(117 /* size of the operation + header&footer*/, ByteSizeUnit.BYTES)).build()).get(); client().prepareIndex("test", "test", "0").setSource("{}").setRefreshPolicy(randomBoolean() ? IMMEDIATE : NONE).get(); assertFalse(shard.shouldFlush()); ParsedDocument doc = testParsedDocument( @@ -327,8 +326,6 @@ public class IndexShardIT extends ESSingleNodeTestCase { "test", null, SequenceNumbersService.UNASSIGNED_SEQ_NO, - -1, - -1, new ParseContext.Document(), new BytesArray(new byte[]{1}), null); Engine.Index index = new Engine.Index(new Term("_uid", "1"), doc); @@ -367,7 +364,7 @@ public class IndexShardIT extends ESSingleNodeTestCase { assertFalse(shard.shouldFlush()); client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put( IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), - new ByteSizeValue(133/* size of the operation + header&footer*/, ByteSizeUnit.BYTES)).build()).get(); + new ByteSizeValue(117/* size of the operation + header&footer*/, ByteSizeUnit.BYTES)).build()).get(); client().prepareIndex("test", "test", "0").setSource("{}").setRefreshPolicy(randomBoolean() ? 
IMMEDIATE : NONE).get(); assertFalse(shard.shouldFlush()); final AtomicBoolean running = new AtomicBoolean(true); diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 52861f85def..4ab702c33de 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -62,7 +62,6 @@ import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.index.IndexService; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineException; import org.elasticsearch.index.fielddata.FieldDataStats; @@ -539,14 +538,14 @@ public class IndexShardTests extends IndexShardTestCase { closeShards(shard); } - private ParsedDocument testParsedDocument(String uid, String id, String type, String routing, long timestamp, long ttl, + private ParsedDocument testParsedDocument(String uid, String id, String type, String routing, ParseContext.Document document, BytesReference source, Mapping mappingUpdate) { Field uidField = new Field("_uid", uid, UidFieldMapper.Defaults.FIELD_TYPE); Field versionField = new NumericDocValuesField("_version", 0); Field seqNoField = new NumericDocValuesField("_seq_no", 0); document.add(uidField); document.add(versionField); - return new ParsedDocument(versionField, seqNoField, id, type, routing, timestamp, ttl, Arrays.asList(document), source, mappingUpdate); + return new ParsedDocument(versionField, seqNoField, id, type, routing, Arrays.asList(document), source, mappingUpdate); } public void testIndexingOperationsListeners() throws IOException { @@ -608,7 +607,7 @@ public class IndexShardTests extends IndexShardTestCase { }); recoveryShardFromStore(shard); - ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, new ParseContext.Document(), + ParsedDocument doc = testParsedDocument("1", "1", "test", null, new ParseContext.Document(), new BytesArray(new byte[]{1}), null); Engine.Index index = new Engine.Index(new Term("_uid", "1"), doc); shard.index(index); diff --git a/core/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java b/core/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java index 6f537574d2e..0598646c163 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java @@ -277,7 +277,7 @@ public class RefreshListenersTests extends ESTestCase { document.add(uidField); document.add(versionField); BytesReference source = new BytesArray(new byte[] { 1 }); - ParsedDocument doc = new ParsedDocument(versionField, seqNoField, id, type, null, -1, -1, Arrays.asList(document), source, null); + ParsedDocument doc = new ParsedDocument(versionField, seqNoField, id, type, null, Arrays.asList(document), source, null); Engine.Index index = new Engine.Index(new Term("_uid", uid), doc); return engine.index(index); } diff --git a/core/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java b/core/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java index 91dc0728977..9c4bcce6ff6 100644 --- a/core/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java +++ 
b/core/src/test/java/org/elasticsearch/index/store/IndexStoreTests.java @@ -73,7 +73,6 @@ public class IndexStoreTests extends ESTestCase { assertTrue(type + " " + directory.toString(), directory instanceof SimpleFSDirectory); break; case FS: - case DEFAULT: if (Constants.JRE_IS_64BIT && MMapDirectory.UNMAP_SUPPORTED) { assertTrue(directory.toString(), directory instanceof MMapDirectory); } else if (Constants.WINDOWS) { diff --git a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index b4a5c2970ff..0bd1c9c6140 100644 --- a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -86,7 +86,6 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; -import java.util.function.LongSupplier; import java.util.stream.Collectors; import static org.hamcrest.Matchers.equalTo; @@ -302,7 +301,7 @@ public class TranslogTests extends ESTestCase { assertThat(stats.estimatedNumberOfOperations(), equalTo(0L)); assertThat(stats.getTranslogSizeInBytes(), equalTo(firstOperationPosition)); assertEquals(6, total.estimatedNumberOfOperations()); - assertEquals(461, total.getTranslogSizeInBytes()); + assertEquals(413, total.getTranslogSizeInBytes()); BytesStreamOutput out = new BytesStreamOutput(); total.writeTo(out); @@ -310,13 +309,13 @@ public class TranslogTests extends ESTestCase { copy.readFrom(out.bytes().streamInput()); assertEquals(6, copy.estimatedNumberOfOperations()); - assertEquals(461, copy.getTranslogSizeInBytes()); + assertEquals(413, copy.getTranslogSizeInBytes()); try (XContentBuilder builder = XContentFactory.jsonBuilder()) { builder.startObject(); copy.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); - assertEquals("{\"translog\":{\"operations\":6,\"size_in_bytes\":461}}", builder.string()); + assertEquals("{\"translog\":{\"operations\":6,\"size_in_bytes\":413}}", builder.string()); } try { @@ -1138,7 +1137,7 @@ public class TranslogTests extends ESTestCase { try (Translog ignored = new Translog(config, translogGeneration, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO)) { fail("corrupted"); } catch (IllegalStateException ex) { - assertEquals(ex.getMessage(), "Checkpoint file translog-2.ckp already exists but has corrupted content expected: Checkpoint{offset=3178, numOps=55, translogFileGeneration=2, globalCheckpoint=-2} but got: Checkpoint{offset=0, numOps=0, translogFileGeneration=0, globalCheckpoint=-2}"); + assertEquals(ex.getMessage(), "Checkpoint file translog-2.ckp already exists but has corrupted content expected: Checkpoint{offset=2298, numOps=55, translogFileGeneration=2, globalCheckpoint=-2} but got: Checkpoint{offset=0, numOps=0, translogFileGeneration=0, globalCheckpoint=-2}"); } Checkpoint.write(FileChannel::open, config.getTranslogPath().resolve(Translog.getCommitCheckpointFileName(read.generation)), read, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING); try (Translog translog = new Translog(config, translogGeneration, () -> SequenceNumbersService.UNASSIGNED_SEQ_NO)) { diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java b/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java index 9750cd35d01..b94e864fdd6 100644 --- 
a/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java @@ -38,9 +38,9 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.index.mapper.StringFieldMapper; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardPath; import org.elasticsearch.index.similarity.BM25SimilarityProvider; @@ -90,7 +90,7 @@ public class IndicesServiceTests extends ESSingleNodeTestCase { @Override public Map getMappers() { - return Collections.singletonMap("fake-mapper", new StringFieldMapper.TypeParser()); + return Collections.singletonMap("fake-mapper", new KeywordFieldMapper.TypeParser()); } @Override diff --git a/core/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java b/core/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java index fca09e74332..d1d266a42a4 100644 --- a/core/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java +++ b/core/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java @@ -20,6 +20,7 @@ package org.elasticsearch.indices.cluster; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteRequest; import org.elasticsearch.action.admin.cluster.reroute.TransportClusterRerouteAction; @@ -158,7 +159,7 @@ public class ClusterStateChanges extends AbstractComponent { MetaDataIndexUpgradeService metaDataIndexUpgradeService = new MetaDataIndexUpgradeService(settings, null, null) { // metaData upgrader should do nothing @Override - public IndexMetaData upgradeIndexMetaData(IndexMetaData indexMetaData) { + public IndexMetaData upgradeIndexMetaData(IndexMetaData indexMetaData, Version minimumIndexCompatibilityVersion) { return indexMetaData; } }; diff --git a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java index 612e1d1e16b..57777453976 100644 --- a/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java +++ b/core/src/test/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java @@ -695,7 +695,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase { .setCreate(true) .setOrder(1) .addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("field2").field("type", "string").field("analyzer", "custom_1").endObject() + .startObject("field2").field("type", "text").field("analyzer", "custom_1").endObject() .endObject().endObject().endObject()) .get()); assertThat(e.getMessage(), containsString("analyzer [custom_1] not found for field [field2]")); diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java index 2c3730dc422..14335bde076 100644 --- a/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java +++ b/core/src/test/java/org/elasticsearch/ingest/IngestClientIT.java @@ -120,7 +120,7 @@ public class IngestClientIT extends ESIntegTestCase { source.put("foo", 
"bar"); source.put("fail", false); source.put("processed", true); - IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, null, source); + IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, source); assertThat(simulateDocumentBaseResult.getIngestDocument().getSourceAndMetadata(), equalTo(ingestDocument.getSourceAndMetadata())); assertThat(simulateDocumentBaseResult.getFailure(), nullValue()); } diff --git a/core/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java b/core/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java index 3906f82dc03..e16be95d2e6 100644 --- a/core/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java @@ -75,7 +75,7 @@ public class IngestDocumentTests extends ESTestCase { list.add(null); document.put("list", list); - ingestDocument = new IngestDocument("index", "type", "id", null, null, null, null, document); + ingestDocument = new IngestDocument("index", "type", "id", null, null, document); } public void testSimpleGetFieldValue() { diff --git a/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java b/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java index 947cb3f18d1..c00e9254ab2 100644 --- a/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java +++ b/core/src/test/java/org/elasticsearch/ingest/PipelineExecutionServiceTests.java @@ -20,14 +20,12 @@ package org.elasticsearch.ingest; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ -160,12 +158,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { doAnswer((InvocationOnMock invocationOnMock) -> { IngestDocument ingestDocument = (IngestDocument) invocationOnMock.getArguments()[0]; for (IngestDocument.MetaData metaData : IngestDocument.MetaData.values()) { - if (metaData == IngestDocument.MetaData.TTL) { - ingestDocument.setFieldValue(IngestDocument.MetaData.TTL.getFieldName(), "35d"); - } else { - ingestDocument.setFieldValue(metaData.getFieldName(), "update" + metaData.getFieldName()); - } - + ingestDocument.setFieldValue(metaData.getFieldName(), "update" + metaData.getFieldName()); } return null; }).when(processor).execute(any()); @@ -186,8 +179,6 @@ public class PipelineExecutionServiceTests extends ESTestCase { assertThat(indexRequest.id(), equalTo("update_id")); assertThat(indexRequest.routing(), equalTo("update_routing")); assertThat(indexRequest.parent(), equalTo("update_parent")); - assertThat(indexRequest.timestamp(), equalTo("update_timestamp")); - assertThat(indexRequest.ttl(), equalTo(new TimeValue(3024000000L))); } public void testExecuteFailure() throws Exception { @@ -266,53 +257,6 @@ public class PipelineExecutionServiceTests extends ESTestCase { verify(completionHandler, never()).accept(anyBoolean()); } - public void testExecuteSetTTL() throws Exception { - Processor processor = 
new TestProcessor(ingestDocument -> ingestDocument.setFieldValue("_ttl", "5d")); - when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", version, new CompoundProcessor(processor))); - - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id"); - @SuppressWarnings("unchecked") - Consumer failureHandler = mock(Consumer.class); - @SuppressWarnings("unchecked") - Consumer completionHandler = mock(Consumer.class); - executionService.executeIndexRequest(indexRequest, failureHandler, completionHandler); - - assertThat(indexRequest.ttl(), equalTo(TimeValue.parseTimeValue("5d", null, "ttl"))); - verify(failureHandler, never()).accept(any()); - verify(completionHandler, times(1)).accept(true); - } - - public void testExecuteSetInvalidTTL() throws Exception { - Processor processor = new TestProcessor(ingestDocument -> ingestDocument.setFieldValue("_ttl", "abc")); - when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", version, new CompoundProcessor(processor))); - - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").source(Collections.emptyMap()).setPipeline("_id"); - @SuppressWarnings("unchecked") - Consumer failureHandler = mock(Consumer.class); - @SuppressWarnings("unchecked") - Consumer completionHandler = mock(Consumer.class); - executionService.executeIndexRequest(indexRequest, failureHandler, completionHandler); - verify(failureHandler, times(1)).accept(any(ElasticsearchParseException.class)); - verify(completionHandler, never()).accept(anyBoolean()); - } - - public void testExecuteProvidedTTL() throws Exception { - when(store.get("_id")).thenReturn(new Pipeline("_id", "_description", version, mock(CompoundProcessor.class))); - - IndexRequest indexRequest = new IndexRequest("_index", "_type", "_id").setPipeline("_id") - .source(Collections.emptyMap()) - .ttl(1000L); - @SuppressWarnings("unchecked") - Consumer failureHandler = mock(Consumer.class); - @SuppressWarnings("unchecked") - Consumer completionHandler = mock(Consumer.class); - executionService.executeIndexRequest(indexRequest, failureHandler, completionHandler); - - assertThat(indexRequest.ttl(), equalTo(new TimeValue(1000L))); - verify(failureHandler, never()).accept(any()); - verify(completionHandler, times(1)).accept(true); - } - public void testBulkRequestExecutionWithFailures() throws Exception { BulkRequest bulkRequest = new BulkRequest(); String pipelineId = "_id"; @@ -439,7 +383,7 @@ public class PipelineExecutionServiceTests extends ESTestCase { private final IngestDocument ingestDocument; public IngestDocumentMatcher(String index, String type, String id, Map source) { - this.ingestDocument = new IngestDocument(index, type, id, null, null, null, null, source); + this.ingestDocument = new IngestDocument(index, type, id, null, null, source); } @Override diff --git a/core/src/test/java/org/elasticsearch/search/SearchCancellationTests.java b/core/src/test/java/org/elasticsearch/search/SearchCancellationTests.java index 48efc189ed2..2ee81e935e2 100644 --- a/core/src/test/java/org/elasticsearch/search/SearchCancellationTests.java +++ b/core/src/test/java/org/elasticsearch/search/SearchCancellationTests.java @@ -44,7 +44,7 @@ public class SearchCancellationTests extends ESTestCase { static IndexReader reader; @BeforeClass - public static void before() throws IOException { + public static void setup() throws IOException { dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir); // we need at least 
2 segments - so no merges should be allowed @@ -66,7 +66,7 @@ public class SearchCancellationTests extends ESTestCase { } @AfterClass - public static void after() throws IOException { + public static void cleanup() throws IOException { IOUtils.close(reader, dir); dir = null; reader = null; diff --git a/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java b/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java index c3324955fe6..f0dde1804d1 100644 --- a/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java +++ b/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java @@ -259,14 +259,11 @@ public class SearchModuleTests extends ModuleTestCase { "fuzzy", "geo_bounding_box", "geo_distance", - "geo_distance_range", "geo_polygon", "geo_shape", - "geohash_cell", "has_child", "has_parent", "ids", - "indices", "match", "match_all", "match_none", @@ -297,13 +294,8 @@ public class SearchModuleTests extends ModuleTestCase { "wrapper" }; - private static final String[] DEPRECATED_QUERIES = new String[] { - "fuzzy_match", - "geo_bbox", - "in", - "match_fuzzy", - "mlt" - }; + //add here deprecated queries to make sure we log a deprecation warning when they are used + private static final String[] DEPRECATED_QUERIES = new String[] {}; /** * Dummy test {@link AggregationBuilder} used to test registering aggregation builders. diff --git a/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java b/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java index 9493ec048e7..8e81a3a852e 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java +++ b/core/src/test/java/org/elasticsearch/search/basic/TransportSearchFailuresIT.java @@ -29,7 +29,7 @@ import org.elasticsearch.client.Requests; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.Priority; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.query.GeohashCellQuery; +import org.elasticsearch.index.query.MatchQueryBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.ESIntegTestCase; @@ -52,7 +52,7 @@ public class TransportSearchFailuresIT extends ESIntegTestCase { public void testFailedSearchWithWrongQuery() throws Exception { logger.info("Start Testing failed search with wrong query"); - assertAcked(prepareCreate("test", 1)); + assertAcked(prepareCreate("test", 1).addMapping("type", "foo", "type=geo_point")); NumShards test = getNumShards("test"); @@ -66,7 +66,7 @@ public class TransportSearchFailuresIT extends ESIntegTestCase { for (int i = 0; i < 5; i++) { try { SearchResponse searchResponse = client().search( - searchRequest("test").source(new SearchSourceBuilder().query(new GeohashCellQuery.Builder("foo", "biz")))) + searchRequest("test").source(new SearchSourceBuilder().query(new MatchQueryBuilder("foo", "biz")))) .actionGet(); assertThat(searchResponse.getTotalShards(), equalTo(test.numPrimaries)); assertThat(searchResponse.getSuccessfulShards(), equalTo(0)); @@ -101,7 +101,7 @@ public class TransportSearchFailuresIT extends ESIntegTestCase { for (int i = 0; i < 5; i++) { try { SearchResponse searchResponse = client().search( - searchRequest("test").source(new SearchSourceBuilder().query(new GeohashCellQuery.Builder("foo", "biz")))) + searchRequest("test").source(new SearchSourceBuilder().query(new MatchQueryBuilder("foo", "biz")))) .actionGet(); assertThat(searchResponse.getTotalShards(), 
equalTo(test.numPrimaries)); assertThat(searchResponse.getSuccessfulShards(), equalTo(0)); diff --git a/core/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java b/core/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java index 9f03724d3c6..c201f03a7d8 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java @@ -28,7 +28,7 @@ import org.elasticsearch.client.Requests; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.query.GeohashCellQuery; +import org.elasticsearch.index.query.MatchQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.functionscore.ScriptScoreFunctionBuilder; import org.elasticsearch.script.Script; @@ -61,7 +61,6 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.lessThanOrEqualTo; @@ -90,7 +89,8 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase { } client().admin().indices().create(createIndexRequest("test") - .settings(settingsBuilder)) + .settings(settingsBuilder) + .mapping("type", "foo", "type=geo_point")) .actionGet(); ensureGreen(); @@ -151,8 +151,15 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase { assertThat(hit.explanation(), notNullValue()); assertThat(hit.explanation().getDetails().length, equalTo(1)); assertThat(hit.explanation().getDetails()[0].getDetails().length, equalTo(2)); - assertThat(hit.explanation().getDetails()[0].getDetails()[0].getDescription(), - endsWith("idf(docFreq=100, docCount=100)")); + assertThat(hit.explanation().getDetails()[0].getDetails()[0].getDetails().length, equalTo(2)); + assertThat(hit.explanation().getDetails()[0].getDetails()[0].getDetails()[0].getDescription(), + equalTo("docFreq")); + assertThat(hit.explanation().getDetails()[0].getDetails()[0].getDetails()[0].getValue(), + equalTo(100.0f)); + assertThat(hit.explanation().getDetails()[0].getDetails()[0].getDetails()[1].getDescription(), + equalTo("docCount")); + assertThat(hit.explanation().getDetails()[0].getDetails()[0].getDetails()[1].getValue(), + equalTo(100.0f)); assertThat("id[" + hit.id() + "] -> " + hit.explanation().toString(), hit.id(), equalTo(Integer.toString(100 - total - i - 1))); } total += hits.length; @@ -179,8 +186,15 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase { assertThat(hit.explanation(), notNullValue()); assertThat(hit.explanation().getDetails().length, equalTo(1)); assertThat(hit.explanation().getDetails()[0].getDetails().length, equalTo(2)); - assertThat(hit.explanation().getDetails()[0].getDetails()[0].getDescription(), - endsWith("idf(docFreq=100, docCount=100)")); + assertThat(hit.explanation().getDetails()[0].getDetails()[0].getDetails().length, equalTo(2)); + assertThat(hit.explanation().getDetails()[0].getDetails()[0].getDetails()[0].getDescription(), + equalTo("docFreq")); + 
assertThat(hit.explanation().getDetails()[0].getDetails()[0].getDetails()[0].getValue(), + equalTo(100.0f)); + assertThat(hit.explanation().getDetails()[0].getDetails()[0].getDetails()[1].getDescription(), + equalTo("docCount")); + assertThat(hit.explanation().getDetails()[0].getDetails()[0].getDetails()[1].getValue(), + equalTo(100.0f)); assertThat("id[" + hit.id() + "]", hit.id(), equalTo(Integer.toString(total + i))); } total += hits.length; @@ -329,8 +343,15 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase { assertThat(hit.explanation(), notNullValue()); assertThat(hit.explanation().getDetails().length, equalTo(1)); assertThat(hit.explanation().getDetails()[0].getDetails().length, equalTo(2)); - assertThat(hit.explanation().getDetails()[0].getDetails()[0].getDescription(), - endsWith("idf(docFreq=100, docCount=100)")); + assertThat(hit.explanation().getDetails()[0].getDetails()[0].getDetails().length, equalTo(2)); + assertThat(hit.explanation().getDetails()[0].getDetails()[0].getDetails()[0].getDescription(), + equalTo("docFreq")); + assertThat(hit.explanation().getDetails()[0].getDetails()[0].getDetails()[0].getValue(), + equalTo(100.0f)); + assertThat(hit.explanation().getDetails()[0].getDetails()[0].getDetails()[1].getDescription(), + equalTo("docCount")); + assertThat(hit.explanation().getDetails()[0].getDetails()[0].getDetails()[1].getValue(), + equalTo(100.0f)); // assertThat("id[" + hit.id() + "]", hit.id(), equalTo(Integer.toString(100 - i - 1))); assertThat("make sure we don't have duplicates", expectedIds.remove(hit.id()), notNullValue()); } @@ -379,7 +400,7 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase { logger.info("Start Testing failed search with wrong query"); try { SearchResponse searchResponse = client().search( - searchRequest("test").source(new SearchSourceBuilder().query(new GeohashCellQuery.Builder("foo", "biz")))).actionGet(); + searchRequest("test").source(new SearchSourceBuilder().query(new MatchQueryBuilder("foo", "biz")))).actionGet(); assertThat(searchResponse.getTotalShards(), equalTo(test.numPrimaries)); assertThat(searchResponse.getSuccessfulShards(), equalTo(0)); assertThat(searchResponse.getFailedShards(), equalTo(test.numPrimaries)); @@ -427,8 +448,7 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase { logger.info("Start Testing failed multi search with a wrong query"); MultiSearchResponse response = client().prepareMultiSearch() - // Add geo distance range query against a field that doesn't exist (should be a geo point for the query to work) - .add(client().prepareSearch("test").setQuery(QueryBuilders.geoDistanceRangeQuery("non_existing_field", 1, 1).from(10).to(15))) + .add(client().prepareSearch("test").setQuery(new MatchQueryBuilder("foo", "biz"))) .add(client().prepareSearch("test").setQuery(QueryBuilders.termQuery("nid", 2))) .add(client().prepareSearch("test").setQuery(QueryBuilders.matchAllQuery())) .execute().actionGet(); diff --git a/core/src/test/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesIT.java b/core/src/test/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesIT.java index ebf3b6e1a50..fc22b3198a1 100644 --- a/core/src/test/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesIT.java +++ b/core/src/test/java/org/elasticsearch/search/fetch/subphase/MatchedQueriesIT.java @@ -27,7 +27,6 @@ import org.elasticsearch.test.ESIntegTestCase; import static org.elasticsearch.index.query.QueryBuilders.boolQuery; import static 
org.elasticsearch.index.query.QueryBuilders.constantScoreQuery; -import static org.elasticsearch.index.query.QueryBuilders.indicesQuery; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.matchQuery; import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery; @@ -171,44 +170,6 @@ public class MatchedQueriesIT extends ESIntegTestCase { } } - public void testIndicesFilterSupportsName() { - createIndex("test1", "test2"); - ensureGreen(); - - client().prepareIndex("test1", "type1", "1").setSource("title", "title1").get(); - client().prepareIndex("test2", "type1", "2").setSource("title", "title2").get(); - client().prepareIndex("test2", "type1", "3").setSource("title", "title3").get(); - refresh(); - - SearchResponse searchResponse = client().prepareSearch() - .setQuery(boolQuery().must(matchAllQuery()).filter( - boolQuery().should( - indicesQuery(termQuery("title", "title1").queryName("title1"), "test1") - .noMatchQuery(termQuery("title", "title2").queryName("title2")).queryName("indices_filter")).should( - termQuery("title", "title3").queryName("title3")).queryName("or"))).get(); - assertHitCount(searchResponse, 3L); - - for (SearchHit hit : searchResponse.getHits()) { - if (hit.id().equals("1")) { - assertThat(hit.matchedQueries().length, equalTo(3)); - assertThat(hit.matchedQueries(), hasItemInArray("indices_filter")); - assertThat(hit.matchedQueries(), hasItemInArray("title1")); - assertThat(hit.matchedQueries(), hasItemInArray("or")); - } else if (hit.id().equals("2")) { - assertThat(hit.matchedQueries().length, equalTo(3)); - assertThat(hit.matchedQueries(), hasItemInArray("indices_filter")); - assertThat(hit.matchedQueries(), hasItemInArray("title2")); - assertThat(hit.matchedQueries(), hasItemInArray("or")); - } else if (hit.id().equals("3")) { - assertThat(hit.matchedQueries().length, equalTo(2)); - assertThat(hit.matchedQueries(), hasItemInArray("title3")); - assertThat(hit.matchedQueries(), hasItemInArray("or")); - } else { - fail("Unexpected document returned with id " + hit.id()); - } - } - } - public void testRegExpQuerySupportsName() { createIndex("test1"); ensureGreen(); diff --git a/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index 8f8887bd150..9b54363936f 100644 --- a/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -39,6 +39,7 @@ import org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; +import org.elasticsearch.index.query.functionscore.RandomScoreFunctionBuilder; import org.elasticsearch.index.search.MatchQuery; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestStatus; @@ -2797,7 +2798,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { .field("type", "geo_point") .endObject() .startObject("jd") - .field("type", "string") + .field("type", "text") .endObject() .endObject() .endObject(); @@ -2850,35 +2851,6 @@ public class HighlighterSearchIT extends ESIntegTestCase { equalTo("some text")); } - public void testStringFieldHighlighting() throws IOException { - // check that string field 
highlighting on old indexes works - XContentBuilder mappings = jsonBuilder(); - mappings.startObject(); - mappings.startObject("type") - .startObject("properties") - .startObject("string_field") - .field("type", "string") - .endObject() - .endObject() - .endObject(); - mappings.endObject(); - assertAcked(prepareCreate("test") - .addMapping("type", mappings) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_2))); - - client().prepareIndex("test", "type", "1") - .setSource(jsonBuilder().startObject().field("string_field", "some text").endObject()) - .get(); - refresh(); - SearchResponse search = client().prepareSearch().setSource(new SearchSourceBuilder() - .query(QueryBuilders.matchQuery("string_field", "some text")) - .highlighter(new HighlightBuilder().field("*"))).get(); - assertNoFailures(search); - assertThat(search.getHits().totalHits(), equalTo(1L)); - assertThat(search.getHits().getAt(0).getHighlightFields().get("string_field").getFragments()[0].string(), - equalTo("some text")); - } - public void testACopyFieldWithNestedQuery() throws Exception { String mapping = jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("foo") @@ -2935,6 +2907,26 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertThat(field.getFragments()[0].string(), equalTo("brown")); } + public void testFiltersFunctionScoreQueryHighlight() throws Exception { + client().prepareIndex("test", "type", "1") + .setSource(jsonBuilder().startObject().field("text", "brown").field("enable", "yes").endObject()) + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .get(); + FunctionScoreQueryBuilder.FilterFunctionBuilder filterBuilder = + new FunctionScoreQueryBuilder.FilterFunctionBuilder(QueryBuilders.termQuery("enable", "yes"), + new RandomScoreFunctionBuilder()); + SearchResponse searchResponse = client().prepareSearch() + .setQuery(new FunctionScoreQueryBuilder(QueryBuilders.prefixQuery("text", "bro"), + new FunctionScoreQueryBuilder.FilterFunctionBuilder[] {filterBuilder})) + .highlighter(new HighlightBuilder() + .field(new Field("text"))) + .get(); + assertHitCount(searchResponse, 1); + HighlightField field = searchResponse.getHits().getAt(0).highlightFields().get("text"); + assertThat(field.getFragments().length, equalTo(1)); + assertThat(field.getFragments()[0].string(), equalTo("brown")); + } + public void testSynonyms() throws IOException { Builder builder = Settings.builder() .put(indexSettings()) diff --git a/core/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java b/core/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java index da844b1969e..f20c87ad387 100644 --- a/core/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java +++ b/core/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java @@ -30,7 +30,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.fielddata.ScriptDocValues; -import org.elasticsearch.index.mapper.TimestampFieldMapper; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestStatus; @@ -299,21 +298,18 @@ public class SearchFieldsIT extends ESIntegTestCase { assertFalse(response.getHits().getAt(0).hasSource()); assertThat(response.getHits().getAt(0).id(), equalTo("1")); Set fields = new 
HashSet<>(response.getHits().getAt(0).fields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(newHashSet("sNum1", "sNum1_field", "date1"))); assertThat(response.getHits().getAt(0).fields().get("sNum1").values().get(0), equalTo(1.0)); assertThat(response.getHits().getAt(0).fields().get("sNum1_field").values().get(0), equalTo(1.0)); assertThat(response.getHits().getAt(0).fields().get("date1").values().get(0), equalTo(0L)); assertThat(response.getHits().getAt(1).id(), equalTo("2")); fields = new HashSet<>(response.getHits().getAt(0).fields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(newHashSet("sNum1", "sNum1_field", "date1"))); assertThat(response.getHits().getAt(1).fields().get("sNum1").values().get(0), equalTo(2.0)); assertThat(response.getHits().getAt(1).fields().get("sNum1_field").values().get(0), equalTo(2.0)); assertThat(response.getHits().getAt(1).fields().get("date1").values().get(0), equalTo(25000L)); assertThat(response.getHits().getAt(2).id(), equalTo("3")); fields = new HashSet<>(response.getHits().getAt(0).fields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(newHashSet("sNum1", "sNum1_field", "date1"))); assertThat(response.getHits().getAt(2).fields().get("sNum1").values().get(0), equalTo(3.0)); assertThat(response.getHits().getAt(2).fields().get("sNum1_field").values().get(0), equalTo(3.0)); @@ -330,17 +326,14 @@ public class SearchFieldsIT extends ESIntegTestCase { assertThat(response.getHits().totalHits(), equalTo(3L)); assertThat(response.getHits().getAt(0).id(), equalTo("1")); fields = new HashSet<>(response.getHits().getAt(0).fields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(singleton("sNum1"))); assertThat(response.getHits().getAt(0).fields().get("sNum1").values().get(0), equalTo(2.0)); assertThat(response.getHits().getAt(1).id(), equalTo("2")); fields = new HashSet<>(response.getHits().getAt(0).fields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(singleton("sNum1"))); assertThat(response.getHits().getAt(1).fields().get("sNum1").values().get(0), equalTo(4.0)); assertThat(response.getHits().getAt(2).id(), equalTo("3")); fields = new HashSet<>(response.getHits().getAt(0).fields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(singleton("sNum1"))); assertThat(response.getHits().getAt(2).fields().get("sNum1").values().get(0), equalTo(6.0)); } @@ -369,7 +362,6 @@ public class SearchFieldsIT extends ESIntegTestCase { for (int i = 0; i < numDocs; i++) { assertThat(response.getHits().getAt(i).id(), equalTo(Integer.toString(i))); Set fields = new HashSet<>(response.getHits().getAt(i).fields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(singleton("uid"))); assertThat(response.getHits().getAt(i).fields().get("uid").value(), equalTo("type1#" + Integer.toString(i))); } @@ -387,7 +379,6 @@ public class SearchFieldsIT extends ESIntegTestCase { for (int i = 0; i < numDocs; i++) { assertThat(response.getHits().getAt(i).id(), equalTo(Integer.toString(i))); Set fields = new HashSet<>(response.getHits().getAt(i).fields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly enabled via 
templates assertThat(fields, equalTo(singleton("id"))); assertThat(response.getHits().getAt(i).fields().get("id").value(), equalTo(Integer.toString(i))); } @@ -406,7 +397,6 @@ public class SearchFieldsIT extends ESIntegTestCase { for (int i = 0; i < numDocs; i++) { assertThat(response.getHits().getAt(i).id(), equalTo(Integer.toString(i))); Set fields = new HashSet<>(response.getHits().getAt(i).fields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(singleton("type"))); assertThat(response.getHits().getAt(i).fields().get("type").value(), equalTo("type1")); } @@ -427,7 +417,6 @@ public class SearchFieldsIT extends ESIntegTestCase { for (int i = 0; i < numDocs; i++) { assertThat(response.getHits().getAt(i).id(), equalTo(Integer.toString(i))); Set fields = new HashSet<>(response.getHits().getAt(i).fields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(newHashSet("uid", "type", "id"))); assertThat(response.getHits().getAt(i).fields().get("uid").value(), equalTo("type1#" + Integer.toString(i))); assertThat(response.getHits().getAt(i).fields().get("type").value(), equalTo("type1")); @@ -600,7 +589,6 @@ public class SearchFieldsIT extends ESIntegTestCase { assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); assertThat(searchResponse.getHits().hits().length, equalTo(1)); Set fields = new HashSet<>(searchResponse.getHits().getAt(0).fields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(newHashSet("byte_field", "short_field", "integer_field", "long_field", "float_field", "double_field", "date_field", "boolean_field", "binary_field"))); @@ -816,7 +804,6 @@ public class SearchFieldsIT extends ESIntegTestCase { assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L)); assertThat(searchResponse.getHits().hits().length, equalTo(1)); Set fields = new HashSet<>(searchResponse.getHits().getAt(0).fields().keySet()); - fields.remove(TimestampFieldMapper.NAME); // randomly enabled via templates assertThat(fields, equalTo(newHashSet("byte_field", "short_field", "integer_field", "long_field", "float_field", "double_field", "date_field", "boolean_field", "text_field", "keyword_field"))); @@ -880,8 +867,6 @@ public class SearchFieldsIT extends ESIntegTestCase { indexRandom(true, client().prepareIndex("test", "my-type1", "1") .setRouting("1") - .setTimestamp("205097") - .setTTL(10000000000000L) .setParent("parent_1") .setSource(jsonBuilder().startObject().field("field1", "value").endObject())); diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java index cf1e7a08a87..13a2c3a1c3a 100644 --- a/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java +++ b/core/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java @@ -605,16 +605,13 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { } public void testManyDocsLin() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, Version.CURRENT); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); XContentBuilder xContentBuilder = 
jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("test").field("type", "text").endObject().startObject("date").field("type", "date") .field("doc_values", true).endObject().startObject("num").field("type", "double") .field("doc_values", true).endObject().startObject("geo").field("type", "geo_point") .field("ignore_malformed", true); - if (version.before(Version.V_2_2_0)) { - xContentBuilder.field("coerce", true); - } xContentBuilder.endObject().endObject().endObject().endObject(); assertAcked(prepareCreate("test").setSettings(settings).addMapping("type", xContentBuilder.string())); int numDocs = 200; diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoBoundingBoxIT.java b/core/src/test/java/org/elasticsearch/search/geo/GeoBoundingBoxIT.java index 97615f63c9d..646cc310fc8 100644 --- a/core/src/test/java/org/elasticsearch/search/geo/GeoBoundingBoxIT.java +++ b/core/src/test/java/org/elasticsearch/search/geo/GeoBoundingBoxIT.java @@ -55,9 +55,6 @@ public class GeoBoundingBoxIT extends ESIntegTestCase { Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location").field("type", "geo_point"); - if (version.before(Version.V_2_2_0)) { - xContentBuilder.field("lat_lon", true); - } xContentBuilder.endObject().endObject().endObject().endObject(); assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder)); ensureGreen(); @@ -129,9 +126,6 @@ public class GeoBoundingBoxIT extends ESIntegTestCase { Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location").field("type", "geo_point"); - if (version.before(Version.V_2_2_0)) { - xContentBuilder.field("lat_lon", true); - } xContentBuilder.endObject().endObject().endObject().endObject(); assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder)); ensureGreen(); @@ -184,9 +178,6 @@ public class GeoBoundingBoxIT extends ESIntegTestCase { Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location").field("type", "geo_point"); - if (version.before(Version.V_2_2_0)) { - xContentBuilder.field("lat_lon", true); - } xContentBuilder.endObject().endObject().endObject().endObject(); assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder)); ensureGreen(); diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoDistanceIT.java b/core/src/test/java/org/elasticsearch/search/geo/GeoDistanceIT.java index 6c9acd7e8a7..3594f51c722 100644 --- a/core/src/test/java/org/elasticsearch/search/geo/GeoDistanceIT.java +++ b/core/src/test/java/org/elasticsearch/search/geo/GeoDistanceIT.java @@ -105,9 +105,6 @@ public class GeoDistanceIT extends ESIntegTestCase { Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location").field("type", "geo_point"); - if 
(version.before(Version.V_2_2_0)) { - xContentBuilder.field("lat_lon", true); - } xContentBuilder.endObject().endObject().endObject().endObject(); assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder)); ensureGreen(); diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java b/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java index 7f880211c3b..2707e3c0b27 100644 --- a/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java +++ b/core/src/test/java/org/elasticsearch/search/geo/GeoFilterIT.java @@ -21,7 +21,6 @@ package org.elasticsearch.search.geo; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; -import org.apache.lucene.geo.GeoEncodingUtils; import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy; import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree; import org.apache.lucene.spatial.query.SpatialArgs; @@ -34,7 +33,6 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Priority; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.geo.GeoHashUtils; import org.elasticsearch.common.geo.GeoPoint; @@ -49,8 +47,6 @@ import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.mapper.LatLonPointFieldMapper; -import org.elasticsearch.index.query.GeohashCellQuery; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.SearchHit; @@ -70,22 +66,16 @@ import java.io.InputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; import java.util.Random; import java.util.zip.GZIPInputStream; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.geoBoundingBoxQuery; import static org.elasticsearch.index.query.QueryBuilders.geoDistanceQuery; -import static org.elasticsearch.index.query.QueryBuilders.geoHashCellQuery; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.matchQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFirstHit; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.closeTo; @@ -383,9 +373,6 @@ public class GeoFilterIT extends ESIntegTestCase { .startObject("properties") .startObject("pin") .field("type", "geo_point"); - if (version.before(Version.V_2_2_0)) { - xContentBuilder.field("lat_lon", true); - } xContentBuilder.field("store", true) .endObject() .startObject("location") @@ -430,13 +417,7 @@ public class GeoFilterIT extends ESIntegTestCase { GeoPoint point = new GeoPoint(); for (SearchHit hit : distance.getHits()) { String name = hit.getId(); - if 
(version.before(Version.V_2_2_0)) { - point.resetFromString(hit.fields().get("pin").getValue().toString()); - } else if (version.before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) { - point.resetFromIndexHash(hit.fields().get("pin").getValue()); - } else { - point.resetFromString(hit.getFields().get("pin").getValue()); - } + point.resetFromString(hit.getFields().get("pin").getValue()); double dist = distance(point.getLat(), point.getLon(), 51.11, 9.851); assertThat("distance to '" + name + "'", dist, lessThanOrEqualTo(425000d)); @@ -447,82 +428,6 @@ public class GeoFilterIT extends ESIntegTestCase { } } - public void testLegacyGeohashCellFilter() throws IOException { - String geohash = randomhash(10); - logger.info("Testing geohash_cell filter for [{}]", geohash); - - Collection neighbors = GeoHashUtils.neighbors(geohash); - Collection parentNeighbors = GeoHashUtils.neighbors(geohash.substring(0, geohash.length() - 1)); - - logger.info("Neighbors {}", neighbors); - logger.info("Parent Neighbors {}", parentNeighbors); - - ensureYellow(); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha5); - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - - client().admin().indices().prepareCreate("locations").setSettings(settings).addMapping("location", "pin", - "type=geo_point,geohash_prefix=true,lat_lon=false").execute().actionGet(); - - // Index a pin - client().prepareIndex("locations", "location", "1").setCreate(true).setSource("pin", geohash).execute().actionGet(); - - // index neighbors - Iterator iterator = neighbors.iterator(); - for (int i = 0; iterator.hasNext(); i++) { - client().prepareIndex("locations", "location", "N" + i).setCreate(true).setSource("pin", iterator.next()).execute().actionGet(); - } - - // Index parent cell - client().prepareIndex("locations", "location", "p").setCreate(true).setSource("pin", geohash.substring(0, geohash.length() - 1)).execute().actionGet(); - - // index neighbors - iterator = parentNeighbors.iterator(); - for (int i = 0; iterator.hasNext(); i++) { - client().prepareIndex("locations", "location", "p" + i).setCreate(true).setSource("pin", iterator.next()).execute().actionGet(); - } - - client().admin().indices().prepareRefresh("locations").execute().actionGet(); - - Map expectedCounts = new HashMap<>(); - Map expectedResults = new HashMap<>(); - - expectedCounts.put(geoHashCellQuery("pin", geohash, false), 1L); - - expectedCounts.put(geoHashCellQuery("pin", geohash.substring(0, geohash.length() - 1), true), 2L + neighbors.size() + parentNeighbors.size()); - - // Testing point formats and precision - GeoPoint point = GeoPoint.fromGeohash(geohash); - int precision = geohash.length(); - - expectedCounts.put(geoHashCellQuery("pin", point).neighbors(true).precision(precision), 1L + neighbors.size()); - - - List filterBuilders = new ArrayList<>(expectedCounts.keySet()); - for (GeohashCellQuery.Builder builder : filterBuilders) { - try { - long expectedCount = expectedCounts.get(builder); - SearchResponse response = client().prepareSearch("locations").setQuery(QueryBuilders.matchAllQuery()) - .setPostFilter(builder).setSize((int) expectedCount).get(); - assertHitCount(response, expectedCount); - String[] expectedIds = expectedResults.get(builder); - if (expectedIds == null) { - ArrayList ids = new ArrayList<>(); - for (SearchHit hit : response.getHits()) { - ids.add(hit.id()); - } - expectedResults.put(builder, ids.toArray(Strings.EMPTY_ARRAY)); - continue; - 
} - - assertSearchHits(response, expectedIds); - - } catch (AssertionError error) { - throw new AssertionError(error.getMessage() + "\n geohash_cell filter:" + builder, error); - } - } - } - public void testNeighbors() { // Simple root case assertThat(GeoHashUtils.addNeighbors("7", new ArrayList()), containsInAnyOrder("4", "5", "6", "d", "e", "h", "k", "s")); diff --git a/core/src/test/java/org/elasticsearch/search/geo/GeoPolygonIT.java b/core/src/test/java/org/elasticsearch/search/geo/GeoPolygonIT.java index 92ed3875e3e..1d864c14dec 100644 --- a/core/src/test/java/org/elasticsearch/search/geo/GeoPolygonIT.java +++ b/core/src/test/java/org/elasticsearch/search/geo/GeoPolygonIT.java @@ -59,9 +59,6 @@ public class GeoPolygonIT extends ESIntegTestCase { Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location").field("type", "geo_point"); - if (version.before(Version.V_2_2_0)) { - xContentBuilder.field("lat_lon", true); - } xContentBuilder.endObject().endObject().endObject().endObject(); assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder)); ensureGreen(); diff --git a/core/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java b/core/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java index 152709d628d..76de9d56e32 100644 --- a/core/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java +++ b/core/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java @@ -56,7 +56,7 @@ public class QueryProfilerTests extends ESTestCase { static ContextIndexSearcher searcher; @BeforeClass - public static void before() throws IOException { + public static void setup() throws IOException { dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir); final int numDocs = TestUtil.nextInt(random(), 1, 20); @@ -76,7 +76,7 @@ public class QueryProfilerTests extends ESTestCase { } @AfterClass - public static void after() throws IOException { + public static void cleanup() throws IOException { IOUtils.close(reader, dir); dir = null; reader = null; diff --git a/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java index 1a10a700948..b5067dbf74a 100644 --- a/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/MultiMatchQueryIT.java @@ -668,7 +668,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase { .put(SETTING_NUMBER_OF_SHARDS, 3) .put(SETTING_NUMBER_OF_REPLICAS, 0) ); - assertAcked(builder.addMapping("type", "title", "type=string", "body", "type=string")); + assertAcked(builder.addMapping("type", "title", "type=text", "body", "type=text")); ensureGreen(); List builders = new ArrayList<>(); builders.add(client().prepareIndex(idx, "type", "1").setSource( diff --git a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java index 71d258e1d7c..4528ac3446c 100644 --- a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java @@ -19,14 +19,12 @@ package org.elasticsearch.search.query; -import org.apache.lucene.search.join.ScoreMode; import 
org.apache.lucene.util.English; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; -import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; @@ -64,9 +62,7 @@ import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery; import static org.elasticsearch.index.query.QueryBuilders.existsQuery; import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery; import static org.elasticsearch.index.query.QueryBuilders.fuzzyQuery; -import static org.elasticsearch.index.query.QueryBuilders.hasChildQuery; import static org.elasticsearch.index.query.QueryBuilders.idsQuery; -import static org.elasticsearch.index.query.QueryBuilders.indicesQuery; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.matchQuery; import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery; @@ -1719,135 +1715,6 @@ public class SearchQueryIT extends ESIntegTestCase { assertHitCount(response, 0); } - public void testIndicesQuery() throws Exception { - createIndex("index1", "index2", "index3"); - - - client().prepareIndex("index1", "type1").setId("1").setSource("text", "value1").get(); - client().prepareIndex("index2", "type2").setId("2").setSource("text", "value2").get(); - client().prepareIndex("index3", "type3").setId("3").setSource("text", "value3").get(); - refresh(); - - SearchResponse searchResponse = client().prepareSearch("index1", "index2", "index3") - .setQuery(indicesQuery(matchQuery("text", "value1"), "index1") - .noMatchQuery(matchQuery("text", "value2"))).get(); - assertHitCount(searchResponse, 2L); - assertSearchHits(searchResponse, "1", "2"); - - //default no match query is match_all - searchResponse = client().prepareSearch("index1", "index2", "index3") - .setQuery(indicesQuery(matchQuery("text", "value1"), "index1")).get(); - assertHitCount(searchResponse, 3L); - assertSearchHits(searchResponse, "1", "2", "3"); - searchResponse = client().prepareSearch("index1", "index2", "index3") - .setQuery(indicesQuery(matchQuery("text", "value1"), "index1") - .noMatchQuery(QueryBuilders.matchAllQuery())).get(); - assertHitCount(searchResponse, 3L); - assertSearchHits(searchResponse, "1", "2", "3"); - - searchResponse = client().prepareSearch("index1", "index2", "index3") - .setQuery(indicesQuery(matchQuery("text", "value1"), "index1") - .noMatchQuery("none")).get(); - assertHitCount(searchResponse, 1L); - assertFirstHit(searchResponse, hasId("1")); - } - - // See #2416 - public void testIndicesQuerySkipParsing() throws Exception { - createIndex("simple"); - assertAcked(prepareCreate("related") - .addMapping("child", jsonBuilder().startObject().startObject("child").startObject("_parent").field("type", "parent") - .endObject().endObject().endObject())); - - client().prepareIndex("simple", "lone").setId("1").setSource("text", "value1").get(); - client().prepareIndex("related", "parent").setId("2").setSource("text", "parent").get(); - client().prepareIndex("related", "child").setId("3").setParent("2").setSource("text", "value2").get(); - refresh(); - - //has_child fails if executed on "simple" 
index - SearchPhaseExecutionException e = expectThrows(SearchPhaseExecutionException.class, - () -> client().prepareSearch("simple").setQuery(hasChildQuery("child", matchQuery("text", "value"), ScoreMode.None)).get()); - assertThat(e.shardFailures().length, greaterThan(0)); - for (ShardSearchFailure shardSearchFailure : e.shardFailures()) { - assertThat(shardSearchFailure.reason(), containsString("no mapping found for type [child]")); - } - - //has_child doesn't get parsed for "simple" index - SearchResponse searchResponse = client().prepareSearch("related", "simple") - .setQuery(indicesQuery(hasChildQuery("child", matchQuery("text", "value2"), ScoreMode.None), "related") - .noMatchQuery(matchQuery("text", "value1"))).get(); - assertHitCount(searchResponse, 2L); - assertSearchHits(searchResponse, "1", "2"); - } - - public void testIndicesQueryMissingIndices() throws IOException, ExecutionException, InterruptedException { - createIndex("index1"); - createIndex("index2"); - - indexRandom(true, - client().prepareIndex("index1", "type1", "1").setSource("field", "match"), - client().prepareIndex("index1", "type1", "2").setSource("field", "no_match"), - client().prepareIndex("index2", "type1", "10").setSource("field", "match"), - client().prepareIndex("index2", "type1", "20").setSource("field", "no_match"), - client().prepareIndex("index3", "type1", "100").setSource("field", "match"), - client().prepareIndex("index3", "type1", "200").setSource("field", "no_match")); - - //all indices are missing - SearchResponse searchResponse = client().prepareSearch().setQuery( - indicesQuery(termQuery("field", "missing"), "test1", "test2", "test3") - .noMatchQuery(termQuery("field", "match"))).get(); - - assertHitCount(searchResponse, 3L); - - for (SearchHit hit : searchResponse.getHits().getHits()) { - if ("index1".equals(hit.index())) { - assertThat(hit, hasId("1")); - } else if ("index2".equals(hit.index())) { - assertThat(hit, hasId("10")); - } else if ("index3".equals(hit.index())) { - assertThat(hit, hasId("100")); - } else { - fail("Returned documents should belong to either index1, index2 or index3"); - } - } - - //only one index specified, which is missing - searchResponse = client().prepareSearch().setQuery( - indicesQuery(termQuery("field", "missing"), "test1") - .noMatchQuery(termQuery("field", "match"))).get(); - - assertHitCount(searchResponse, 3L); - - for (SearchHit hit : searchResponse.getHits().getHits()) { - if ("index1".equals(hit.index())) { - assertThat(hit, hasId("1")); - } else if ("index2".equals(hit.index())) { - assertThat(hit, hasId("10")); - } else if ("index3".equals(hit.index())) { - assertThat(hit, hasId("100")); - } else { - fail("Returned documents should belong to either index1, index2 or index3"); - } - } - - //more than one index specified, one of them is missing - searchResponse = client().prepareSearch().setQuery( - indicesQuery(termQuery("field", "missing"), "index1", "test1") - .noMatchQuery(termQuery("field", "match"))).get(); - - assertHitCount(searchResponse, 2L); - - for (SearchHit hit : searchResponse.getHits().getHits()) { - if ("index2".equals(hit.index())) { - assertThat(hit, hasId("10")); - } else if ("index3".equals(hit.index())) { - assertThat(hit, hasId("100")); - } else { - fail("Returned documents should belong to either index2 or index3"); - } - } - } - public void testMinScore() throws ExecutionException, InterruptedException { createIndex("test"); diff --git a/core/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java 
b/core/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java index 1af3bb99b28..41085a9901e 100644 --- a/core/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java @@ -345,7 +345,7 @@ public class SimpleQueryStringIT extends ESIntegTestCase { .startObject("type1") .startObject("properties") .startObject("body") - .field("type", "string") + .field("type", "text") .field("analyzer", "stop") .endObject() .endObject() diff --git a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java index 006f69e4774..df50d3126c7 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java @@ -39,9 +39,9 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.ContentPath; -import org.elasticsearch.index.mapper.LegacyDoubleFieldMapper.DoubleFieldType; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper.BuilderContext; +import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.ObjectMapper; import org.elasticsearch.index.mapper.ObjectMapper.Nested; import org.elasticsearch.index.query.IdsQueryBuilder; @@ -227,11 +227,11 @@ public abstract class AbstractSortTestCase> extends EST } /** - * Return a field type. We use {@link DoubleFieldType} by default since it is compatible with all sort modes + * Return a field type. We use {@link NumberFieldMapper.NumberFieldType} by default since it is compatible with all sort modes * Tests that require other field type than double can override this. 
*/ protected MappedFieldType provideMappedFieldType(String name) { - DoubleFieldType doubleFieldType = new DoubleFieldType(); + NumberFieldMapper.NumberFieldType doubleFieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE); doubleFieldType.setName(name); doubleFieldType.setHasDocValues(true); return doubleFieldType; diff --git a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceIT.java b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceIT.java index 38f6178caee..d6863d03931 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceIT.java +++ b/core/src/test/java/org/elasticsearch/search/sort/GeoDistanceIT.java @@ -20,33 +20,25 @@ package org.elasticsearch.search.sort; import org.elasticsearch.Version; -import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.geo.GeoDistance; import org.elasticsearch.common.geo.GeoHashUtils; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.index.query.GeoDistanceQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.search.SearchHit; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; import java.io.IOException; -import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import java.util.List; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.index.query.QueryBuilders.geoDistanceQuery; -import static org.elasticsearch.index.query.QueryBuilders.geoDistanceRangeQuery; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -54,9 +46,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFirs import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertOrderedSearchHits; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; -import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.equalTo; @@ -67,157 +57,11 @@ public class GeoDistanceIT extends ESIntegTestCase { return Arrays.asList(InternalSettingsPlugin.class); } - public void testLegacyGeoDistanceRangeQuery() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_2_4_0); - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") - .startObject("location").field("type", "geo_point"); - if 
(version.before(Version.V_2_2_0)) { - xContentBuilder.field("lat_lon", true); - } - xContentBuilder.endObject().endObject().endObject().endObject(); - assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder)); - ensureGreen(); - - indexRandom(true, - client().prepareIndex("test", "type1", "1") - .setSource(jsonBuilder().startObject().field("name", "New York").startObject("location").field("lat", 40.7143528) - .field("lon", -74.0059731).endObject().endObject()), - // to NY: 5.286 km - client().prepareIndex("test", "type1", "2") - .setSource(jsonBuilder().startObject().field("name", "Times Square").startObject("location").field("lat", 40.759011) - .field("lon", -73.9844722).endObject().endObject()), - // to NY: 0.4621 km - client().prepareIndex("test", "type1", "3") - .setSource(jsonBuilder().startObject().field("name", "Tribeca").startObject("location").field("lat", 40.718266) - .field("lon", -74.007819).endObject().endObject()), - // to NY: 1.055 km - client().prepareIndex("test", "type1", "4") - .setSource(jsonBuilder().startObject().field("name", "Wall Street").startObject("location").field("lat", 40.7051157) - .field("lon", -74.0088305).endObject().endObject()), - // to NY: 1.258 km - client().prepareIndex("test", "type1", "5") - .setSource(jsonBuilder().startObject().field("name", "Soho").startObject("location").field("lat", 40.7247222) - .field("lon", -74).endObject().endObject()), - // to NY: 2.029 km - client().prepareIndex("test", "type1", "6") - .setSource(jsonBuilder().startObject().field("name", "Greenwich Village").startObject("location") - .field("lat", 40.731033).field("lon", -73.9962255).endObject().endObject()), - // to NY: 8.572 km - client().prepareIndex("test", "type1", "7").setSource(jsonBuilder().startObject().field("name", "Brooklyn") - .startObject("location").field("lat", 40.65).field("lon", -73.95).endObject().endObject())); - - SearchResponse searchResponse = client().prepareSearch() // from NY - .setQuery(geoDistanceQuery("location").distance("3km").point(40.7143528, -74.0059731)).execute().actionGet(); - assertHitCount(searchResponse, 5); - assertThat(searchResponse.getHits().hits().length, equalTo(5)); - for (SearchHit hit : searchResponse.getHits()) { - assertThat(hit.id(), anyOf(equalTo("1"), equalTo("3"), equalTo("4"), equalTo("5"), equalTo("6"))); - } - searchResponse = client().prepareSearch() // from NY - .setQuery(geoDistanceQuery("location").distance("3km").point(40.7143528, -74.0059731).optimizeBbox("indexed")).execute() - .actionGet(); - assertHitCount(searchResponse, 5); - assertThat(searchResponse.getHits().hits().length, equalTo(5)); - for (SearchHit hit : searchResponse.getHits()) { - assertThat(hit.id(), anyOf(equalTo("1"), equalTo("3"), equalTo("4"), equalTo("5"), equalTo("6"))); - } - - // now with a PLANE type - searchResponse = client().prepareSearch() // from NY - .setQuery(geoDistanceQuery("location").distance("3km").geoDistance(GeoDistance.PLANE).point(40.7143528, -74.0059731)) - .execute().actionGet(); - assertHitCount(searchResponse, 5); - assertThat(searchResponse.getHits().hits().length, equalTo(5)); - for (SearchHit hit : searchResponse.getHits()) { - assertThat(hit.id(), anyOf(equalTo("1"), equalTo("3"), equalTo("4"), equalTo("5"), equalTo("6"))); - } - - // factor type is really too small for this resolution - - searchResponse = client().prepareSearch() // from NY - .setQuery(geoDistanceQuery("location").distance("2km").point(40.7143528, -74.0059731)).execute().actionGet(); - 
assertHitCount(searchResponse, 4); - assertThat(searchResponse.getHits().hits().length, equalTo(4)); - for (SearchHit hit : searchResponse.getHits()) { - assertThat(hit.id(), anyOf(equalTo("1"), equalTo("3"), equalTo("4"), equalTo("5"))); - } - searchResponse = client().prepareSearch() // from NY - .setQuery(geoDistanceQuery("location").distance("2km").point(40.7143528, -74.0059731).optimizeBbox("indexed")).execute() - .actionGet(); - assertHitCount(searchResponse, 4); - assertThat(searchResponse.getHits().hits().length, equalTo(4)); - for (SearchHit hit : searchResponse.getHits()) { - assertThat(hit.id(), anyOf(equalTo("1"), equalTo("3"), equalTo("4"), equalTo("5"))); - } - - searchResponse = client().prepareSearch() // from NY - .setQuery(geoDistanceQuery("location").distance("1.242mi").point(40.7143528, -74.0059731)).execute().actionGet(); - assertHitCount(searchResponse, 4); - assertThat(searchResponse.getHits().hits().length, equalTo(4)); - for (SearchHit hit : searchResponse.getHits()) { - assertThat(hit.id(), anyOf(equalTo("1"), equalTo("3"), equalTo("4"), equalTo("5"))); - } - searchResponse = client().prepareSearch() // from NY - .setQuery(geoDistanceQuery("location").distance("1.242mi").point(40.7143528, -74.0059731).optimizeBbox("indexed")).execute() - .actionGet(); - assertHitCount(searchResponse, 4); - assertThat(searchResponse.getHits().hits().length, equalTo(4)); - for (SearchHit hit : searchResponse.getHits()) { - assertThat(hit.id(), anyOf(equalTo("1"), equalTo("3"), equalTo("4"), equalTo("5"))); - } - - searchResponse = client().prepareSearch() // from NY - .setQuery(geoDistanceRangeQuery("location", 40.7143528, -74.0059731).from("1.0km").to("2.0km")).execute().actionGet(); - assertHitCount(searchResponse, 2); - assertThat(searchResponse.getHits().hits().length, equalTo(2)); - for (SearchHit hit : searchResponse.getHits()) { - assertThat(hit.id(), anyOf(equalTo("4"), equalTo("5"))); - } - searchResponse = client().prepareSearch() // from NY - .setQuery(geoDistanceRangeQuery("location", 40.7143528, -74.0059731).from("1.0km").to("2.0km").optimizeBbox("indexed")) - .execute().actionGet(); - assertHitCount(searchResponse, 2); - assertThat(searchResponse.getHits().hits().length, equalTo(2)); - for (SearchHit hit : searchResponse.getHits()) { - assertThat(hit.id(), anyOf(equalTo("4"), equalTo("5"))); - } - - searchResponse = client().prepareSearch() // from NY - .setQuery(geoDistanceRangeQuery("location", 40.7143528, -74.0059731).to("2.0km")).execute().actionGet(); - assertHitCount(searchResponse, 4); - assertThat(searchResponse.getHits().hits().length, equalTo(4)); - - searchResponse = client().prepareSearch() // from NY - .setQuery(geoDistanceRangeQuery("location", 40.7143528, -74.0059731).from("2.0km")).execute().actionGet(); - assertHitCount(searchResponse, 3); - assertThat(searchResponse.getHits().hits().length, equalTo(3)); - - // SORTING - - searchResponse = client().prepareSearch().setQuery(matchAllQuery()) - .addSort(SortBuilders.geoDistanceSort("location", 40.7143528, -74.0059731).order(SortOrder.ASC)).execute() - .actionGet(); - - assertHitCount(searchResponse, 7); - assertOrderedSearchHits(searchResponse, "1", "3", "4", "5", "6", "2", "7"); - - searchResponse = client().prepareSearch().setQuery(matchAllQuery()) - .addSort(SortBuilders.geoDistanceSort("location", 40.7143528, -74.0059731).order(SortOrder.DESC)).execute() - .actionGet(); - - assertHitCount(searchResponse, 7); - assertOrderedSearchHits(searchResponse, "7", "2", "6", "5", "4", "3", "1"); - } - public 
void testDistanceSortingMVFields() throws Exception { Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("locations").field("type", "geo_point"); - if (version.before(Version.V_2_2_0)) { - xContentBuilder.field("lat_lon", true).field("coerce", true); - } xContentBuilder.field("ignore_malformed", true).endObject().endObject().endObject().endObject(); assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder)); ensureGreen(); @@ -347,9 +191,6 @@ public class GeoDistanceIT extends ESIntegTestCase { Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") .startObject("locations").field("type", "geo_point"); - if (version.before(Version.V_2_2_0)) { - xContentBuilder.field("lat_lon", true); - } xContentBuilder.endObject().endObject().endObject().endObject(); assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder)); ensureGreen(); @@ -396,9 +237,6 @@ public class GeoDistanceIT extends ESIntegTestCase { .startObject("name").field("type", "text").endObject().startObject("branches").field("type", "nested") .startObject("properties").startObject("name").field("type", "text").endObject().startObject("location") .field("type", "geo_point"); - if (version.before(Version.V_2_2_0)) { - xContentBuilder.field("lat_lon", true); - } xContentBuilder.endObject().endObject().endObject().endObject().endObject().endObject(); assertAcked(prepareCreate("companies").setSettings(settings).addMapping("company", xContentBuilder)); @@ -548,9 +386,6 @@ public class GeoDistanceIT extends ESIntegTestCase { XContentBuilder mapping = JsonXContent.contentBuilder().startObject().startObject("location").startObject("properties") .startObject("pin").field("type", "geo_point"); - if (version.before(Version.V_2_2_0)) { - mapping.field("lat_lon", true); - } mapping.endObject().endObject().endObject().endObject(); XContentBuilder source = JsonXContent.contentBuilder().startObject().field("pin", GeoHashUtils.stringEncode(lon, lat)).endObject(); @@ -567,55 +402,4 @@ public class GeoDistanceIT extends ESIntegTestCase { assertHitCount(result, 1); } - private static double randomLon() { - return randomDouble() * 360 - 180; - } - - private static double randomLat() { - return randomDouble() * 180 - 90; - } - - public void testDuelOptimizations() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_2_1_2); - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); - assertAcked(prepareCreate("index").setSettings(settings).addMapping("type", "location", "type=geo_point,lat_lon=true")); - final int numDocs = scaledRandomIntBetween(3000, 10000); - List docs = new ArrayList<>(); - for (int i = 0; i < numDocs; ++i) { - docs.add(client().prepareIndex("index", "type").setSource(jsonBuilder().startObject().startObject("location") - .field("lat", randomLat()).field("lon", randomLon()).endObject().endObject())); - } - indexRandom(true, docs); - ensureSearchable(); - - for (int i = 0; i < 10; ++i) { - final double originLat = randomLat(); 
- final double originLon = randomLon(); - final String distance = DistanceUnit.KILOMETERS.toString(randomIntBetween(1, 10000)); - for (GeoDistance geoDistance : Arrays.asList(GeoDistance.ARC, GeoDistance.SLOPPY_ARC)) { - logger.info("Now testing GeoDistance={}, distance={}, origin=({}, {})", geoDistance, distance, originLat, originLon); - GeoDistanceQueryBuilder qb = QueryBuilders.geoDistanceQuery("location").point(originLat, originLon).distance(distance) - .geoDistance(geoDistance); - long matches; - for (String optimizeBbox : Arrays.asList("none", "memory", "indexed")) { - qb.optimizeBbox(optimizeBbox); - SearchResponse resp = client().prepareSearch("index").setSize(0).setQuery(QueryBuilders.constantScoreQuery(qb)) - .execute().actionGet(); - matches = assertDuelOptimization(resp); - logger.info("{} -> {} hits", optimizeBbox, matches); - } - } - } - } - - private static long assertDuelOptimization(SearchResponse resp) { - long matches = -1; - assertSearchResponse(resp); - if (matches < 0) { - matches = resp.getHits().totalHits(); - } else { - assertEquals(matches, matches = resp.getHits().totalHits()); - } - return matches; - } } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearch2xIT.java b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearch2xIT.java deleted file mode 100644 index 172183c57c8..00000000000 --- a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearch2xIT.java +++ /dev/null @@ -1,1199 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.search.suggest; - -import com.carrotsearch.hppc.ObjectLongHashMap; -import com.carrotsearch.randomizedtesting.generators.RandomStrings; - -import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; -import org.elasticsearch.Version; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; -import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; -import org.elasticsearch.action.admin.indices.segments.IndexShardSegments; -import org.elasticsearch.action.admin.indices.segments.ShardSegments; -import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; -import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.search.SearchPhaseExecutionException; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.Fuzziness; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.mapper.CompletionFieldMapper2x; -import org.elasticsearch.index.mapper.MapperException; -import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode; -import org.elasticsearch.search.sort.FieldSortBuilder; -import org.elasticsearch.search.suggest.completion.CompletionStats; -import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder; -import org.elasticsearch.search.suggest.completion.FuzzyOptions; -import org.elasticsearch.search.suggest.completion2x.CompletionSuggestion; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.VersionUtils; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.concurrent.ExecutionException; - -import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom; -import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; -import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllSuccessful; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThan; -import static org.hamcrest.Matchers.hasItems; -import static org.hamcrest.core.Is.is; -import static org.hamcrest.core.IsCollectionContaining.hasItem; -import static org.hamcrest.core.IsInstanceOf.instanceOf; -import static org.hamcrest.core.IsNull.notNullValue; - -@SuppressCodecs("*") // requires custom completion format -public class CompletionSuggestSearch2xIT extends ESIntegTestCase { - - private final String INDEX = RandomStrings.randomAsciiOfLength(getRandom(), 10).toLowerCase(Locale.ROOT); - private final String TYPE = RandomStrings.randomAsciiOfLength(getRandom(), 10).toLowerCase(Locale.ROOT); - private final String FIELD = 
RandomStrings.randomAsciiOfLength(getRandom(), 10).toLowerCase(Locale.ROOT); - private final Version PRE2X_VERSION = VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_1); - private final CompletionMappingBuilder completionMappingBuilder = new CompletionMappingBuilder(); - - @Override - protected Collection> nodePlugins() { - return Arrays.asList(InternalSettingsPlugin.class); - } - - public void testSimple() throws Exception { - createIndexAndMapping(completionMappingBuilder); - String[][] input = {{"Foo Fighters"}, {"Foo Fighters"}, {"Foo Fighters"}, {"Foo Fighters"}, - {"Generator", "Foo Fighters Generator"}, {"Learn to Fly", "Foo Fighters Learn to Fly"}, - {"The Prodigy"}, {"The Prodigy"}, {"The Prodigy"}, {"Firestarter", "The Prodigy Firestarter"}, - {"Turbonegro"}, {"Turbonegro"}, {"Get it on", "Turbonegro Get it on"}}; // work with frequencies - for (int i = 0; i < input.length; i++) { - client().prepareIndex(INDEX, TYPE, "" + i) - .setSource(jsonBuilder() - .startObject().startObject(FIELD) - .array("input", input[i]) - .endObject() - .endObject() - ) - .execute().actionGet(); - } - - refresh(); - - assertSuggestionsNotInOrder("f", "Foo Fighters", "Firestarter", "Foo Fighters Generator", "Foo Fighters Learn to Fly"); - assertSuggestionsNotInOrder("t", "The Prodigy", "Turbonegro", "Turbonegro Get it on", "The Prodigy Firestarter"); - } - - public void testBasicPrefixSuggestion() throws Exception { - completionMappingBuilder.payloads(true); - createIndexAndMapping(completionMappingBuilder); - for (int i = 0; i < 2; i++) { - createData(i == 0); - assertSuggestions("f", "Firestarter - The Prodigy", "Foo Fighters", - "Generator - Foo Fighters", "Learn to Fly - Foo Fighters"); - assertSuggestions("ge", "Generator - Foo Fighters", "Get it on - Turbonegro"); - assertSuggestions("ge", "Generator - Foo Fighters", "Get it on - Turbonegro"); - assertSuggestions("t", "The Prodigy", "Firestarter - The Prodigy", "Get it on - Turbonegro", "Turbonegro"); - } - } - - public void testThatWeightsAreWorking() throws Exception { - createIndexAndMapping(completionMappingBuilder); - - List similarNames = Arrays.asList("the", "The Prodigy", "The Verve", "The the"); - // the weight is 1000 divided by string length, so the results are easy to to check - for (String similarName : similarNames) { - client().prepareIndex(INDEX, TYPE, similarName).setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value(similarName).endArray() - .field("weight", 1000 / similarName.length()) - .endObject().endObject() - ).get(); - } - - refresh(); - - assertSuggestions("the", "the", "The the", "The Verve", "The Prodigy"); - } - - public void testThatWeightMustBeAnInteger() throws Exception { - createIndexAndMapping(completionMappingBuilder); - - try { - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("sth").endArray() - .field("weight", 2.5) - .endObject().endObject() - ).get(); - fail("Indexing with a float weight was successful, but should not be"); - } catch (MapperParsingException e) { - assertThat(e.toString(), containsString("2.5")); - } - } - - public void testThatWeightCanBeAString() throws Exception { - createIndexAndMapping(completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("testing").endArray() - .field("weight", "10") - .endObject().endObject() - ).get(); - - refresh(); - - 
SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion("testSuggestions", - new CompletionSuggestionBuilder(FIELD).text("test").size(10)) - ).execute().actionGet(); - - assertSuggestions(suggestResponse, "testSuggestions", "testing"); - Suggest.Suggestion.Entry.Option option = suggestResponse.getSuggest().getSuggestion("testSuggestions") - .getEntries().get(0).getOptions().get(0); - assertThat(option, is(instanceOf(CompletionSuggestion.Entry.Option.class))); - CompletionSuggestion.Entry.Option prefixOption = (CompletionSuggestion.Entry.Option) option; - - assertThat(prefixOption.getText().string(), equalTo("testing")); - assertThat(prefixOption.getScore(), equalTo(10F)); - } - - public void testThatWeightMustNotBeANonNumberString() throws Exception { - createIndexAndMapping(completionMappingBuilder); - - try { - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("sth").endArray() - .field("weight", "thisIsNotValid") - .endObject().endObject() - ).get(); - fail("Indexing with a non-number representing string as weight was successful, but should not be"); - } catch (MapperParsingException e) { - assertThat(e.toString(), containsString("thisIsNotValid")); - } - } - - public void testThatWeightAsStringMustBeInt() throws Exception { - createIndexAndMapping(completionMappingBuilder); - - String weight = String.valueOf(Long.MAX_VALUE - 4); - try { - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("testing").endArray() - .field("weight", weight) - .endObject().endObject() - ).get(); - fail("Indexing with weight string representing value > Int.MAX_VALUE was successful, but should not be"); - } catch (MapperParsingException e) { - assertThat(e.toString(), containsString(weight)); - } - } - - public void testThatInputCanBeAStringInsteadOfAnArray() throws Exception { - createIndexAndMapping(completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .field("input", "Foo Fighters") - .field("output", "Boo Fighters") - .endObject().endObject() - ).get(); - - refresh(); - - assertSuggestions("f", "Boo Fighters"); - } - - public void testThatPayloadsAreArbitraryJsonObjects() throws Exception { - completionMappingBuilder.payloads(true); - createIndexAndMapping(completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("Foo Fighters").endArray() - .field("output", "Boo Fighters") - .startObject("payload").field("foo", "bar").startArray("test").value("spam").value("eggs") - .endArray().endObject() - .endObject().endObject() - ).get(); - - refresh(); - - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion("testSuggestions", - new CompletionSuggestionBuilder(FIELD).text("foo").size(10)) - ).execute().actionGet(); - - assertSuggestions(suggestResponse, "testSuggestions", "Boo Fighters"); - Suggest.Suggestion.Entry.Option option = suggestResponse.getSuggest() - .getSuggestion("testSuggestions").getEntries().get(0).getOptions().get(0); - assertThat(option, is(instanceOf(CompletionSuggestion.Entry.Option.class))); - CompletionSuggestion.Entry.Option prefixOption = (CompletionSuggestion.Entry.Option) option; - assertThat(prefixOption.getPayload(), is(notNullValue())); - - // 
parse JSON - Map jsonMap = prefixOption.getPayloadAsMap(); - assertThat(jsonMap.size(), is(2)); - assertThat(jsonMap.get("foo").toString(), is("bar")); - assertThat(jsonMap.get("test"), is(instanceOf(List.class))); - List listValues = (List) jsonMap.get("test"); - assertThat(listValues, hasItems("spam", "eggs")); - } - - public void testPayloadAsNumeric() throws Exception { - completionMappingBuilder.payloads(true); - createIndexAndMapping(completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("Foo Fighters").endArray() - .field("output", "Boo Fighters") - .field("payload", 1) - .endObject().endObject() - ).get(); - - refresh(); - - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion("testSuggestions", - new CompletionSuggestionBuilder(FIELD).text("foo").size(10)) - ).execute().actionGet(); - - assertSuggestions(suggestResponse, "testSuggestions", "Boo Fighters"); - Suggest.Suggestion.Entry.Option option = suggestResponse.getSuggest() - .getSuggestion("testSuggestions").getEntries().get(0).getOptions().get(0); - assertThat(option, is(instanceOf(CompletionSuggestion.Entry.Option.class))); - CompletionSuggestion.Entry.Option prefixOption = (CompletionSuggestion.Entry.Option) option; - assertThat(prefixOption.getPayload(), is(notNullValue())); - - assertThat(prefixOption.getPayloadAsLong(), equalTo(1L)); - } - - public void testPayloadAsString() throws Exception { - completionMappingBuilder.payloads(true); - createIndexAndMapping(completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("Foo Fighters").endArray() - .field("output", "Boo Fighters") - .field("payload", "test") - .endObject().endObject() - ).get(); - - refresh(); - - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion("testSuggestions", - new CompletionSuggestionBuilder(FIELD).text("foo").size(10)) - ).execute().actionGet(); - - assertSuggestions(suggestResponse, "testSuggestions", "Boo Fighters"); - Suggest.Suggestion.Entry.Option option = suggestResponse.getSuggest() - .getSuggestion("testSuggestions").getEntries().get(0).getOptions().get(0); - assertThat(option, is(instanceOf(CompletionSuggestion.Entry.Option.class))); - CompletionSuggestion.Entry.Option prefixOption = (CompletionSuggestion.Entry.Option) option; - assertThat(prefixOption.getPayload(), is(notNullValue())); - - assertThat(prefixOption.getPayloadAsString(), equalTo("test")); - } - - public void testThatExceptionIsThrownWhenPayloadsAreDisabledButInIndexRequest() throws Exception { - completionMappingBuilder.payloads(false); - createIndexAndMapping(completionMappingBuilder); - - try { - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("Foo Fighters").endArray() - .field("output", "Boo Fighters") - .startArray("payload").value("spam").value("eggs").endArray() - .endObject().endObject() - ).get(); - fail("expected MapperException"); - } catch (MapperException expected) { - } - } - - public void testDisabledPreserveSeparators() throws Exception { - completionMappingBuilder.preserveSeparators(false); - createIndexAndMapping(completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - 
.startArray("input").value("Foo Fighters").endArray() - .field("weight", 10) - .endObject().endObject() - ).get(); - - client().prepareIndex(INDEX, TYPE, "2").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("Foof").endArray() - .field("weight", 20) - .endObject().endObject() - ).get(); - - refresh(); - - assertSuggestions("foof", "Foof", "Foo Fighters"); - } - - public void testEnabledPreserveSeparators() throws Exception { - completionMappingBuilder.preserveSeparators(true); - createIndexAndMapping(completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("Foo Fighters").endArray() - .endObject().endObject() - ).get(); - - client().prepareIndex(INDEX, TYPE, "2").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("Foof").endArray() - .endObject().endObject() - ).get(); - - refresh(); - - assertSuggestions("foof", "Foof"); - } - - public void testThatMultipleInputsAreSupported() throws Exception { - createIndexAndMapping(completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("Foo Fighters").value("Fu Fighters").endArray() - .field("output", "The incredible Foo Fighters") - .endObject().endObject() - ).get(); - - refresh(); - - assertSuggestions("foo", "The incredible Foo Fighters"); - assertSuggestions("fu", "The incredible Foo Fighters"); - } - - public void testThatShortSyntaxIsWorking() throws Exception { - createIndexAndMapping(completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startArray(FIELD) - .value("The Prodigy Firestarter").value("Firestarter") - .endArray().endObject() - ).get(); - - refresh(); - - assertSuggestions("t", "The Prodigy Firestarter"); - assertSuggestions("f", "Firestarter"); - } - - public void testThatDisablingPositionIncrementsWorkForStopwords() throws Exception { - // analyzer which removes stopwords... 
so may not be the simple one - completionMappingBuilder.searchAnalyzer("classic").indexAnalyzer("classic").preservePositionIncrements(false); - createIndexAndMapping(completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("The Beatles").endArray() - .endObject().endObject() - ).get(); - - refresh(); - - assertSuggestions("b", "The Beatles"); - } - - public void testThatSynonymsWork() throws Exception { - Settings.Builder settingsBuilder = Settings.builder() - .put("analysis.analyzer.suggest_analyzer_synonyms.type", "custom") - .put("analysis.analyzer.suggest_analyzer_synonyms.tokenizer", "standard") - .putArray("analysis.analyzer.suggest_analyzer_synonyms.filter", "standard", "lowercase", "my_synonyms") - .put("analysis.filter.my_synonyms.type", "synonym") - .putArray("analysis.filter.my_synonyms.synonyms", "foo,renamed"); - completionMappingBuilder.searchAnalyzer("suggest_analyzer_synonyms").indexAnalyzer("suggest_analyzer_synonyms"); - createIndexAndMappingAndSettings(settingsBuilder.build(), completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("Foo Fighters").endArray() - .endObject().endObject() - ).get(); - - refresh(); - - // get suggestions for renamed - assertSuggestions("r", "Foo Fighters"); - } - - public void testThatUpgradeToMultiFieldTypeWorks() throws Exception { - final XContentBuilder mapping = jsonBuilder() - .startObject() - .startObject(TYPE) - .startObject("properties") - .startObject(FIELD) - .field("type", "string") - .endObject() - .endObject() - .endObject() - .endObject(); - assertAcked(prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping(TYPE, mapping)); - client().prepareIndex(INDEX, TYPE, "1") - .setRefreshPolicy(RefreshPolicy.IMMEDIATE) - .setSource(jsonBuilder().startObject().field(FIELD, "Foo Fighters").endObject()).get(); - ensureGreen(INDEX); - - PutMappingResponse putMappingResponse = client().admin().indices().preparePutMapping(INDEX).setType(TYPE) - .setSource(jsonBuilder().startObject() - .startObject(TYPE).startObject("properties") - .startObject(FIELD) - .field("type", "string") - .startObject("fields") - .startObject("suggest").field("type", "completion").field("analyzer", "simple").endObject() - .endObject() - .endObject() - .endObject().endObject() - .endObject()) - .get(); - assertThat(putMappingResponse.isAcknowledged(), is(true)); - - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("suggs", - new CompletionSuggestionBuilder(FIELD + ".suggest").text("f").size(10)) - ).execute().actionGet(); - assertSuggestions(suggestResponse, "suggs"); - - client().prepareIndex(INDEX, TYPE, "1").setRefreshPolicy(RefreshPolicy.IMMEDIATE) - .setSource(jsonBuilder().startObject().field(FIELD, "Foo Fighters").endObject()).get(); - ensureGreen(INDEX); - - SearchResponse afterReindexingResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion("suggs", - SuggestBuilders.completionSuggestion(FIELD + ".suggest").text("f").size(10)) - ).execute().actionGet(); - assertSuggestions(afterReindexingResponse, "suggs", "Foo Fighters"); - } - - public void testThatUpgradeToMultiFieldsWorks() throws Exception { - final XContentBuilder mapping = jsonBuilder() - .startObject() - .startObject(TYPE) - 
.startObject("properties") - .startObject(FIELD) - .field("type", "string") - .endObject() - .endObject() - .endObject() - .endObject(); - assertAcked(prepareCreate(INDEX) - .addMapping(TYPE, mapping) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id))); - client().prepareIndex(INDEX, TYPE, "1") - .setRefreshPolicy(RefreshPolicy.IMMEDIATE) - .setSource(jsonBuilder().startObject().field(FIELD, "Foo Fighters").endObject()).get(); - ensureGreen(INDEX); - - PutMappingResponse putMappingResponse = client().admin().indices() - .preparePutMapping(INDEX).setType(TYPE).setSource(jsonBuilder().startObject() - .startObject(TYPE).startObject("properties") - .startObject(FIELD) - .field("type", "string") - .startObject("fields") - .startObject("suggest").field("type", "completion").field("analyzer", "simple").endObject() - .endObject() - .endObject() - .endObject().endObject() - .endObject()) - .get(); - assertThat(putMappingResponse.isAcknowledged(), is(true)); - - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("suggs", - SuggestBuilders.completionSuggestion(FIELD + ".suggest").text("f").size(10)) - ).execute().actionGet(); - assertSuggestions(suggestResponse, "suggs"); - - client().prepareIndex(INDEX, TYPE, "1").setRefreshPolicy(RefreshPolicy.IMMEDIATE) - .setSource(jsonBuilder().startObject().field(FIELD, "Foo Fighters").endObject()).get(); - ensureGreen(INDEX); - - SearchResponse afterReindexingResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion("suggs", - SuggestBuilders.completionSuggestion(FIELD + ".suggest").text("f").size(10)) - ).execute().actionGet(); - assertSuggestions(afterReindexingResponse, "suggs", "Foo Fighters"); - } - - public void testThatFuzzySuggesterWorks() throws Exception { - createIndexAndMapping(completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("Nirvana").endArray() - .endObject().endObject() - ).get(); - - refresh(); - - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", - SuggestBuilders.completionSuggestion(FIELD).prefix("Nirv", Fuzziness.ONE).size(10)) - ).execute().actionGet(); - assertSuggestions(suggestResponse, false, "foo", "Nirvana"); - - suggestResponse = client().prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", - SuggestBuilders.completionSuggestion(FIELD).prefix("Nirw", Fuzziness.ONE).size(10)) - ).execute().actionGet(); - assertSuggestions(suggestResponse, false, "foo", "Nirvana"); - } - - public void testThatFuzzySuggesterSupportsEditDistances() throws Exception { - createIndexAndMapping(completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("Nirvana").endArray() - .endObject().endObject() - ).get(); - - refresh(); - - // edit distance 1 - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", - SuggestBuilders.completionSuggestion(FIELD).prefix("Norw", Fuzziness.ONE).size(10)) - ).execute().actionGet(); - assertSuggestions(suggestResponse, false, "foo"); - - // edit distance 2 - suggestResponse = client().prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", - SuggestBuilders.completionSuggestion(FIELD).prefix("Norw", Fuzziness.TWO).size(10)) - 
).execute().actionGet(); - assertSuggestions(suggestResponse, false, "foo", "Nirvana"); - } - - public void testThatFuzzySuggesterSupportsTranspositions() throws Exception { - createIndexAndMapping(completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("Nirvana").endArray() - .endObject().endObject() - ).get(); - - refresh(); - - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", - SuggestBuilders.completionSuggestion(FIELD).prefix("Nriv", - FuzzyOptions.builder().setTranspositions(false).build()).size(10)) - ).execute().actionGet(); - assertSuggestions(suggestResponse, false, "foo"); - - suggestResponse = client().prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion("foo", - SuggestBuilders.completionSuggestion(FIELD).prefix("Nriv", - FuzzyOptions.builder().setTranspositions(true).build()).size(10)) - ).execute().actionGet(); - assertSuggestions(suggestResponse, false, "foo", "Nirvana"); - } - - public void testThatFuzzySuggesterSupportsMinPrefixLength() throws Exception { - createIndexAndMapping(completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("Nirvana").endArray() - .endObject().endObject() - ).get(); - - refresh(); - - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion("foo", - SuggestBuilders.completionSuggestion(FIELD).prefix("Nriva", - FuzzyOptions.builder().setFuzzyMinLength(6).build()).size(10)) - ).execute().actionGet(); - assertSuggestions(suggestResponse, false, "foo"); - - suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion("foo", - SuggestBuilders.completionSuggestion(FIELD).prefix("Nrivan", - FuzzyOptions.builder().setFuzzyMinLength(6).build()).size(10)) - ).execute().actionGet(); - assertSuggestions(suggestResponse, false, "foo", "Nirvana"); - } - - public void testThatFuzzySuggesterSupportsNonPrefixLength() throws Exception { - createIndexAndMapping(completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("Nirvana").endArray() - .endObject().endObject() - ).get(); - - refresh(); - - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion("foo", - SuggestBuilders.completionSuggestion(FIELD).prefix("Nirw", - FuzzyOptions.builder().setFuzzyPrefixLength(4).build()).size(10)) - ).execute().actionGet(); - assertSuggestions(suggestResponse, false, "foo"); - - suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion("foo", - SuggestBuilders.completionSuggestion(FIELD).prefix("Nirvo", - FuzzyOptions.builder().setFuzzyPrefixLength(4).build()).size(10)) - ).execute().actionGet(); - assertSuggestions(suggestResponse, false, "foo", "Nirvana"); - } - - public void testThatFuzzySuggesterIsUnicodeAware() throws Exception { - createIndexAndMapping(completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("ööööö").endArray() - .endObject().endObject() - ).get(); - - refresh(); - - // suggestion with a character, which needs unicode awareness - CompletionSuggestionBuilder completionSuggestionBuilder = - 
SuggestBuilders.completionSuggestion(FIELD).prefix("öööи", - FuzzyOptions.builder().setUnicodeAware(true).build()).size(10); - - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion("foo", completionSuggestionBuilder)).execute().actionGet(); - assertSuggestions(suggestResponse, false, "foo", "ööööö"); - - // removing unicode awareness leads to no result - completionSuggestionBuilder.prefix("öööи", FuzzyOptions.builder().setUnicodeAware(false).build()); - suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion("foo" ,completionSuggestionBuilder)).execute().actionGet(); - assertSuggestions(suggestResponse, false, "foo"); - - // increasing edit distance instead of unicode awareness works again, as this is only a single character - completionSuggestionBuilder.prefix("öööи", FuzzyOptions.builder().setFuzziness(2).build()); - suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion("foo", completionSuggestionBuilder)).execute().actionGet(); - assertSuggestions(suggestResponse, false, "foo", "ööööö"); - } - - public void testThatStatsAreWorking() throws Exception { - String otherField = "testOtherField"; - - assertAcked(prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id))); - - PutMappingResponse putMappingResponse = client().admin().indices() - .preparePutMapping(INDEX).setType(TYPE).setSource(jsonBuilder().startObject() - .startObject(TYPE).startObject("properties") - .startObject(FIELD.toString()) - .field("type", "completion").field("analyzer", "simple") - .endObject() - .startObject(otherField) - .field("type", "completion").field("analyzer", "simple") - .endObject() - .endObject().endObject().endObject()) - .get(); - assertThat(putMappingResponse.isAcknowledged(), is(true)); - - // Index two entities - client().prepareIndex(INDEX, TYPE, "1").setRefreshPolicy(RefreshPolicy.IMMEDIATE) - .setSource(jsonBuilder().startObject().field(FIELD, "Foo Fighters").field(otherField, "WHATEVER").endObject()) - .get(); - client().prepareIndex(INDEX, TYPE, "2").setRefreshPolicy(RefreshPolicy.IMMEDIATE) - .setSource(jsonBuilder().startObject().field(FIELD, "Bar Fighters").field(otherField, "WHATEVER2").endObject()) - .get(); - - // Get all stats - IndicesStatsResponse indicesStatsResponse = client().admin().indices().prepareStats(INDEX).setIndices(INDEX) - .setCompletion(true).get(); - CompletionStats completionStats = indicesStatsResponse.getIndex(INDEX).getPrimaries().completion; - assertThat(completionStats, notNullValue()); - long totalSizeInBytes = completionStats.getSizeInBytes(); - assertThat(totalSizeInBytes, is(greaterThan(0L))); - - IndicesStatsResponse singleFieldStats = client().admin().indices().prepareStats(INDEX) - .setIndices(INDEX).setCompletion(true).setCompletionFields(FIELD).get(); - long singleFieldSizeInBytes = singleFieldStats.getIndex(INDEX).getPrimaries().completion.getFields().get(FIELD); - IndicesStatsResponse otherFieldStats = client().admin().indices().prepareStats(INDEX) - .setIndices(INDEX).setCompletion(true).setCompletionFields(otherField).get(); - long otherFieldSizeInBytes = otherFieldStats.getIndex(INDEX).getPrimaries().completion.getFields().get(otherField); - assertThat(singleFieldSizeInBytes + otherFieldSizeInBytes, is(totalSizeInBytes)); - - // regexes - IndicesStatsResponse regexFieldStats = client().admin().indices().prepareStats(INDEX) - 
.setIndices(INDEX).setCompletion(true).setCompletionFields("*").get(); - ObjectLongHashMap fields = regexFieldStats.getIndex(INDEX).getPrimaries().completion.getFields(); - long regexSizeInBytes = fields.get(FIELD) + fields.get(otherField); - assertThat(regexSizeInBytes, is(totalSizeInBytes)); - } - - public void testThatSortingOnCompletionFieldReturnsUsefulException() throws Exception { - createIndexAndMapping(completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("Nirvana").endArray() - .endObject().endObject() - ).get(); - - refresh(); - try { - client().prepareSearch(INDEX).setTypes(TYPE).addSort(new FieldSortBuilder(FIELD)).execute().actionGet(); - fail("Expected an exception due to trying to sort on completion field, but did not happen"); - } catch (SearchPhaseExecutionException e) { - assertThat(e.status().getStatus(), is(400)); - assertThat(e.toString(), containsString("Fielddata is not supported on field [" + FIELD + "] of type [completion]]")); - } - } - - public void testThatSuggestStopFilterWorks() throws Exception { - Settings.Builder settingsBuilder = Settings.builder() - .put("index.analysis.analyzer.stoptest.tokenizer", "standard") - .putArray("index.analysis.analyzer.stoptest.filter", "standard", "suggest_stop_filter") - .put("index.analysis.filter.suggest_stop_filter.type", "stop") - .put("index.analysis.filter.suggest_stop_filter.remove_trailing", false); - - CompletionMappingBuilder completionMappingBuilder = new CompletionMappingBuilder(); - completionMappingBuilder.preserveSeparators(true).preservePositionIncrements(true); - completionMappingBuilder.searchAnalyzer("stoptest"); - completionMappingBuilder.indexAnalyzer("simple"); - createIndexAndMappingAndSettings(settingsBuilder.build(), completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("Feed trolls").endArray() - .field("weight", 5).endObject().endObject() - ).get(); - - // Higher weight so it's ranked first: - client().prepareIndex(INDEX, TYPE, "2").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("Feed the trolls").endArray() - .field("weight", 10).endObject().endObject() - ).get(); - - refresh(); - - assertSuggestions("f", "Feed the trolls", "Feed trolls"); - assertSuggestions("fe", "Feed the trolls", "Feed trolls"); - assertSuggestions("fee", "Feed the trolls", "Feed trolls"); - assertSuggestions("feed", "Feed the trolls", "Feed trolls"); - assertSuggestions("feed t", "Feed the trolls", "Feed trolls"); - assertSuggestions("feed the", "Feed the trolls"); - // stop word complete, gets ignored on query time, makes it "feed" only - assertSuggestions("feed the ", "Feed the trolls", "Feed trolls"); - // stopword gets removed, but position increment kicks in, which doesnt work for the prefix suggester - assertSuggestions("feed the t"); - } - - public void testThatIndexingInvalidFieldsInCompletionFieldResultsInException() throws Exception { - CompletionMappingBuilder completionMappingBuilder = new CompletionMappingBuilder(); - createIndexAndMapping(completionMappingBuilder); - - try { - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("FRIGGININVALID").value("Nirvana").endArray() - .endObject().endObject()).get(); - fail("expected MapperParsingException"); - } catch (MapperParsingException expected) {} - } - 
- - public void assertSuggestions(String suggestion, String... suggestions) { - String suggestionName = RandomStrings.randomAsciiOfLength(random(), 10); - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion(suggestionName, - SuggestBuilders.completionSuggestion(FIELD).text(suggestion).size(10)) - ).execute().actionGet(); - - assertSuggestions(suggestResponse, suggestionName, suggestions); - } - - public void assertSuggestionsNotInOrder(String suggestString, String... suggestions) { - String suggestionName = RandomStrings.randomAsciiOfLength(random(), 10); - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion(suggestionName, - SuggestBuilders.completionSuggestion(FIELD).text(suggestString).size(10)) - ).execute().actionGet(); - - assertSuggestions(suggestResponse, false, suggestionName, suggestions); - } - - private void assertSuggestions(SearchResponse suggestResponse, String name, String... suggestions) { - assertSuggestions(suggestResponse, true, name, suggestions); - } - - private void assertSuggestions(SearchResponse suggestResponse, boolean suggestionOrderStrict, String name, - String... suggestions) { - assertAllSuccessful(suggestResponse); - - List suggestionNames = new ArrayList<>(); - for (Suggest.Suggestion> suggestion : - suggestResponse.getSuggest()) { - suggestionNames.add(suggestion.getName()); - } - String expectFieldInResponseMsg = - String.format(Locale.ROOT, "Expected suggestion named %s in response, got %s", name, suggestionNames); - assertThat(expectFieldInResponseMsg, suggestResponse.getSuggest().getSuggestion(name), is(notNullValue())); - - Suggest.Suggestion> suggestion = - suggestResponse.getSuggest().getSuggestion(name); - - List suggestionList = getNames(suggestion.getEntries().get(0)); - List options = suggestion.getEntries().get(0).getOptions(); - - String assertMsg = String.format(Locale.ROOT, "Expected options %s length to be %s, but was %s", - suggestionList, suggestions.length, options.size()); - assertThat(assertMsg, options.size(), is(suggestions.length)); - if (suggestionOrderStrict) { - for (int i = 0; i < suggestions.length; i++) { - String errMsg = String.format(Locale.ROOT, "Expected elem %s in list %s to be [%s] score: %s", - i, suggestionList, suggestions[i], options.get(i).getScore()); - assertThat(errMsg, options.get(i).getText().toString(), is(suggestions[i])); - } - } else { - for (String expectedSuggestion : suggestions) { - String errMsg = String.format(Locale.ROOT, "Expected elem %s to be in list %s", - expectedSuggestion, suggestionList); - assertThat(errMsg, suggestionList, hasItem(expectedSuggestion)); - } - } - } - - private List getNames(Suggest.Suggestion.Entry suggestEntry) { - List names = new ArrayList<>(); - for (Suggest.Suggestion.Entry.Option entry : suggestEntry.getOptions()) { - names.add(entry.getText().string()); - } - return names; - } - - private void createIndexAndMappingAndSettings(Settings settings, CompletionMappingBuilder completionMappingBuilder) - throws IOException { - assertAcked(prepareCreate(INDEX) - .setSettings(Settings.builder().put(indexSettings()).put(settings) - .put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id).build()) - .addMapping(TYPE, jsonBuilder().startObject() - .startObject(TYPE).startObject("properties") - .startObject(FIELD) - .field("type", "completion") - .field("analyzer", completionMappingBuilder.indexAnalyzer) - .field("search_analyzer", 
completionMappingBuilder.searchAnalyzer) - .field("payloads", completionMappingBuilder.payloads) - .field("preserve_separators", completionMappingBuilder.preserveSeparators) - .field("preserve_position_increments", completionMappingBuilder.preservePositionIncrements) - .endObject() - .endObject().endObject() - .endObject()) - .get()); - } - - private void createIndexAndMapping(CompletionMappingBuilder completionMappingBuilder) throws IOException { - createIndexAndMappingAndSettings(Settings.EMPTY, completionMappingBuilder); - } - - private void createData(boolean optimize) throws IOException, InterruptedException, ExecutionException { - String[][] input = {{"Foo Fighters"}, {"Generator", "Foo Fighters Generator"}, - {"Learn to Fly", "Foo Fighters Learn to Fly"}, {"The Prodigy"}, {"Firestarter", "The Prodigy Firestarter"}, - {"Turbonegro"}, {"Get it on", "Turbonegro Get it on"}}; - String[] surface = {"Foo Fighters", "Generator - Foo Fighters", "Learn to Fly - Foo Fighters", "The Prodigy", - "Firestarter - The Prodigy", "Turbonegro", "Get it on - Turbonegro"}; - int[] weight = {10, 9, 8, 12, 11, 6, 7}; - IndexRequestBuilder[] builders = new IndexRequestBuilder[input.length]; - for (int i = 0; i < builders.length; i++) { - builders[i] = client().prepareIndex(INDEX, TYPE, "" + i) - .setSource(jsonBuilder() - .startObject().startObject(FIELD) - .array("input", input[i]) - .field("output", surface[i]) - .startObject("payload").field("id", i).endObject() - .field("weight", 1) // WE FORCEFULLY INDEX A BOGUS WEIGHT - .endObject() - .endObject() - ); - } - indexRandom(false, builders); - - for (int i = 0; i < builders.length; i++) { // add them again to make sure we deduplicate on the surface form - builders[i] = client().prepareIndex(INDEX, TYPE, "n" + i) - .setSource(jsonBuilder() - .startObject().startObject(FIELD) - .array("input", input[i]) - .field("output", surface[i]) - .startObject("payload").field("id", i).endObject() - .field("weight", weight[i]) - .endObject() - .endObject() - ); - } - indexRandom(false, builders); - - client().admin().indices().prepareRefresh(INDEX).execute().actionGet(); - if (optimize) { - // make sure merging works just fine - client().admin().indices().prepareFlush(INDEX).execute().actionGet(); - client().admin().indices().prepareForceMerge(INDEX).setMaxNumSegments(randomIntBetween(1, 5)).get(); - } - } - - // see #3555 - public void testPrunedSegments() throws IOException { - createIndexAndMappingAndSettings( - Settings.builder().put(SETTING_NUMBER_OF_SHARDS, 1).put(SETTING_NUMBER_OF_REPLICAS, 0).build(), - completionMappingBuilder); - - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value("The Beatles").endArray() - .endObject().endObject() - ).get(); - client().prepareIndex(INDEX, TYPE, "2").setSource(jsonBuilder() - .startObject() - .field("somefield", "somevalue") - .endObject() - ).get(); // we have 2 docs in a segment... - ForceMergeResponse actionGet = client().admin().indices().prepareForceMerge().setFlush(true) - .setMaxNumSegments(1).execute().actionGet(); - assertAllSuccessful(actionGet); - refresh(); - // update the first one and then merge.. 
the target segment will have no value in FIELD - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject() - .field("somefield", "somevalue") - .endObject() - ).get(); - actionGet = client().admin().indices().prepareForceMerge().setFlush(true).setMaxNumSegments(1) - .execute().actionGet(); - assertAllSuccessful(actionGet); - refresh(); - - assertSuggestions("b"); - assertThat(2L, equalTo(client().prepareSearch(INDEX).get().getHits().totalHits())); - for (IndexShardSegments seg : client().admin().indices().prepareSegments().get().getIndices().get(INDEX)) { - ShardSegments[] shards = seg.getShards(); - for (ShardSegments shardSegments : shards) { - assertThat(shardSegments.getSegments().size(), equalTo(1)); - } - } - } - - public void testMaxFieldLength() throws IOException { - client().admin().indices().prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)).get(); - ensureGreen(); - int iters = scaledRandomIntBetween(10, 20); - for (int i = 0; i < iters; i++) { - int maxInputLen = between(3, 50); - String str = replaceReservedChars( - randomRealisticUnicodeOfCodepointLengthBetween(maxInputLen + 1, - maxInputLen + scaledRandomIntBetween(2, 50)), (char) 0x01); - assertAcked(client().admin().indices().preparePutMapping(INDEX) - .setType(TYPE).setSource(jsonBuilder().startObject() - .startObject(TYPE).startObject("properties") - .startObject(FIELD) - .field("type", "completion") - .field("max_input_length", maxInputLen) - // upgrade mapping each time - .field("analyzer", "keyword") - .endObject() - .endObject().endObject() - .endObject())); - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value(str).endArray() - .field("output", "foobar") - .endObject().endObject() - ).setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); - // need to flush and refresh, because we keep changing the same document - // we have to make sure that segments without any live documents are deleted - flushAndRefresh(); - int prefixLen = CompletionFieldMapper2x.correctSubStringLen(str, between(1, maxInputLen - 1)); - assertSuggestions(str.substring(0, prefixLen), "foobar"); - if (maxInputLen + 1 < str.length()) { - int offset = Character.isHighSurrogate(str.charAt(maxInputLen - 1)) ? 
2 : 1; - int correctSubStringLen = CompletionFieldMapper2x.correctSubStringLen(str, maxInputLen + offset); - String shortenedSuggestion = str.substring(0, correctSubStringLen); - assertSuggestions(shortenedSuggestion); - } - } - } - - // see #3596 - public void testVeryLongInput() throws IOException { - assertAcked(client().admin().indices().prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping(TYPE, jsonBuilder().startObject() - .startObject(TYPE).startObject("properties") - .startObject(FIELD) - .field("type", "completion") - .endObject() - .endObject().endObject() - .endObject()).get()); - // can cause stack overflow without the default max_input_length - String longString = replaceReservedChars(randomRealisticUnicodeOfLength(randomIntBetween(5000, 10000)), (char) 0x01); - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value(longString).endArray() - .field("output", "foobar") - .endObject().endObject() - ).setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); - - } - - // see #3648 - public void testReservedChars() throws IOException { - assertAcked(client().admin().indices().prepareCreate(INDEX).addMapping(TYPE, jsonBuilder().startObject() - .startObject(TYPE).startObject("properties") - .startObject(FIELD) - .field("type", "completion") - .endObject() - .endObject().endObject() - .endObject()).get()); - // can cause stack overflow without the default max_input_length - String string = "foo" + (char) 0x00 + "bar"; - try { - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject().startObject(FIELD) - .startArray("input").value(string).endArray() - .field("output", "foobar") - .endObject().endObject() - ).setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); - fail("expected MapperParsingException"); - } catch (MapperParsingException expected) {} - } - - // see #5930 - public void testIssue5930() throws IOException { - assertAcked(client().admin().indices().prepareCreate(INDEX).addMapping(TYPE, jsonBuilder().startObject() - .startObject(TYPE).startObject("properties") - .startObject(FIELD) - .field("type", "completion") - .endObject() - .endObject().endObject() - .endObject()).get()); - String string = "foo bar"; - client().prepareIndex(INDEX, TYPE, "1").setSource(jsonBuilder() - .startObject() - .field(FIELD, string) - .endObject() - ).setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); - - try { - client().prepareSearch(INDEX).addAggregation(AggregationBuilders.terms("suggest_agg").field(FIELD) - .collectMode(randomFrom(SubAggCollectionMode.values()))).execute().actionGet(); - // Exception must be thrown - assertFalse(true); - } catch (SearchPhaseExecutionException e) { - assertTrue(e.toString().contains("Fielddata is not supported on field [" + FIELD + "] of type [completion]")); - } - } - - // see issue #6399 - public void testIndexingUnrelatedNullValue() throws Exception { - String mapping = jsonBuilder() - .startObject() - .startObject(TYPE) - .startObject("properties") - .startObject(FIELD) - .field("type", "completion") - .endObject() - .endObject() - .endObject() - .endObject() - .string(); - - assertAcked(client().admin().indices().prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping(TYPE, mapping).get()); - ensureGreen(); - - client().prepareIndex(INDEX, TYPE, "1").setSource(FIELD, "strings make me happy", FIELD + "_1", "nulls make me 
sad") - .setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); - - try { - client().prepareIndex(INDEX, TYPE, "2").setSource(FIELD, null, FIELD + "_1", "nulls make me sad") - .setRefreshPolicy(RefreshPolicy.IMMEDIATE).get(); - fail("Expected MapperParsingException for null value"); - } catch (MapperParsingException e) { - // make sure that the exception has the name of the field causing the error - assertTrue(e.getDetailedMessage().contains(FIELD)); - } - - } - - private static String replaceReservedChars(String input, char replacement) { - char[] charArray = input.toCharArray(); - for (int i = 0; i < charArray.length; i++) { - if (CompletionFieldMapper2x.isReservedChar(charArray[i])) { - charArray[i] = replacement; - } - } - return new String(charArray); - } - - private static class CompletionMappingBuilder { - private String searchAnalyzer = "simple"; - private String indexAnalyzer = "simple"; - private Boolean payloads = getRandom().nextBoolean(); - private Boolean preserveSeparators = getRandom().nextBoolean(); - private Boolean preservePositionIncrements = getRandom().nextBoolean(); - - public CompletionMappingBuilder searchAnalyzer(String searchAnalyzer) { - this.searchAnalyzer = searchAnalyzer; - return this; - } - public CompletionMappingBuilder indexAnalyzer(String indexAnalyzer) { - this.indexAnalyzer = indexAnalyzer; - return this; - } - public CompletionMappingBuilder payloads(Boolean payloads) { - this.payloads = payloads; - return this; - } - public CompletionMappingBuilder preserveSeparators(Boolean preserveSeparators) { - this.preserveSeparators = preserveSeparators; - return this; - } - public CompletionMappingBuilder preservePositionIncrements(Boolean preservePositionIncrements) { - this.preservePositionIncrements = preservePositionIncrements; - return this; - } - } -} diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java index 14fab9d72b2..74920fb8fc7 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java @@ -24,17 +24,14 @@ import com.carrotsearch.randomizedtesting.generators.RandomStrings; import org.apache.lucene.analysis.TokenStreamToAutomaton; import org.apache.lucene.search.suggest.document.ContextSuggestField; import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; -import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; import org.elasticsearch.action.admin.indices.segments.IndexShardSegments; import org.elasticsearch.action.admin.indices.segments.ShardSegments; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.search.ReduceSearchPhaseException; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -52,13 +49,11 @@ import org.elasticsearch.search.suggest.completion.context.ContextMapping; import org.elasticsearch.search.suggest.completion.context.GeoContextMapping; import 
org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.VersionUtils; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.LinkedHashMap; import java.util.List; import java.util.Locale; @@ -162,58 +157,6 @@ public class CompletionSuggestSearchIT extends ESIntegTestCase { assertSuggestions("foo", prefix, "sugxgestion10", "sugxgestion9", "sugxgestion8", "sugxgestion7", "sugxgestion6"); } - public void testMixedCompletion() throws Exception { - final CompletionMappingBuilder mapping = new CompletionMappingBuilder(); - createIndexAndMapping(mapping); - String otherIndex = INDEX + "_1"; - assertAcked(client().admin().indices().prepareCreate(otherIndex) - .setSettings(Settings.builder().put(indexSettings()).put(IndexMetaData.SETTING_VERSION_CREATED, - VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_2_3_1).id)) - .addMapping(TYPE, jsonBuilder().startObject() - .startObject(TYPE).startObject("properties") - .startObject(FIELD) - .field("type", "completion") - .field("analyzer", mapping.indexAnalyzer) - .field("search_analyzer", mapping.searchAnalyzer) - .field("preserve_separators", mapping.preserveSeparators) - .field("preserve_position_increments", mapping.preservePositionIncrements) - .endObject() - .endObject().endObject() - .endObject()) - .get()); - int numDocs = 10; - List indexRequestBuilders = new ArrayList<>(); - for (int i = 1; i <= numDocs; i++) { - indexRequestBuilders.add(client().prepareIndex(otherIndex, TYPE, "" + i) - .setSource(jsonBuilder() - .startObject() - .startObject(FIELD) - .field("input", "suggestion" + i) - .field("weight", i) - .endObject() - .endObject() - )); - indexRequestBuilders.add(client().prepareIndex(INDEX, TYPE, "" + i) - .setSource(jsonBuilder() - .startObject() - .startObject(FIELD) - .field("input", "suggestion" + i) - .field("weight", i) - .endObject() - .endObject() - )); - } - indexRandom(true, indexRequestBuilders); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).text("sugg"); - try { - client().prepareSearch(INDEX, otherIndex).suggest(new SuggestBuilder().addSuggestion("foo", prefix)) - .execute().actionGet(); - fail("querying on mixed completion suggester should throw an error"); - } catch (ReduceSearchPhaseException e) { - assertThat(e.getCause().getMessage(), containsString("detected mixed suggestion results")); - } - } - public void testEarlyTermination() throws Exception { final CompletionMappingBuilder mapping = new CompletionMappingBuilder(); createIndexAndMapping(mapping); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/ContextSuggestSearch2xIT.java b/core/src/test/java/org/elasticsearch/search/suggest/ContextSuggestSearch2xIT.java deleted file mode 100644 index 50733f10838..00000000000 --- a/core/src/test/java/org/elasticsearch/search/suggest/ContextSuggestSearch2xIT.java +++ /dev/null @@ -1,1096 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -package org.elasticsearch.search.suggest; - -import org.elasticsearch.Version; -import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; -import org.elasticsearch.action.search.SearchRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.Fuzziness; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.mapper.MapperParsingException; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.search.suggest.Suggest.Suggestion; -import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry; -import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry.Option; -import org.elasticsearch.search.suggest.completion.CompletionSuggestionBuilder; -import org.elasticsearch.search.suggest.completion2x.CompletionSuggestion; -import org.elasticsearch.search.suggest.completion2x.context.ContextBuilder; -import org.elasticsearch.search.suggest.completion2x.context.ContextMapping; -import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.VersionUtils; -import org.hamcrest.Matchers; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import static com.carrotsearch.randomizedtesting.RandomizedTest.getRandom; -import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestion; -import static org.elasticsearch.test.hamcrest.ElasticsearchGeoAssertions.assertDistance; -import static org.hamcrest.Matchers.containsString; - -@SuppressCodecs("*") // requires custom completion format -public class ContextSuggestSearch2xIT extends ESIntegTestCase { - - private static final String INDEX = "test"; - private static final String TYPE = "testType"; - private static final String FIELD = "testField"; - private final Version PRE2X_VERSION = VersionUtils.randomVersionBetween(getRandom(), Version.V_2_0_0, Version.V_2_3_1); - - private static final String[][] HEROS = { - { "Afari, Jamal", "Jamal Afari", "Jamal" }, - { "Allerdyce, St. John", "Allerdyce, John", "St. John", "St. 
John Allerdyce" }, - { "Beaubier, Jean-Paul", "Jean-Paul Beaubier", "Jean-Paul" }, - { "Beaubier, Jeanne-Marie", "Jeanne-Marie Beaubier", "Jeanne-Marie" }, - { "Braddock, Elizabeth \"Betsy\"", "Betsy", "Braddock, Elizabeth", "Elizabeth Braddock", "Elizabeth" }, - { "Cody Mushumanski gun Man", "the hunter", "gun man", "Cody Mushumanski" }, - { "Corbo, Adrian", "Adrian Corbo", "Adrian" }, - { "Corbo, Jared", "Jared Corbo", "Jared" }, - { "Creel, Carl \"Crusher\"", "Creel, Carl", "Crusher", "Carl Creel", "Carl" }, - { "Crichton, Lady Jacqueline Falsworth", "Lady Jacqueline Falsworth Crichton", "Lady Jacqueline Falsworth", - "Jacqueline Falsworth" }, { "Crichton, Kenneth", "Kenneth Crichton", "Kenneth" }, - { "MacKenzie, Al", "Al MacKenzie", "Al" }, - { "MacPherran, Mary \"Skeeter\"", "Mary MacPherran \"Skeeter\"", "MacPherran, Mary", "Skeeter", "Mary MacPherran" }, - { "MacTaggert, Moira", "Moira MacTaggert", "Moira" }, { "Rasputin, Illyana", "Illyana Rasputin", "Illyana" }, - { "Rasputin, Mikhail", "Mikhail Rasputin", "Mikhail" }, { "Rasputin, Piotr", "Piotr Rasputin", "Piotr" }, - { "Smythe, Alistair", "Alistair Smythe", "Alistair" }, { "Smythe, Spencer", "Spencer Smythe", "Spencer" }, - { "Whitemane, Aelfyre", "Aelfyre Whitemane", "Aelfyre" }, { "Whitemane, Kofi", "Kofi Whitemane", "Kofi" } }; - - @Override - protected Collection> nodePlugins() { - return Arrays.asList(InternalSettingsPlugin.class); - } - - public void testBasicGeo() throws Exception { - assertAcked(prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping(TYPE, createMapping(TYPE, ContextBuilder.location("st").precision("5km").neighbors(true)))); - - XContentBuilder source1 = jsonBuilder() - .startObject() - .startObject(FIELD) - .array("input", "Hotel Amsterdam", "Amsterdam") - .field("output", "Hotel Amsterdam in Berlin") - .startObject("context").latlon("st", 52.529172, 13.407333).endObject() - .endObject() - .endObject(); - client().prepareIndex(INDEX, TYPE, "1").setSource(source1).execute().actionGet(); - - XContentBuilder source2 = jsonBuilder() - .startObject() - .startObject(FIELD) - .array("input", "Hotel Berlin", "Berlin") - .field("output", "Hotel Berlin in Amsterdam") - .startObject("context").latlon("st", 52.363389, 4.888695).endObject() - .endObject() - .endObject(); - client().prepareIndex(INDEX, TYPE, "2").setSource(source2).execute().actionGet(); - - client().admin().indices().prepareRefresh(INDEX).get(); - - String suggestionName = randomAsciiOfLength(10); - CompletionSuggestionBuilder context = SuggestBuilders.completionSuggestion(FIELD).text("h").size(10).contexts( - new CompletionSuggestionBuilder.Contexts2x().addGeoLocation("st", 52.52, 13.4)); - - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion(suggestionName, context)).execute().actionGet(); - - assertEquals(suggestResponse.getSuggest().size(), 1); - assertEquals("Hotel Amsterdam in Berlin", suggestResponse.getSuggest() - .getSuggestion(suggestionName).iterator().next().getOptions().iterator().next().getText().string()); - } - - public void testMultiLevelGeo() throws Exception { - assertAcked(prepareCreate(INDEX).setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping(TYPE, createMapping(TYPE, ContextBuilder.location("st") - .precision(1) - .precision(2) - .precision(3) - .precision(4) - .precision(5) - .precision(6) - .precision(7) - .precision(8) - .precision(9) - 
.precision(10) - .precision(11) - .precision(12) - .neighbors(true)))); - - XContentBuilder source1 = jsonBuilder() - .startObject() - .startObject(FIELD) - .array("input", "Hotel Amsterdam", "Amsterdam") - .field("output", "Hotel Amsterdam in Berlin") - .startObject("context").latlon("st", 52.529172, 13.407333).endObject() - .endObject() - .endObject(); - client().prepareIndex(INDEX, TYPE, "1").setSource(source1).execute().actionGet(); - - client().admin().indices().prepareRefresh(INDEX).get(); - - for (int precision = 1; precision <= 12; precision++) { - String suggestionName = randomAsciiOfLength(10); - CompletionSuggestionBuilder context = new CompletionSuggestionBuilder(FIELD).text("h").size(10) - .contexts(new CompletionSuggestionBuilder.Contexts2x().addGeoLocation("st", 52.529172, 13.407333, precision)); - - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion(suggestionName, context)).execute().actionGet(); - assertEquals(suggestResponse.getSuggest().size(), 1); - assertEquals("Hotel Amsterdam in Berlin", suggestResponse.getSuggest().getSuggestion(suggestionName).iterator().next() - .getOptions().iterator().next().getText().string()); - } - } - - public void testMappingIdempotency() throws Exception { - final int nPrecision = randomIntBetween(4, 12); - List precisions = new ArrayList<>(nPrecision); - for (int i = 0; i < nPrecision; i++) { - precisions.add(i+1); - } - Collections.shuffle(precisions, getRandom()); - int[] precision = new int[nPrecision]; - for (int i = 0; i < precision.length; i++) { - precision[i] = precisions.get(i); - } - XContentBuilder mapping = jsonBuilder().startObject().startObject(TYPE) - .startObject("properties").startObject("completion") - .field("type", "completion") - .startObject("context") - .startObject("location") - .field("type", "geo") - .array("precision", precision) - .endObject() - .endObject() - .endObject().endObject() - .endObject().endObject(); - - assertAcked(prepareCreate(INDEX).setSettings( - Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)).addMapping(TYPE, mapping.string())); - - Collections.shuffle(precisions, getRandom()); - for (int i = 0; i < precision.length; i++) { - precision[i] = precisions.get(i); - } - mapping = jsonBuilder().startObject().startObject(TYPE) - .startObject("properties").startObject("completion") - .field("type", "completion") - .startObject("context") - .startObject("location") - .field("type", "geo") - .array("precision", precision) - .endObject() - .endObject() - .endObject().endObject() - .endObject().endObject(); - assertAcked(client().admin().indices().preparePutMapping(INDEX).setType(TYPE).setSource(mapping.string()).get()); - } - - - public void testGeoField() throws Exception { - - XContentBuilder mapping = jsonBuilder(); - mapping.startObject(); - mapping.startObject(TYPE); - mapping.startObject("properties"); - mapping.startObject("pin"); - mapping.field("type", "geo_point"); - mapping.endObject(); - mapping.startObject(FIELD); - mapping.field("type", "completion"); - mapping.field("analyzer", "simple"); - - mapping.startObject("context"); - mapping.value(ContextBuilder.location("st", 5, true).field("pin").build()); - mapping.endObject(); - - mapping.endObject(); - mapping.endObject(); - mapping.endObject(); - mapping.endObject(); - - assertAcked(prepareCreate(INDEX).setSettings( - Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id) - ).addMapping(TYPE, mapping)); - - XContentBuilder 
source1 = jsonBuilder() - .startObject() - .latlon("pin", 52.529172, 13.407333) - .startObject(FIELD) - .array("input", "Hotel Amsterdam", "Amsterdam") - .field("output", "Hotel Amsterdam in Berlin") - .startObject("context").endObject() - .endObject() - .endObject(); - client().prepareIndex(INDEX, TYPE, "1").setSource(source1).execute().actionGet(); - - XContentBuilder source2 = jsonBuilder() - .startObject() - .latlon("pin", 52.363389, 4.888695) - .startObject(FIELD) - .array("input", "Hotel Berlin", "Berlin") - .field("output", "Hotel Berlin in Amsterdam") - .startObject("context").endObject() - .endObject() - .endObject(); - client().prepareIndex(INDEX, TYPE, "2").setSource(source2).execute().actionGet(); - - refresh(); - - String suggestionName = randomAsciiOfLength(10); - CompletionSuggestionBuilder context = SuggestBuilders.completionSuggestion(FIELD).text("h").size(10) - .contexts(new CompletionSuggestionBuilder.Contexts2x() - .addGeoLocation("st", 52.52, 13.4)); - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion(suggestionName, context)).execute().actionGet(); - - assertEquals(suggestResponse.getSuggest().size(), 1); - assertEquals("Hotel Amsterdam in Berlin", suggestResponse.getSuggest() - .getSuggestion(suggestionName).iterator().next().getOptions().iterator().next().getText().string()); - } - - public void testSimpleGeo() throws Exception { - String reinickendorf = "u337p3mp11e2"; - String pankow = "u33e0cyyjur4"; - String koepenick = "u33dm4f7fn40"; - String bernau = "u33etnjf1yjn"; - String berlin = "u33dc1v0xupz"; - String mitte = "u33dc0cpke4q"; - String steglitz = "u336m36rjh2p"; - String wilmersdorf = "u336wmw0q41s"; - String spandau = "u336uqek7gh6"; - String tempelhof = "u33d91jh3by0"; - String schoeneberg = "u336xdrkzbq7"; - String treptow = "u33d9unn7fp7"; - - double precision = 100.0; // meters - - assertAcked(prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping(TYPE, createMapping(TYPE, ContextBuilder.location("st").precision(precision).neighbors(true)))); - - String[] locations = { reinickendorf, pankow, koepenick, bernau, berlin, mitte, steglitz, wilmersdorf, spandau, tempelhof, - schoeneberg, treptow }; - - String[][] input = { { "pizza - reinickendorf", "pizza", "food" }, { "pizza - pankow", "pizza", "food" }, - { "pizza - koepenick", "pizza", "food" }, { "pizza - bernau", "pizza", "food" }, { "pizza - berlin", "pizza", "food" }, - { "pizza - mitte", "pizza - berlin mitte", "pizza", "food" }, - { "pizza - steglitz", "pizza - Berlin-Steglitz", "pizza", "food" }, { "pizza - wilmersdorf", "pizza", "food" }, - { "pizza - spandau", "spandau bei berlin", "pizza", "food" }, - { "pizza - tempelhof", "pizza - berlin-tempelhof", "pizza", "food" }, - { "pizza - schoeneberg", "pizza - schöneberg", "pizza - berlin schoeneberg", "pizza", "food" }, - { "pizza - treptow", "pizza", "food" } }; - - for (int i = 0; i < locations.length; i++) { - XContentBuilder source = jsonBuilder().startObject().startObject(FIELD).array("input", input[i]) - .startObject("context").field("st", locations[i]).endObject().field("payload", locations[i]) - .endObject().endObject(); - client().prepareIndex(INDEX, TYPE, "" + i).setSource(source).execute().actionGet(); - } - - refresh(); - - assertGeoSuggestionsInRange(berlin, "pizza", precision); - assertGeoSuggestionsInRange(reinickendorf, "pizza", precision); - assertGeoSuggestionsInRange(spandau, "pizza", 
precision); - assertGeoSuggestionsInRange(koepenick, "pizza", precision); - assertGeoSuggestionsInRange(schoeneberg, "pizza", precision); - assertGeoSuggestionsInRange(tempelhof, "pizza", precision); - assertGeoSuggestionsInRange(bernau, "pizza", precision); - assertGeoSuggestionsInRange(pankow, "pizza", precision); - assertGeoSuggestionsInRange(mitte, "pizza", precision); - assertGeoSuggestionsInRange(steglitz, "pizza", precision); - assertGeoSuggestionsInRange(mitte, "pizza", precision); - assertGeoSuggestionsInRange(wilmersdorf, "pizza", precision); - assertGeoSuggestionsInRange(treptow, "pizza", precision); - } - - public void testSimplePrefix() throws Exception { - assertAcked(prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping(TYPE, createMapping(TYPE, ContextBuilder.category("st")))); - - for (int i = 0; i < HEROS.length; i++) { - XContentBuilder source = jsonBuilder().startObject().startObject(FIELD).array("input", HEROS[i]) - .startObject("context").field("st", i%3).endObject() - .startObject("payload").field("group", i % 3).field("id", i).endObject() - .endObject().endObject(); - client().prepareIndex(INDEX, TYPE, "" + i).setSource(source).execute().actionGet(); - } - - refresh(); - - assertPrefixSuggestions(0, "a", "Afari, Jamal", "Adrian Corbo", "Adrian"); - assertPrefixSuggestions(0, "b", "Beaubier, Jeanne-Marie"); - assertPrefixSuggestions(0, "c", "Corbo, Adrian", "Crichton, Lady Jacqueline Falsworth"); - assertPrefixSuggestions(0, "mary", "Mary MacPherran \"Skeeter\"", "Mary MacPherran"); - assertPrefixSuggestions(0, "s", "Skeeter", "Smythe, Spencer", "Spencer Smythe", "Spencer"); - assertPrefixSuggestions(1, "s", "St. John", "St. John Allerdyce"); - assertPrefixSuggestions(2, "s", "Smythe, Alistair"); - assertPrefixSuggestions(1, "w", "Whitemane, Aelfyre"); - assertPrefixSuggestions(2, "w", "Whitemane, Kofi"); - } - - public void testTypeCategoryIsActuallyCalledCategory() throws Exception { - XContentBuilder mapping = jsonBuilder(); - mapping.startObject().startObject(TYPE).startObject("properties") - .startObject("suggest_field").field("type", "completion") - .startObject("context").startObject("color").field("type", "category").endObject().endObject() - .endObject() - .endObject().endObject().endObject(); - assertAcked(prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping(TYPE, mapping)); - XContentBuilder doc1 = jsonBuilder(); - doc1.startObject().startObject("suggest_field") - .field("input", "backpack_red") - .startObject("context").array("color", "red", "all_colors").endObject() - .endObject().endObject(); - XContentBuilder doc2 = jsonBuilder(); - doc2.startObject().startObject("suggest_field") - .field("input", "backpack_green") - .startObject("context").array("color", "green", "all_colors").endObject() - .endObject().endObject(); - - client().prepareIndex(INDEX, TYPE, "1") - .setSource(doc1).execute() - .actionGet(); - client().prepareIndex(INDEX, TYPE, "2") - .setSource(doc2).execute() - .actionGet(); - - refresh(); - getBackpackSuggestionAndCompare("all_colors", "backpack_red", "backpack_green"); - getBackpackSuggestionAndCompare("red", "backpack_red"); - getBackpackSuggestionAndCompare("green", "backpack_green"); - getBackpackSuggestionAndCompare("not_existing_color"); - - } - - private void getBackpackSuggestionAndCompare(String contextValue, String... 
expectedText) { - Set expected = new HashSet<>(); - Collections.addAll(expected, expectedText); - CompletionSuggestionBuilder context = SuggestBuilders.completionSuggestion("suggest_field") - .text("back").size(10).contexts( - new CompletionSuggestionBuilder.Contexts2x().addContextField("color", contextValue)); - SearchRequestBuilder suggestionRequest = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion("suggestion", context)); - SearchResponse suggestResponse = suggestionRequest.execute().actionGet(); - Suggest suggest = suggestResponse.getSuggest(); - assertEquals(suggest.size(), 1); - for (Suggestion> s : suggest) { - CompletionSuggestion suggestion = (CompletionSuggestion) s; - for (CompletionSuggestion.Entry entry : suggestion) { - List options = entry.getOptions(); - assertEquals(options.size(), expectedText.length); - for (CompletionSuggestion.Entry.Option option : options) { - assertTrue(expected.contains(option.getText().string())); - expected.remove(option.getText().string()); - } - } - } - } - - public void testBasic() throws Exception { - assertAcked(prepareCreate(INDEX). - setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping(TYPE, createMapping(TYPE, false, ContextBuilder.reference("st", "_type"), - ContextBuilder.reference("nd", "_type")))); - - client().prepareIndex(INDEX, TYPE, "1") - .setSource( - jsonBuilder().startObject().startObject(FIELD) - .startArray("input").value("my hotel").value("this hotel").endArray() - .startObject("context").endObject() - .field("payload", TYPE + "|" + TYPE).endObject().endObject()).execute() - .actionGet(); - - refresh(); - - assertDoubleFieldSuggestions(TYPE, TYPE, "m", "my hotel"); - } - - public void testSimpleField() throws Exception { - assertAcked(prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping(TYPE, createMapping(TYPE, ContextBuilder.reference("st", "category")))); - - for (int i = 0; i < HEROS.length; i++) { - client().prepareIndex(INDEX, TYPE, "" + i) - .setSource( - jsonBuilder().startObject().field("category", Integer.toString(i % 3)).startObject(FIELD) - .array("input", HEROS[i]) - .startObject("context").endObject().field("payload", Integer.toString(i % 3)) - .endObject().endObject()).execute().actionGet(); - } - - refresh(); - - assertFieldSuggestions("0", "a", "Afari, Jamal", "Adrian Corbo", "Adrian"); - assertFieldSuggestions("0", "b", "Beaubier, Jeanne-Marie"); - assertFieldSuggestions("0", "c", "Corbo, Adrian", "Crichton, Lady Jacqueline Falsworth"); - assertFieldSuggestions("0", "mary", "Mary MacPherran \"Skeeter\"", "Mary MacPherran"); - assertFieldSuggestions("0", "s", "Skeeter", "Smythe, Spencer", "Spencer Smythe", "Spencer"); - assertFieldSuggestions("1", "s", "St. John", "St. 
John Allerdyce"); - assertFieldSuggestions("2", "s", "Smythe, Alistair"); - assertFieldSuggestions("1", "w", "Whitemane, Aelfyre"); - assertFieldSuggestions("2", "w", "Whitemane, Kofi"); - - } - - // see issue #10987 - public void testEmptySuggestion() throws Exception { - String mapping = jsonBuilder() - .startObject() - .startObject(TYPE) - .startObject("properties") - .startObject(FIELD) - .field("type", "completion") - .startObject("context") - .startObject("type_context") - .field("path", "_type") - .field("type", "category") - .endObject() - .endObject() - .endObject() - .endObject() - .endObject() - .endObject() - .string(); - - assertAcked(client().admin().indices().prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping(TYPE, mapping).get()); - ensureGreen(); - - client().prepareIndex(INDEX, TYPE, "1").setSource(FIELD, "") - .setRefreshPolicy(IMMEDIATE).get(); - - } - - public void testMultiValueField() throws Exception { - assertAcked(prepareCreate(INDEX).setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping(TYPE, createMapping(TYPE, ContextBuilder.reference("st", "category")))); - - for (int i = 0; i < HEROS.length; i++) { - client().prepareIndex(INDEX, TYPE, "" + i) - .setSource( - jsonBuilder().startObject().startArray("category").value(Integer.toString(i % 3)).value("other").endArray() - .startObject(FIELD).array("input", HEROS[i]).startObject("context").endObject() - .field("payload", Integer.toString(i % 3)).endObject().endObject()).execute().actionGet(); - } - - refresh(); - - assertFieldSuggestions("0", "a", "Afari, Jamal", "Adrian Corbo", "Adrian"); - assertFieldSuggestions("0", "b", "Beaubier, Jeanne-Marie"); - assertFieldSuggestions("0", "c", "Corbo, Adrian", "Crichton, Lady Jacqueline Falsworth"); - assertFieldSuggestions("0", "mary", "Mary MacPherran \"Skeeter\"", "Mary MacPherran"); - assertFieldSuggestions("0", "s", "Skeeter", "Smythe, Spencer", "Spencer Smythe", "Spencer"); - assertFieldSuggestions("1", "s", "St. John", "St. 
John Allerdyce"); - assertFieldSuggestions("2", "s", "Smythe, Alistair"); - assertFieldSuggestions("1", "w", "Whitemane, Aelfyre"); - assertFieldSuggestions("2", "w", "Whitemane, Kofi"); - } - - public void testMultiContext() throws Exception { - assertAcked(prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping(TYPE, createMapping(TYPE, ContextBuilder.reference("st", "categoryA"), - ContextBuilder.reference("nd", "categoryB")))); - - for (int i = 0; i < HEROS.length; i++) { - client().prepareIndex(INDEX, TYPE, "" + i) - .setSource( - jsonBuilder().startObject().field("categoryA").value("" + (char) ('0' + (i % 3))).field("categoryB") - .value("" + (char) ('A' + (i % 3))).startObject(FIELD).array("input", HEROS[i]) - .startObject("context").endObject().field("payload", ((char) ('0' + (i % 3))) + "" + (char) ('A' + (i % 3))) - .endObject().endObject()).execute().actionGet(); - } - - refresh(); - - assertMultiContextSuggestions("0", "A", "a", "Afari, Jamal", "Adrian Corbo", "Adrian"); - assertMultiContextSuggestions("0", "A", "b", "Beaubier, Jeanne-Marie"); - assertMultiContextSuggestions("0", "A", "c", "Corbo, Adrian", "Crichton, Lady Jacqueline Falsworth"); - assertMultiContextSuggestions("0", "A", "mary", "Mary MacPherran \"Skeeter\"", "Mary MacPherran"); - assertMultiContextSuggestions("0", "A", "s", "Skeeter", "Smythe, Spencer", "Spencer Smythe", "Spencer"); - assertMultiContextSuggestions("1", "B", "s", "St. John", "St. John Allerdyce"); - assertMultiContextSuggestions("2", "C", "s", "Smythe, Alistair"); - assertMultiContextSuggestions("1", "B", "w", "Whitemane, Aelfyre"); - assertMultiContextSuggestions("2", "C", "w", "Whitemane, Kofi"); - } - - public void testMultiContextWithFuzzyLogic() throws Exception { - assertAcked(prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping(TYPE, - createMapping(TYPE, ContextBuilder.reference("st", "categoryA"), ContextBuilder.reference("nd", "categoryB")))); - - for (int i = 0; i < HEROS.length; i++) { - String source = jsonBuilder().startObject().field("categoryA", "" + (char) ('0' + (i % 3))) - .field("categoryB", "" + (char) ('a' + (i % 3))).startObject(FIELD).array("input", HEROS[i]) - .startObject("context").endObject().startObject("payload").field("categoryA", "" + (char) ('0' + (i % 3))) - .field("categoryB", "" + (char) ('a' + (i % 3))).endObject().endObject().endObject().string(); - client().prepareIndex(INDEX, TYPE, "" + i).setSource(source).execute().actionGet(); - } - - refresh(); - - String[] prefix1 = { "0", "1", "2" }; - String[] prefix2 = { "a", "b", "c" }; - String[] prefix3 = { "0", "1" }; - String[] prefix4 = { "a", "b" }; - - assertContextWithFuzzySuggestions(prefix1, prefix2, "mary", "MacKenzie, Al", "MacPherran, Mary", "MacPherran, Mary \"Skeeter\"", - "MacTaggert, Moira", "Mary MacPherran", "Mary MacPherran \"Skeeter\""); - assertContextWithFuzzySuggestions(prefix1, prefix2, "mac", "Mikhail", "Mary MacPherran \"Skeeter\"", "MacTaggert, Moira", - "Moira MacTaggert", "Moira", "MacKenzie, Al", "Mary MacPherran", "Mikhail Rasputin", "MacPherran, Mary", - "MacPherran, Mary \"Skeeter\""); - assertContextWithFuzzySuggestions(prefix3, prefix4, "mary", "MacPherran, Mary", "MacPherran, Mary \"Skeeter\"", - "MacTaggert, Moira", "Mary MacPherran", "Mary MacPherran \"Skeeter\""); - assertContextWithFuzzySuggestions(prefix3, prefix4, "mac", "MacPherran, Mary", "MacPherran, Mary \"Skeeter\"", 
"MacTaggert, Moira", - "Mary MacPherran", "Mary MacPherran \"Skeeter\"", "Mikhail", "Mikhail Rasputin", "Moira", "Moira MacTaggert"); - } - - public void testSimpleType() throws Exception { - String[] types = { TYPE + "A", TYPE + "B", TYPE + "C" }; - - CreateIndexRequestBuilder createIndexRequestBuilder = prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)); - for (String type : types) { - createIndexRequestBuilder.addMapping(type, createMapping(type, ContextBuilder.reference("st", "_type"))); - } - assertAcked(createIndexRequestBuilder); - - for (int i = 0; i < HEROS.length; i++) { - String type = types[i % types.length]; - client().prepareIndex(INDEX, type, "" + i) - .setSource( - jsonBuilder().startObject().startObject(FIELD).array("input", HEROS[i]) - .startObject("context").endObject().field("payload", type).endObject().endObject()).execute().actionGet(); - } - - refresh(); - - assertFieldSuggestions(types[0], "a", "Afari, Jamal", "Adrian Corbo", "Adrian"); - assertFieldSuggestions(types[0], "b", "Beaubier, Jeanne-Marie"); - assertFieldSuggestions(types[0], "c", "Corbo, Adrian", "Crichton, Lady Jacqueline Falsworth"); - assertFieldSuggestions(types[0], "mary", "Mary MacPherran \"Skeeter\"", "Mary MacPherran"); - assertFieldSuggestions(types[0], "s", "Skeeter", "Smythe, Spencer", "Spencer Smythe", "Spencer"); - assertFieldSuggestions(types[1], "s", "St. John", "St. John Allerdyce"); - assertFieldSuggestions(types[2], "s", "Smythe, Alistair"); - assertFieldSuggestions(types[1], "w", "Whitemane, Aelfyre"); - assertFieldSuggestions(types[2], "w", "Whitemane, Kofi"); - } - - // issue 5525, default location didnt work with lat/lon map, and did not set default location appropriately - public void testGeoContextDefaultMapping() throws Exception { - GeoPoint berlinAlexanderplatz = GeoPoint.fromGeohash("u33dc1"); - - XContentBuilder xContentBuilder = jsonBuilder().startObject() - .startObject("poi").startObject("properties").startObject("suggest") - .field("type", "completion") - .startObject("context").startObject("location") - .field("type", "geo") - .field("precision", "500m") - .startObject("default").field("lat", berlinAlexanderplatz.lat()).field("lon", berlinAlexanderplatz.lon()).endObject() - .endObject().endObject() - .endObject().endObject().endObject() - .endObject(); - - assertAcked(prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping("poi", xContentBuilder)); - - index(INDEX, "poi", "1", jsonBuilder().startObject() - .startObject("suggest").field("input", "Berlin Alexanderplatz").endObject().endObject()); - refresh(); - - final String suggestionName = "suggestion"; - CompletionSuggestionBuilder suggestionBuilder = SuggestBuilders.completionSuggestion("suggest").text("b").size(10) - .contexts(new CompletionSuggestionBuilder.Contexts2x().addGeoLocation("location", - berlinAlexanderplatz.lat(), berlinAlexanderplatz.lon())); - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion(suggestionName, suggestionBuilder)).get(); - assertSuggestion(suggestResponse.getSuggest(), 0, suggestionName, "Berlin Alexanderplatz"); - } - - // issue 5525, setting the path of a category context and then indexing a document without that field returned an error - public void testThatMissingPrefixesForContextReturnException() throws Exception { - XContentBuilder xContentBuilder = jsonBuilder().startObject() - 
.startObject("service").startObject("properties").startObject("suggest") - .field("type", "completion") - .startObject("context").startObject("color") - .field("type", "category") - .field("path", "color") - .endObject().endObject() - .endObject().endObject().endObject() - .endObject(); - - assertAcked(prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping("service", xContentBuilder)); - - // now index a document with color field - index(INDEX, "service", "1", jsonBuilder().startObject() - .field("color", "red").startObject("suggest").field("input", "backback").endObject().endObject()); - - // now index a document without a color field - try { - index(INDEX, "service", "2", jsonBuilder().startObject() - .startObject("suggest").field("input", "backback").endObject().endObject()); - fail("index operation was not supposed to be successful"); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("one or more prefixes needed")); - } - } - - public void testThatCategoryDefaultWorks() throws Exception { - XContentBuilder xContentBuilder = jsonBuilder().startObject() - .startObject("item").startObject("properties").startObject("suggest") - .field("type", "completion") - .startObject("context").startObject("color") - .field("type", "category").field("default", "red") - .endObject().endObject() - .endObject().endObject().endObject() - .endObject(); - - assertAcked(prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping("item", xContentBuilder)); - - index(INDEX, "item", "1", jsonBuilder().startObject() - .startObject("suggest").field("input", "Hoodie red").endObject().endObject()); - index(INDEX, "item", "2", jsonBuilder().startObject() - .startObject("suggest").field("input", "Hoodie blue").startObject("context").field("color", "blue") - .endObject().endObject().endObject()); - refresh(); - - final String suggestionName = "suggestion"; - CompletionSuggestionBuilder suggestionBuilder = SuggestBuilders.completionSuggestion("suggest").text("h").size(10) - .contexts(new CompletionSuggestionBuilder.Contexts2x().addContextField("color", "red")); - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion(suggestionName, suggestionBuilder)).get(); - assertSuggestion(suggestResponse.getSuggest(), 0, suggestionName, "Hoodie red"); - } - - public void testThatDefaultCategoryAndPathWorks() throws Exception { - XContentBuilder xContentBuilder = jsonBuilder().startObject() - .startObject("item").startObject("properties").startObject("suggest") - .field("type", "completion") - .startObject("context").startObject("color") - .field("type", "category") - .field("default", "red") - .field("path", "color") - .endObject().endObject() - .endObject().endObject().endObject() - .endObject(); - - assertAcked(prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping("item", xContentBuilder)); - - index(INDEX, "item", "1", jsonBuilder().startObject() - .startObject("suggest").field("input", "Hoodie red").endObject().endObject()); - index(INDEX, "item", "2", jsonBuilder().startObject() - .startObject("suggest").field("input", "Hoodie blue").endObject().field("color", "blue").endObject()); - refresh(); - - final String suggestionName = "suggestion"; - CompletionSuggestionBuilder suggestionBuilder = 
SuggestBuilders.completionSuggestion("suggest").text("h").size(10) - .contexts(new CompletionSuggestionBuilder.Contexts2x().addContextField("color", "red")); - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion(suggestionName, suggestionBuilder)).get(); - assertSuggestion(suggestResponse.getSuggest(), 0, suggestionName, "Hoodie red"); - } - - public void testThatGeoPrecisionIsWorking() throws Exception { - XContentBuilder xContentBuilder = jsonBuilder().startObject() - .startObject("item").startObject("properties").startObject("suggest") - .field("type", "completion") - .startObject("context").startObject("location") - .field("type", "geo") - .field("precision", 4) // this means geo hashes with a length of four are used, like u345 - .endObject().endObject() - .endObject().endObject().endObject() - .endObject(); - - assertAcked(prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping("item", xContentBuilder)); - - // lets create some locations by geohashes in different cells with the precision 4 - // this means, that poelchaustr is not a neighour to alexanderplatz, but they share the same prefix until the fourth char! - GeoPoint alexanderplatz = GeoPoint.fromGeohash("u33dc1"); - GeoPoint poelchaustr = GeoPoint.fromGeohash("u33du5"); - GeoPoint dahlem = GeoPoint.fromGeohash("u336q"); // berlin dahlem, should be included with that precision - GeoPoint middleOfNoWhere = GeoPoint.fromGeohash("u334"); // location for west from berlin, should not be included - - index(INDEX, "item", "1", jsonBuilder().startObject() - .startObject("suggest").field("input", "Berlin Alexanderplatz").field("weight", 3).startObject("context") - .startObject("location").field("lat", alexanderplatz.lat()).field("lon", alexanderplatz.lon()).endObject() - .endObject().endObject().endObject()); - index(INDEX, "item", "2", jsonBuilder().startObject().startObject("suggest").field("input", "Berlin Poelchaustr.") - .field("weight", 2).startObject("context").startObject("location").field("lat", poelchaustr.lat()) - .field("lon", poelchaustr.lon()).endObject().endObject().endObject().endObject()); - index(INDEX, "item", "3", jsonBuilder().startObject().startObject("suggest").field("input", "Berlin Far Away") - .field("weight", 1).startObject("context").startObject("location").field("lat", middleOfNoWhere.lat()) - .field("lon", middleOfNoWhere.lon()).endObject().endObject().endObject().endObject()); - index(INDEX, "item", "4", jsonBuilder().startObject().startObject("suggest") - .field("input", "Berlin Dahlem").field("weight", 1).startObject("context").startObject("location") - .field("lat", dahlem.lat()).field("lon", dahlem.lon()).endObject().endObject().endObject().endObject()); - refresh(); - - final String suggestionName = "suggestion"; - CompletionSuggestionBuilder suggestionBuilder = SuggestBuilders.completionSuggestion("suggest") - .text("b").size(10).contexts(new CompletionSuggestionBuilder.Contexts2x().addGeoLocation("location", - alexanderplatz.lat(), alexanderplatz.lon())); - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion(suggestionName, suggestionBuilder)).get(); - assertSuggestion(suggestResponse.getSuggest(), 0, suggestionName, - "Berlin Alexanderplatz", "Berlin Poelchaustr.", "Berlin Dahlem"); - } - - public void testThatNeighborsCanBeExcluded() throws Exception { - XContentBuilder xContentBuilder = 
jsonBuilder().startObject() - .startObject("item").startObject("properties").startObject("suggest") - .field("type", "completion") - .startObject("context").startObject("location") - .field("type", "geo") - .field("precision", 6) - .field("neighbors", false) - .endObject().endObject() - .endObject().endObject().endObject() - .endObject(); - - assertAcked(prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping("item", xContentBuilder)); - - GeoPoint alexanderplatz = GeoPoint.fromGeohash("u33dc1"); - // does not look like it, but is a direct neighbor - // this test would fail if the precision were set to 4, as then both cells would be the same, u33d - GeoPoint cellNeighbourOfAlexanderplatz = GeoPoint.fromGeohash("u33dbc"); - - index(INDEX, "item", "1", jsonBuilder().startObject() - .startObject("suggest").field("input", "Berlin Alexanderplatz") - .field("weight", 3).startObject("context").startObject("location") - .field("lat", alexanderplatz.lat()).field("lon", alexanderplatz.lon()) - .endObject().endObject().endObject().endObject()); - index(INDEX, "item", "2", jsonBuilder().startObject() - .startObject("suggest").field("input", "Berlin Hackescher Markt") - .field("weight", 2).startObject("context").startObject("location") - .field("lat", cellNeighbourOfAlexanderplatz.lat()).field("lon", cellNeighbourOfAlexanderplatz.lon()) - .endObject().endObject().endObject().endObject()); - refresh(); - - final String suggestionName = "suggestion"; - CompletionSuggestionBuilder suggestionBuilder = SuggestBuilders.completionSuggestion("suggest").text("b").size(10) - .contexts(new CompletionSuggestionBuilder.Contexts2x() - .addGeoLocation("location", alexanderplatz.lat(), alexanderplatz.lon())); - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion(suggestionName, suggestionBuilder)).get(); - assertSuggestion(suggestResponse.getSuggest(), 0, suggestionName, "Berlin Alexanderplatz"); - } - - public void testThatGeoPathCanBeSelected() throws Exception { - XContentBuilder xContentBuilder = jsonBuilder().startObject() - .startObject("item").startObject("properties").startObject("suggest") - .field("type", "completion") - .startObject("context").startObject("location") - .field("type", "geo") - .field("precision", "5m") - .field("path", "loc") - .endObject().endObject() - .endObject().endObject().endObject() - .endObject(); - - assertAcked(prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping("item", xContentBuilder)); - - GeoPoint alexanderplatz = GeoPoint.fromGeohash("u33dc1"); - index(INDEX, "item", "1", jsonBuilder().startObject() - .startObject("suggest").field("input", "Berlin Alexanderplatz").endObject() - .startObject("loc").field("lat", alexanderplatz.lat()).field("lon", alexanderplatz.lon()).endObject() - .endObject()); - refresh(); - - final String suggestionName = "suggestion"; - CompletionSuggestionBuilder suggestionBuilder = SuggestBuilders.completionSuggestion("suggest").text("b").size(10) - .contexts(new CompletionSuggestionBuilder.Contexts2x() - .addGeoLocation("location", alexanderplatz.lat(), alexanderplatz.lon())); - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion(suggestionName, suggestionBuilder)).get(); - assertSuggestion(suggestResponse.getSuggest(), 0, suggestionName, "Berlin Alexanderplatz"); - } - - public void 
testThatPrecisionIsRequired() throws Exception { - XContentBuilder xContentBuilder = jsonBuilder().startObject() - .startObject("item").startObject("properties").startObject("suggest") - .field("type", "completion") - .startObject("context").startObject("location") - .field("type", "geo") - .field("path", "loc") - .endObject().endObject() - .endObject().endObject().endObject() - .endObject(); - - try { - assertAcked(prepareCreate(INDEX) - .setSettings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, PRE2X_VERSION.id)) - .addMapping("item", xContentBuilder)); - fail("expected MapperParsingException"); - } catch (MapperParsingException expected) {} - } - - public void assertGeoSuggestionsInRange(String location, String suggest, double precision) throws IOException { - String suggestionName = randomAsciiOfLength(10); - CompletionSuggestionBuilder context = SuggestBuilders.completionSuggestion(FIELD).text(suggest).size(10) - .contexts(new CompletionSuggestionBuilder.Contexts2x().addGeoLocation("st", location)); - SearchRequestBuilder suggestionRequest = client().prepareSearch(INDEX) - .suggest(new SuggestBuilder().addSuggestion(suggestionName, context)); - SearchResponse suggestResponse = suggestionRequest.execute().actionGet(); - - Suggest suggest2 = suggestResponse.getSuggest(); - assertTrue(suggest2.iterator().hasNext()); - for (Suggestion> s : suggest2) { - CompletionSuggestion suggestion = (CompletionSuggestion) s; - assertTrue(suggestion.iterator().hasNext()); - for (CompletionSuggestion.Entry entry : suggestion) { - List options = entry.getOptions(); - assertTrue(options.iterator().hasNext()); - for (CompletionSuggestion.Entry.Option option : options) { - String target = option.getPayloadAsString(); - assertDistance(location, target, Matchers.lessThanOrEqualTo(precision)); - } - } - } - } - - public void assertPrefixSuggestions(long prefix, String suggest, String... hits) throws IOException { - String suggestionName = randomAsciiOfLength(10); - CompletionSuggestionBuilder context = SuggestBuilders.completionSuggestion(FIELD).text(suggest) - .size(hits.length + 1).contexts(new CompletionSuggestionBuilder.Contexts2x() - .addCategory("st", Long.toString(prefix))); - SearchResponse suggestResponse = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion(suggestionName, context)).execute().actionGet(); - ArrayList suggestions = new ArrayList<>(); - Suggest suggest2 = suggestResponse.getSuggest(); - assertTrue(suggest2.iterator().hasNext()); - for (Suggestion> s : suggest2) { - CompletionSuggestion suggestion = (CompletionSuggestion) s; - for (CompletionSuggestion.Entry entry : suggestion) { - List options = entry.getOptions(); - for (CompletionSuggestion.Entry.Option option : options) { - Map payload = option.getPayloadAsMap(); - int group = (Integer) payload.get("group"); - String text = option.getText().string(); - assertEquals(prefix, group); - suggestions.add(text); - } - } - } - assertSuggestionsMatch(suggestions, hits); - } - - public void assertContextWithFuzzySuggestions(String[] prefix1, String[] prefix2, String suggest, String... 
hits) - throws IOException { - String suggestionName = randomAsciiOfLength(10); - CompletionSuggestionBuilder context = SuggestBuilders.completionSuggestion(FIELD).prefix(suggest, Fuzziness.TWO) - .size(hits.length + 10).contexts( - new CompletionSuggestionBuilder.Contexts2x().addContextField("st", prefix1).addContextField("nd", prefix2)); - SearchRequestBuilder suggestionRequest = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion(suggestionName, context)); - SearchResponse suggestResponse = suggestionRequest.execute().actionGet(); - - ArrayList suggestions = new ArrayList<>(); - - Suggest suggest2 = suggestResponse.getSuggest(); - assertTrue(suggest2.iterator().hasNext()); - for (Suggestion> s : suggest2) { - CompletionSuggestion suggestion = (CompletionSuggestion) s; - for (CompletionSuggestion.Entry entry : suggestion) { - List options = entry.getOptions(); - for (CompletionSuggestion.Entry.Option option : options) { - Map payload = option.getPayloadAsMap(); - String text = option.getText().string(); - assertThat(prefix1, Matchers.hasItemInArray(payload.get("categoryA"))); - assertThat(prefix2, Matchers.hasItemInArray(payload.get("categoryB"))); - suggestions.add(text); - } - } - } - - assertSuggestionsMatch(suggestions, hits); - } - - public void assertFieldSuggestions(String value, String suggest, String... hits) throws IOException { - String suggestionName = randomAsciiOfLength(10); - CompletionSuggestionBuilder context = SuggestBuilders.completionSuggestion(FIELD).text(suggest).size(10) - .contexts(new CompletionSuggestionBuilder.Contexts2x().addContextField("st", value)); - SearchRequestBuilder suggestionRequest = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion(suggestionName, context)); - SearchResponse suggestResponse = suggestionRequest.execute().actionGet(); - - ArrayList suggestions = new ArrayList<>(); - - Suggest suggest2 = suggestResponse.getSuggest(); - for (Suggestion> s : suggest2) { - CompletionSuggestion suggestion = (CompletionSuggestion) s; - for (CompletionSuggestion.Entry entry : suggestion) { - List options = entry.getOptions(); - for (CompletionSuggestion.Entry.Option option : options) { - String payload = option.getPayloadAsString(); - String text = option.getText().string(); - assertEquals(value, payload); - suggestions.add(text); - } - } - } - assertSuggestionsMatch(suggestions, hits); - } - - public void assertDoubleFieldSuggestions(String field1, String field2, String suggest, String... 
hits) throws IOException { - String suggestionName = randomAsciiOfLength(10); - CompletionSuggestionBuilder context = SuggestBuilders.completionSuggestion(FIELD).text(suggest).size(10) - .contexts(new CompletionSuggestionBuilder.Contexts2x() - .addContextField("st", field1).addContextField("nd", field2)); - SearchRequestBuilder suggestionRequest = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion(suggestionName, context)); - SearchResponse suggestResponse = suggestionRequest.execute().actionGet(); - ArrayList suggestions = new ArrayList<>(); - - Suggest suggest2 = suggestResponse.getSuggest(); - for (Suggestion> s : suggest2) { - CompletionSuggestion suggestion = (CompletionSuggestion) s; - for (CompletionSuggestion.Entry entry : suggestion) { - List options = entry.getOptions(); - for (CompletionSuggestion.Entry.Option option : options) { - String payload = option.getPayloadAsString(); - String text = option.getText().string(); - assertEquals(field1 + "|" + field2, payload); - suggestions.add(text); - } - } - } - assertSuggestionsMatch(suggestions, hits); - } - - public void assertMultiContextSuggestions(String value1, String value2, String suggest, String... hits) throws IOException { - String suggestionName = randomAsciiOfLength(10); - CompletionSuggestionBuilder context = SuggestBuilders.completionSuggestion(FIELD).text(suggest).size(10) - .contexts(new CompletionSuggestionBuilder.Contexts2x() - .addContextField("st", value1).addContextField("nd", value2)); - - SearchRequestBuilder suggestionRequest = client().prepareSearch(INDEX).suggest( - new SuggestBuilder().addSuggestion(suggestionName, context)); - SearchResponse suggestResponse = suggestionRequest.execute().actionGet(); - ArrayList suggestions = new ArrayList<>(); - - Suggest suggest2 = suggestResponse.getSuggest(); - for (Suggestion> s : suggest2) { - CompletionSuggestion suggestion = (CompletionSuggestion) s; - for (CompletionSuggestion.Entry entry : suggestion) { - List options = entry.getOptions(); - for (CompletionSuggestion.Entry.Option option : options) { - String payload = option.getPayloadAsString(); - String text = option.getText().string(); - assertEquals(value1 + value2, payload); - suggestions.add(text); - } - } - } - assertSuggestionsMatch(suggestions, hits); - } - - private void assertSuggestionsMatch(List suggestions, String... hits) { - boolean[] suggested = new boolean[hits.length]; - Arrays.sort(hits); - Arrays.fill(suggested, false); - int numSuggestions = 0; - - for (String suggestion : suggestions) { - int hitpos = Arrays.binarySearch(hits, suggestion); - - assertEquals(hits[hitpos], suggestion); - assertTrue(hitpos >= 0); - assertTrue(!suggested[hitpos]); - - suggested[hitpos] = true; - numSuggestions++; - - } - assertEquals(hits.length, numSuggestions); - } - - private XContentBuilder createMapping(String type, ContextBuilder... context) throws IOException { - return createMapping(type, false, context); - } - - private XContentBuilder createMapping(String type, boolean preserveSeparators, ContextBuilder... context) - throws IOException { - return createMapping(type, "simple", "simple", true, preserveSeparators, true, context); - } - - private XContentBuilder createMapping(String type, String indexAnalyzer, String searchAnalyzer, boolean payloads, - boolean preserveSeparators, boolean preservePositionIncrements, - ContextBuilder... 
contexts) throws IOException { - XContentBuilder mapping = jsonBuilder(); - mapping.startObject(); - mapping.startObject(type); - mapping.startObject("properties"); - mapping.startObject(FIELD); - mapping.field("type", "completion"); - mapping.field("analyzer", indexAnalyzer); - mapping.field("search_analyzer", searchAnalyzer); - mapping.field("payloads", payloads); - mapping.field("preserve_separators", preserveSeparators); - mapping.field("preserve_position_increments", preservePositionIncrements); - - mapping.startObject("context"); - for (ContextBuilder context : contexts) { - mapping.value(context.build()); - } - mapping.endObject(); - - mapping.endObject(); - mapping.endObject(); - mapping.endObject(); - mapping.endObject(); - return mapping; - } -} diff --git a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java index acf14bc45e2..2375b7519cc 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/DedicatedClusterSnapshotRestoreIT.java @@ -47,7 +47,6 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.zen.ElectMasterService; import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.indices.recovery.RecoveryState; -import org.elasticsearch.indices.ttl.IndicesTTLService; import org.elasticsearch.node.Node; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.RepositoryMissingException; @@ -69,7 +68,6 @@ import java.util.Arrays; import java.util.Collection; import java.util.EnumSet; import java.util.List; -import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; @@ -113,12 +111,9 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest logger.info("--> set test persistent setting"); client.admin().cluster().prepareUpdateSettings().setPersistentSettings( Settings.builder() - .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 2) - .put(IndicesTTLService.INDICES_TTL_INTERVAL_SETTING.getKey(), random, TimeUnit.MINUTES)) + .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 2)) .execute().actionGet(); - assertThat(client.admin().cluster().prepareState().setRoutingTable(false).setNodes(false).execute().actionGet().getState() - .getMetaData().persistentSettings().getAsTime(IndicesTTLService.INDICES_TTL_INTERVAL_SETTING.getKey(), TimeValue.timeValueMinutes(1)).millis(), equalTo(TimeValue.timeValueMinutes(random).millis())); assertThat(client.admin().cluster().prepareState().setRoutingTable(false).setNodes(false).execute().actionGet().getState() .getMetaData().persistentSettings().getAsInt(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), -1), equalTo(2)); @@ -136,11 +131,10 @@ public class DedicatedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTest logger.info("--> clean the test persistent setting"); client.admin().cluster().prepareUpdateSettings().setPersistentSettings( Settings.builder() - .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 1) - .put(IndicesTTLService.INDICES_TTL_INTERVAL_SETTING.getKey(), TimeValue.timeValueMinutes(1))) + .put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), 1)) .execute().actionGet(); 
assertThat(client.admin().cluster().prepareState().setRoutingTable(false).setNodes(false).execute().actionGet().getState() - .getMetaData().persistentSettings().getAsTime(IndicesTTLService.INDICES_TTL_INTERVAL_SETTING.getKey(), TimeValue.timeValueMinutes(1)).millis(), equalTo(TimeValue.timeValueMinutes(1).millis())); + .getMetaData().persistentSettings().getAsInt(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), -1), equalTo(1)); stopNode(secondNode); assertThat(client.admin().cluster().prepareHealth().setWaitForNodes("1").get().isTimedOut(), equalTo(false)); diff --git a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index f750175889b..4765292be1d 100644 --- a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -69,7 +69,6 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.InvalidIndexNameException; -import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.ingest.IngestTestPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.IndexId; @@ -95,6 +94,7 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; +import java.util.stream.Stream; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; @@ -446,8 +446,8 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas logger.info("--> creating test template"); assertThat(client.admin().indices().preparePutTemplate("test-template").setPatterns(Collections.singletonList("te*")).addMapping("test-mapping", XContentFactory.jsonBuilder().startObject().startObject("test-mapping").startObject("properties") - .startObject("field1").field("type", "string").field("store", "yes").endObject() - .startObject("field2").field("type", "string").field("store", "yes").field("index", "not_analyzed").endObject() + .startObject("field1").field("type", "text").field("store", "yes").endObject() + .startObject("field2").field("type", "keyword").field("store", "yes").endObject() .endObject().endObject().endObject()).get().isAcknowledged(), equalTo(true)); logger.info("--> snapshot"); @@ -487,8 +487,8 @@ public class SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas if(testTemplate) { logger.info("--> creating test template"); assertThat(client.admin().indices().preparePutTemplate("test-template").setPatterns(Collections.singletonList("te*")).addMapping("test-mapping", XContentFactory.jsonBuilder().startObject().startObject("test-mapping").startObject("properties") - .startObject("field1").field("type", "string").field("store", "yes").endObject() - .startObject("field2").field("type", "string").field("store", "yes").field("index", "not_analyzed").endObject() + .startObject("field1").field("type", "text").field("store", "yes").endObject() + .startObject("field2").field("type", "keyword").field("store", "yes").endObject() .endObject().endObject().endObject()).get().isAcknowledged(), equalTo(true)); } @@ -1066,6 +1066,44 @@ public class 
SharedClusterSnapshotRestoreIT extends AbstractSnapshotIntegTestCas assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponse.getSnapshotInfo().totalShards())); } + public void testSnapshotWithMissingShardLevelIndexFile() throws Exception { + Path repo = randomRepoPath(); + logger.info("--> creating repository at {}", repo.toAbsolutePath()); + assertAcked(client().admin().cluster().preparePutRepository("test-repo").setType("fs").setSettings( + Settings.builder().put("location", repo).put("compress", false))); + + createIndex("test-idx-1", "test-idx-2"); + logger.info("--> indexing some data"); + indexRandom(true, + client().prepareIndex("test-idx-1", "doc").setSource("foo", "bar"), + client().prepareIndex("test-idx-2", "doc").setSource("foo", "bar")); + + logger.info("--> creating snapshot"); + client().admin().cluster().prepareCreateSnapshot("test-repo", "test-snap-1") + .setWaitForCompletion(true).setIndices("test-idx-*").get(); + + logger.info("--> deleting shard level index file"); + try (Stream files = Files.list(repo.resolve("indices"))) { + files.forEach(indexPath -> + IOUtils.deleteFilesIgnoringExceptions(indexPath.resolve("0").resolve("index-0")) + ); + } + + logger.info("--> creating another snapshot"); + CreateSnapshotResponse createSnapshotResponse = + client().admin().cluster().prepareCreateSnapshot("test-repo", "test-snap-2") + .setWaitForCompletion(true).setIndices("test-idx-1").get(); + assertThat(createSnapshotResponse.getSnapshotInfo().successfulShards(), greaterThan(0)); + assertEquals(createSnapshotResponse.getSnapshotInfo().successfulShards(), createSnapshotResponse.getSnapshotInfo().totalShards()); + + logger.info("--> restoring the first snapshot, the repository should not have lost any shard data despite deleting index-N, " + + "because it should have iterated over the snap-*.data files as backup"); + client().admin().indices().prepareDelete("test-idx-1", "test-idx-2").get(); + RestoreSnapshotResponse restoreSnapshotResponse = + client().admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap-1").setWaitForCompletion(true).get(); + assertEquals(0, restoreSnapshotResponse.getRestoreInfo().failedShards()); + } + public void testSnapshotClosedIndex() throws Exception { Client client = client(); diff --git a/core/src/test/java/org/elasticsearch/timestamp/SimpleTimestampIT.java b/core/src/test/java/org/elasticsearch/timestamp/SimpleTimestampIT.java deleted file mode 100644 index e64a695d888..00000000000 --- a/core/src/test/java/org/elasticsearch/timestamp/SimpleTimestampIT.java +++ /dev/null @@ -1,146 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.timestamp; - -import org.elasticsearch.Version; -import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; -import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; -import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.cluster.metadata.MappingMetaData; -import org.elasticsearch.common.Priority; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; - -import java.util.Arrays; -import java.util.Collection; -import java.util.Locale; - -import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.lessThanOrEqualTo; -import static org.hamcrest.Matchers.notNullValue; - -public class SimpleTimestampIT extends ESIntegTestCase { - - private static final Settings BW_SETTINGS = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0).build(); - - @Override - protected Collection> nodePlugins() { - return Arrays.asList(InternalSettingsPlugin.class); - } - - public void testSimpleTimestamp() throws Exception { - client().admin().indices().prepareCreate("test") - .setSettings(BW_SETTINGS) - .addMapping("type1", jsonBuilder().startObject().startObject("type1").startObject("_timestamp").field("enabled", true).endObject().endObject().endObject()) - .execute().actionGet(); - client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet(); - - logger.info("--> check with automatic timestamp"); - long now1 = System.currentTimeMillis(); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1").setRefreshPolicy(IMMEDIATE).get(); - long now2 = System.currentTimeMillis(); - - // non realtime get (stored) - GetResponse getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_timestamp").setRealtime(randomBoolean()).execute().actionGet(); - long timestamp = ((Number) getResponse.getField("_timestamp").getValue()).longValue(); - assertThat(timestamp, greaterThanOrEqualTo(now1)); - assertThat(timestamp, lessThanOrEqualTo(now2)); - // verify its the same timestamp when going the replica - getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_timestamp").setRealtime(randomBoolean()).execute().actionGet(); - assertThat(((Number) getResponse.getField("_timestamp").getValue()).longValue(), equalTo(timestamp)); - - logger.info("--> check with custom timestamp (numeric)"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1").setTimestamp("10").setRefreshPolicy(IMMEDIATE).get(); - - getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_timestamp").setRealtime(false).execute().actionGet(); - timestamp = ((Number) getResponse.getField("_timestamp").getValue()).longValue(); - assertThat(timestamp, equalTo(10L)); - // verify its the same timestamp when going the replica - getResponse = client().prepareGet("test", "type1", 
"1").setStoredFields("_timestamp").setRealtime(false).execute().actionGet(); - assertThat(((Number) getResponse.getField("_timestamp").getValue()).longValue(), equalTo(timestamp)); - - logger.info("--> check with custom timestamp (string)"); - client().prepareIndex("test", "type1", "1").setSource("field1", "value1").setTimestamp("1970-01-01T00:00:00.020") - .setRefreshPolicy(IMMEDIATE).get(); - - getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_timestamp").setRealtime(false).execute().actionGet(); - timestamp = ((Number) getResponse.getField("_timestamp").getValue()).longValue(); - assertThat(timestamp, equalTo(20L)); - // verify its the same timestamp when going the replica - getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_timestamp").setRealtime(false).execute().actionGet(); - assertThat(((Number) getResponse.getField("_timestamp").getValue()).longValue(), equalTo(timestamp)); - } - - // issue #5053 - public void testThatUpdatingMappingShouldNotRemoveTimestampConfiguration() throws Exception { - String index = "foo"; - String type = "mytype"; - - XContentBuilder builder = jsonBuilder().startObject().startObject("_timestamp").field("enabled", true).endObject().endObject(); - assertAcked(client().admin().indices().prepareCreate(index).setSettings(BW_SETTINGS).addMapping(type, builder)); - - // check mapping again - assertTimestampMappingEnabled(index, type, true); - - // update some field in the mapping - XContentBuilder updateMappingBuilder = jsonBuilder().startObject().startObject("properties").startObject("otherField").field("type", "text").endObject().endObject().endObject(); - PutMappingResponse putMappingResponse = client().admin().indices().preparePutMapping(index).setType(type).setSource(updateMappingBuilder).get(); - assertAcked(putMappingResponse); - - // make sure timestamp field is still in mapping - assertTimestampMappingEnabled(index, type, true); - } - - public void testThatTimestampCanBeSwitchedOnAndOff() throws Exception { - String index = "foo"; - String type = "mytype"; - - XContentBuilder builder = jsonBuilder().startObject().startObject("_timestamp").field("enabled", true).endObject().endObject(); - assertAcked(client().admin().indices().prepareCreate(index).setSettings(BW_SETTINGS).addMapping(type, builder)); - - // check mapping again - assertTimestampMappingEnabled(index, type, true); - - // update some field in the mapping - XContentBuilder updateMappingBuilder = jsonBuilder().startObject().startObject("_timestamp").field("enabled", false).endObject().endObject(); - PutMappingResponse putMappingResponse = client().admin().indices().preparePutMapping(index).setType(type).setSource(updateMappingBuilder).get(); - assertAcked(putMappingResponse); - - // make sure timestamp field is still in mapping - assertTimestampMappingEnabled(index, type, false); - } - - private void assertTimestampMappingEnabled(String index, String type, boolean enabled) { - GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings(index).addTypes(type).get(); - MappingMetaData.Timestamp timestamp = getMappingsResponse.getMappings().get(index).get(type).timestamp(); - assertThat(timestamp, is(notNullValue())); - String errMsg = String.format(Locale.ROOT, "Expected timestamp field mapping to be "+ (enabled ? 
"enabled" : "disabled") +" for %s/%s", index, type); - assertThat(errMsg, timestamp.enabled(), is(enabled)); - } -} diff --git a/core/src/test/java/org/elasticsearch/transport/ConnectionProfileTests.java b/core/src/test/java/org/elasticsearch/transport/ConnectionProfileTests.java new file mode 100644 index 00000000000..1785853d0e1 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/transport/ConnectionProfileTests.java @@ -0,0 +1,111 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.transport; + +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.test.ESTestCase; +import org.hamcrest.Matchers; + +import java.util.EnumSet; + +public class ConnectionProfileTests extends ESTestCase { + + public void testBuildConnectionProfile() { + ConnectionProfile.Builder builder = new ConnectionProfile.Builder(); + TimeValue connectTimeout = TimeValue.timeValueMillis(randomIntBetween(1, 10)); + final boolean setConnectTimeout = randomBoolean(); + if (setConnectTimeout) { + builder.setConnectTimeout(connectTimeout); + } + builder.addConnections(1, TransportRequestOptions.Type.BULK); + builder.addConnections(2, TransportRequestOptions.Type.STATE, TransportRequestOptions.Type.RECOVERY); + builder.addConnections(3, TransportRequestOptions.Type.PING); + IllegalStateException illegalStateException = expectThrows(IllegalStateException.class, builder::build); + assertEquals("not all types are added for this connection profile - missing types: [REG]", illegalStateException.getMessage()); + + IllegalArgumentException illegalArgumentException = expectThrows(IllegalArgumentException.class, + () -> builder.addConnections(4, TransportRequestOptions.Type.REG, TransportRequestOptions.Type.PING)); + assertEquals("type [PING] is already registered", illegalArgumentException.getMessage()); + builder.addConnections(4, TransportRequestOptions.Type.REG); + ConnectionProfile build = builder.build(); + assertEquals(10, build.getNumConnections()); + if (setConnectTimeout) { + assertEquals(connectTimeout, build.getConnectTimeout()); + } else { + assertNull(build.getConnectTimeout()); + } + Integer[] array = new Integer[10]; + for (int i = 0; i < array.length; i++) { + array[i] = i; + } + final int numIters = randomIntBetween(5, 10); + assertEquals(4, build.getHandles().size()); + assertEquals(0, build.getHandles().get(0).offset); + assertEquals(1, build.getHandles().get(0).length); + assertEquals(EnumSet.of(TransportRequestOptions.Type.BULK), build.getHandles().get(0).getTypes()); + Integer channel = build.getHandles().get(0).getChannel(array); + for (int i = 0; i < numIters; i++) { + assertEquals(0, channel.intValue()); + } + + assertEquals(1, build.getHandles().get(1).offset); + assertEquals(2, build.getHandles().get(1).length); + 
assertEquals(EnumSet.of(TransportRequestOptions.Type.STATE, TransportRequestOptions.Type.RECOVERY), + build.getHandles().get(1).getTypes()); + channel = build.getHandles().get(1).getChannel(array); + for (int i = 0; i < numIters; i++) { + assertThat(channel, Matchers.anyOf(Matchers.is(1), Matchers.is(2))); + } + + assertEquals(3, build.getHandles().get(2).offset); + assertEquals(3, build.getHandles().get(2).length); + assertEquals(EnumSet.of(TransportRequestOptions.Type.PING), build.getHandles().get(2).getTypes()); + channel = build.getHandles().get(2).getChannel(array); + for (int i = 0; i < numIters; i++) { + assertThat(channel, Matchers.anyOf(Matchers.is(3), Matchers.is(4), Matchers.is(5))); + } + + assertEquals(6, build.getHandles().get(3).offset); + assertEquals(4, build.getHandles().get(3).length); + assertEquals(EnumSet.of(TransportRequestOptions.Type.REG), build.getHandles().get(3).getTypes()); + channel = build.getHandles().get(3).getChannel(array); + for (int i = 0; i < numIters; i++) { + assertThat(channel, Matchers.anyOf(Matchers.is(6), Matchers.is(7), Matchers.is(8), Matchers.is(9))); + } + + assertEquals(3, build.getNumConnectionsPerType(TransportRequestOptions.Type.PING)); + assertEquals(4, build.getNumConnectionsPerType(TransportRequestOptions.Type.REG)); + assertEquals(2, build.getNumConnectionsPerType(TransportRequestOptions.Type.STATE)); + assertEquals(2, build.getNumConnectionsPerType(TransportRequestOptions.Type.RECOVERY)); + assertEquals(1, build.getNumConnectionsPerType(TransportRequestOptions.Type.BULK)); + } + + public void testNoChannels() { + ConnectionProfile.Builder builder = new ConnectionProfile.Builder(); + builder.addConnections(1, TransportRequestOptions.Type.BULK, + TransportRequestOptions.Type.STATE, + TransportRequestOptions.Type.RECOVERY, + TransportRequestOptions.Type.REG); + builder.addConnections(0, TransportRequestOptions.Type.PING); + ConnectionProfile build = builder.build(); + Integer[] array = new Integer[]{Integer.valueOf(0)}; + assertEquals(Integer.valueOf(0), build.getHandles().get(0).getChannel(array)); + expectThrows(IllegalStateException.class, () -> build.getHandles().get(1).getChannel(array)); + } +} diff --git a/core/src/test/java/org/elasticsearch/transport/TCPTransportTests.java b/core/src/test/java/org/elasticsearch/transport/TCPTransportTests.java index 0525f4a32dc..3df135df236 100644 --- a/core/src/test/java/org/elasticsearch/transport/TCPTransportTests.java +++ b/core/src/test/java/org/elasticsearch/transport/TCPTransportTests.java @@ -31,7 +31,6 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.transport.support.TransportStatus; import java.io.IOException; import java.net.InetSocketAddress; @@ -168,11 +167,6 @@ public class TCPTransportTests extends ESTestCase { } - @Override - protected NodeChannels connectToChannelsLight(DiscoveryNode node) throws IOException { - return new NodeChannels(new Object[0], new Object[0], new Object[0], new Object[0], new Object[0]); - } - @Override protected void sendMessage(Object o, BytesReference reference, Runnable sendListener) throws IOException { StreamInput streamIn = reference.streamInput(); @@ -198,8 +192,8 @@ public class TCPTransportTests extends ESTestCase { } @Override - protected NodeChannels connectToChannels(DiscoveryNode node) throws IOException { - return new NodeChannels(new Object[0], new Object[0], new 
Object[0], new Object[0], new Object[0]); + protected NodeChannels connectToChannels(DiscoveryNode node, ConnectionProfile profile) throws IOException { + return new NodeChannels(new Object[profile.getNumConnections()], profile); } @Override @@ -214,7 +208,8 @@ public class TCPTransportTests extends ESTestCase { @Override protected Object nodeChannel(DiscoveryNode node, TransportRequestOptions options) throws ConnectTransportException { - return new NodeChannels(new Object[0], new Object[0], new Object[0], new Object[0], new Object[0]); + return new NodeChannels(new Object[ConnectionProfile.LIGHT_PROFILE.getNumConnections()], + ConnectionProfile.LIGHT_PROFILE); } }; DiscoveryNode node = new DiscoveryNode("foo", buildNewFakeTransportAddress(), Version.CURRENT); @@ -243,4 +238,38 @@ public class TCPTransportTests extends ESTestCase { } } + public void testDefaultConnectionProfile() { + ConnectionProfile profile = TcpTransport.buildDefaultConnectionProfile(Settings.EMPTY); + assertEquals(13, profile.getNumConnections()); + assertEquals(1, profile.getNumConnectionsPerType(TransportRequestOptions.Type.PING)); + assertEquals(6, profile.getNumConnectionsPerType(TransportRequestOptions.Type.REG)); + assertEquals(1, profile.getNumConnectionsPerType(TransportRequestOptions.Type.STATE)); + assertEquals(2, profile.getNumConnectionsPerType(TransportRequestOptions.Type.RECOVERY)); + assertEquals(3, profile.getNumConnectionsPerType(TransportRequestOptions.Type.BULK)); + + profile = TcpTransport.buildDefaultConnectionProfile(Settings.builder().put("node.master", false).build()); + assertEquals(12, profile.getNumConnections()); + assertEquals(1, profile.getNumConnectionsPerType(TransportRequestOptions.Type.PING)); + assertEquals(6, profile.getNumConnectionsPerType(TransportRequestOptions.Type.REG)); + assertEquals(0, profile.getNumConnectionsPerType(TransportRequestOptions.Type.STATE)); + assertEquals(2, profile.getNumConnectionsPerType(TransportRequestOptions.Type.RECOVERY)); + assertEquals(3, profile.getNumConnectionsPerType(TransportRequestOptions.Type.BULK)); + + profile = TcpTransport.buildDefaultConnectionProfile(Settings.builder().put("node.data", false).build()); + assertEquals(11, profile.getNumConnections()); + assertEquals(1, profile.getNumConnectionsPerType(TransportRequestOptions.Type.PING)); + assertEquals(6, profile.getNumConnectionsPerType(TransportRequestOptions.Type.REG)); + assertEquals(1, profile.getNumConnectionsPerType(TransportRequestOptions.Type.STATE)); + assertEquals(0, profile.getNumConnectionsPerType(TransportRequestOptions.Type.RECOVERY)); + assertEquals(3, profile.getNumConnectionsPerType(TransportRequestOptions.Type.BULK)); + + profile = TcpTransport.buildDefaultConnectionProfile(Settings.builder().put("node.data", false).put("node.master", false).build()); + assertEquals(10, profile.getNumConnections()); + assertEquals(1, profile.getNumConnectionsPerType(TransportRequestOptions.Type.PING)); + assertEquals(6, profile.getNumConnectionsPerType(TransportRequestOptions.Type.REG)); + assertEquals(0, profile.getNumConnectionsPerType(TransportRequestOptions.Type.STATE)); + assertEquals(0, profile.getNumConnectionsPerType(TransportRequestOptions.Type.RECOVERY)); + assertEquals(3, profile.getNumConnectionsPerType(TransportRequestOptions.Type.BULK)); + } + } diff --git a/core/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java b/core/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java index 7fccc42bb79..45aca7fe2c0 100644 --- 
a/core/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java +++ b/core/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java @@ -113,7 +113,7 @@ public class TransportServiceHandshakeTests extends ESTestCase { emptySet(), Version.CURRENT.minimumCompatibilityVersion()); DiscoveryNode connectedNode = - handleA.transportService.connectToNodeLightAndHandshake(discoveryNode, timeout); + handleA.transportService.connectToNodeAndHandshake(discoveryNode, timeout); assertNotNull(connectedNode); // the name and version should be updated @@ -132,7 +132,7 @@ public class TransportServiceHandshakeTests extends ESTestCase { emptyMap(), emptySet(), Version.CURRENT.minimumCompatibilityVersion()); - IllegalStateException ex = expectThrows(IllegalStateException.class, () -> handleA.transportService.connectToNodeLightAndHandshake( + IllegalStateException ex = expectThrows(IllegalStateException.class, () -> handleA.transportService.connectToNodeAndHandshake( discoveryNode, timeout)); assertThat(ex.getMessage(), containsString("handshake failed, mismatched cluster name [Cluster [b]]")); assertFalse(handleA.transportService.nodeConnected(discoveryNode)); @@ -149,7 +149,7 @@ public class TransportServiceHandshakeTests extends ESTestCase { emptyMap(), emptySet(), Version.CURRENT.minimumCompatibilityVersion()); - IllegalStateException ex = expectThrows(IllegalStateException.class, () -> handleA.transportService.connectToNodeLightAndHandshake( + IllegalStateException ex = expectThrows(IllegalStateException.class, () -> handleA.transportService.connectToNodeAndHandshake( discoveryNode, timeout)); assertThat(ex.getMessage(), containsString("handshake failed, incompatible version")); assertFalse(handleA.transportService.nodeConnected(discoveryNode)); @@ -171,7 +171,7 @@ public class TransportServiceHandshakeTests extends ESTestCase { emptyMap(), emptySet(), Version.CURRENT.minimumCompatibilityVersion()); - DiscoveryNode connectedNode = handleA.transportService.connectToNodeLightAndHandshake(discoveryNode, timeout, false); + DiscoveryNode connectedNode = handleA.transportService.connectToNodeAndHandshake(discoveryNode, timeout, false); assertNotNull(connectedNode); assertEquals(connectedNode.getName(), "TS_B"); assertEquals(connectedNode.getVersion(), handleB.discoveryNode.getVersion()); diff --git a/core/src/test/java/org/elasticsearch/ttl/SimpleTTLIT.java b/core/src/test/java/org/elasticsearch/ttl/SimpleTTLIT.java deleted file mode 100644 index 5716e57c96f..00000000000 --- a/core/src/test/java/org/elasticsearch/ttl/SimpleTTLIT.java +++ /dev/null @@ -1,305 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.ttl; - -import org.elasticsearch.Version; -import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; -import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse; -import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; -import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.action.index.IndexResponse; -import org.elasticsearch.action.update.UpdateRequestBuilder; -import org.elasticsearch.action.update.UpdateResponse; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; -import org.elasticsearch.test.ESIntegTestCase.ClusterScope; -import org.elasticsearch.test.ESIntegTestCase.Scope; - -import java.io.IOException; -import java.util.Collection; -import java.util.Collections; -import java.util.Locale; -import java.util.Map; -import java.util.concurrent.TimeUnit; - -import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.hamcrest.Matchers.both; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThan; -import static org.hamcrest.Matchers.hasKey; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.lessThanOrEqualTo; -import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.nullValue; - -@ClusterScope(scope= Scope.SUITE, supportsDedicatedMasters = false, numDataNodes = 1) -public class SimpleTTLIT extends ESIntegTestCase { - - private static final long PURGE_INTERVAL = 200; - - @Override - protected int numberOfShards() { - return 2; - } - - @Override - protected Collection> nodePlugins() { - return Collections.singleton(InternalSettingsPlugin.class); - } - - @Override - protected Settings nodeSettings(int nodeOrdinal) { - return Settings.builder() - .put(super.nodeSettings(nodeOrdinal)) - .put("indices.ttl.interval", PURGE_INTERVAL, TimeUnit.MILLISECONDS) - .build(); - } - - public void testSimpleTTL() throws Exception { - assertAcked(prepareCreate("test") - .setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0.id) - .addMapping("type1", XContentFactory.jsonBuilder() - .startObject() - .startObject("type1") - .startObject("_timestamp").field("enabled", true).endObject() - .startObject("_ttl").field("enabled", true).endObject() - .endObject() - .endObject()) - .addMapping("type2", XContentFactory.jsonBuilder() - .startObject() - .startObject("type2") - .startObject("_timestamp").field("enabled", true).endObject() - .startObject("_ttl").field("enabled", true).field("default", "1d").endObject() - .endObject() - .endObject())); - - final NumShards test = getNumShards("test"); - - long providedTTLValue = 3000; - logger.info("--> checking ttl"); - // Index one doc without routing, one doc with routing, one doc with not TTL and no default and one doc with default TTL - long now = System.currentTimeMillis(); - IndexResponse indexResponse = client().prepareIndex("test", "type1", "1").setSource("field1", "value1") - 
.setTimestamp(String.valueOf(now)).setTTL(providedTTLValue).setRefreshPolicy(IMMEDIATE).get(); - assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); - indexResponse = client().prepareIndex("test", "type1", "with_routing").setSource("field1", "value1") - .setTimestamp(String.valueOf(now)).setTTL(providedTTLValue).setRouting("routing").setRefreshPolicy(IMMEDIATE).get(); - assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); - indexResponse = client().prepareIndex("test", "type1", "no_ttl").setSource("field1", "value1").get(); - assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); - indexResponse = client().prepareIndex("test", "type2", "default_ttl").setSource("field1", "value1").get(); - assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); - - // realtime get check - long currentTime = System.currentTimeMillis(); - GetResponse getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_ttl").get(); - long ttl0; - if (getResponse.isExists()) { - ttl0 = ((Number) getResponse.getField("_ttl").getValue()).longValue(); - assertThat(ttl0, lessThanOrEqualTo(providedTTLValue - (currentTime - now))); - } else { - assertThat(providedTTLValue - (currentTime - now), lessThanOrEqualTo(0L)); - } - // verify the ttl is still decreasing when going to the replica - currentTime = System.currentTimeMillis(); - getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_ttl").get(); - if (getResponse.isExists()) { - ttl0 = ((Number) getResponse.getField("_ttl").getValue()).longValue(); - assertThat(ttl0, lessThanOrEqualTo(providedTTLValue - (currentTime - now))); - } else { - assertThat(providedTTLValue - (currentTime - now), lessThanOrEqualTo(0L)); - } - // non realtime get (stored) - currentTime = System.currentTimeMillis(); - getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_ttl").setRealtime(false).get(); - if (getResponse.isExists()) { - ttl0 = ((Number) getResponse.getField("_ttl").getValue()).longValue(); - assertThat(ttl0, lessThanOrEqualTo(providedTTLValue - (currentTime - now))); - } else { - assertThat(providedTTLValue - (currentTime - now), lessThanOrEqualTo(0L)); - } - // non realtime get going the replica - currentTime = System.currentTimeMillis(); - getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_ttl").setRealtime(false).get(); - if (getResponse.isExists()) { - ttl0 = ((Number) getResponse.getField("_ttl").getValue()).longValue(); - assertThat(ttl0, lessThanOrEqualTo(providedTTLValue - (currentTime - now))); - } else { - assertThat(providedTTLValue - (currentTime - now), lessThanOrEqualTo(0L)); - } - - // no TTL provided so no TTL fetched - getResponse = client().prepareGet("test", "type1", "no_ttl").setStoredFields("_ttl").setRealtime(true).execute().actionGet(); - assertThat(getResponse.getField("_ttl"), nullValue()); - // no TTL provided make sure it has default TTL - getResponse = client().prepareGet("test", "type2", "default_ttl").setStoredFields("_ttl").setRealtime(true).execute().actionGet(); - ttl0 = ((Number) getResponse.getField("_ttl").getValue()).longValue(); - assertThat(ttl0, greaterThan(0L)); - - IndicesStatsResponse response = client().admin().indices().prepareStats("test").clear().setIndexing(true).get(); - assertThat(response.getIndices().get("test").getTotal().getIndexing().getTotal().getDeleteCount(), equalTo(0L)); - - // make sure the purger has done its job for all indexed docs that are expired - long 
shouldBeExpiredDate = now + providedTTLValue + PURGE_INTERVAL + 2000; - currentTime = System.currentTimeMillis(); - if (shouldBeExpiredDate - currentTime > 0) { - Thread.sleep(shouldBeExpiredDate - currentTime); - } - - // We can't assume that after waiting for ttl + purgeInterval (waitTime) that the document have actually been deleted. - // The ttl purging happens in the background in a different thread, and might not have been completed after waiting for waitTime. - // But we can use index statistics' delete count to be sure that deletes have been executed, that must be incremented before - // ttl purging has finished. - logger.info("--> checking purger"); - assertTrue(awaitBusy(() -> { - if (rarely()) { - client().admin().indices().prepareFlush("test").get(); - } else if (rarely()) { - client().admin().indices().prepareForceMerge("test").setMaxNumSegments(1).get(); - } - IndicesStatsResponse indicesStatsResponse = client().admin().indices().prepareStats("test").clear().setIndexing(true).get(); - // TTL deletes two docs, but it is indexed in the primary shard and replica shard. - return indicesStatsResponse.getIndices().get("test").getTotal().getIndexing().getTotal().getDeleteCount() == 2L * test.dataCopies; - }, - 5, TimeUnit.SECONDS - )); - - // realtime get check - getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_ttl").setRealtime(true).execute().actionGet(); - assertThat(getResponse.isExists(), equalTo(false)); - getResponse = client().prepareGet("test", "type1", "with_routing").setRouting("routing").setStoredFields("_ttl").setRealtime(true).execute().actionGet(); - assertThat(getResponse.isExists(), equalTo(false)); - // replica realtime get check - getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_ttl").setRealtime(true).execute().actionGet(); - assertThat(getResponse.isExists(), equalTo(false)); - getResponse = client().prepareGet("test", "type1", "with_routing").setRouting("routing").setStoredFields("_ttl").setRealtime(true).execute().actionGet(); - assertThat(getResponse.isExists(), equalTo(false)); - - // Need to run a refresh, in order for the non realtime get to work. 
- client().admin().indices().prepareRefresh("test").execute().actionGet(); - - // non realtime get (stored) check - getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_ttl").setRealtime(false).execute().actionGet(); - assertThat(getResponse.isExists(), equalTo(false)); - getResponse = client().prepareGet("test", "type1", "with_routing").setRouting("routing").setStoredFields("_ttl").setRealtime(false).execute().actionGet(); - assertThat(getResponse.isExists(), equalTo(false)); - // non realtime get going the replica check - getResponse = client().prepareGet("test", "type1", "1").setStoredFields("_ttl").setRealtime(false).execute().actionGet(); - assertThat(getResponse.isExists(), equalTo(false)); - getResponse = client().prepareGet("test", "type1", "with_routing").setRouting("routing").setStoredFields("_ttl").setRealtime(false).execute().actionGet(); - assertThat(getResponse.isExists(), equalTo(false)); - } - - // issue 5053 - public void testThatUpdatingMappingShouldNotRemoveTTLConfiguration() throws Exception { - String index = "foo"; - String type = "mytype"; - - XContentBuilder builder = jsonBuilder().startObject().startObject("_ttl").field("enabled", true).endObject().endObject(); - assertAcked(client().admin().indices().prepareCreate(index).setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0.id).addMapping(type, builder)); - - // check mapping again - assertTTLMappingEnabled(index, type); - - // update some field in the mapping - XContentBuilder updateMappingBuilder = jsonBuilder().startObject().startObject("properties").startObject("otherField").field("type", "text").endObject().endObject().endObject(); - PutMappingResponse putMappingResponse = client().admin().indices().preparePutMapping(index).setType(type).setSource(updateMappingBuilder).get(); - assertAcked(putMappingResponse); - - // make sure timestamp field is still in mapping - assertTTLMappingEnabled(index, type); - } - - /** - * Test that updates with detect_noop set to true (the default) that don't - * change the source don't change the ttl. This is unexpected behavior and - * documented in ttl-field.asciidoc. If this behavior changes it is safe to - * rewrite this test to reflect the new behavior and to change the - * documentation. - */ - public void testNoopUpdate() throws IOException { - assertAcked(prepareCreate("test") - .setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0.id) - .addMapping("type1", XContentFactory.jsonBuilder() - .startObject() - .startObject("type1") - .startObject("_timestamp").field("enabled", true).endObject() - .startObject("_ttl").field("enabled", true).endObject() - .endObject() - .endObject())); - - long aLongTime = 10000000; - long firstTtl = aLongTime * 3; - long secondTtl = aLongTime * 2; - long thirdTtl = aLongTime * 1; - IndexResponse indexResponse = client().prepareIndex("test", "type1", "1").setSource("field1", "value1") - .setTTL(firstTtl).setRefreshPolicy(IMMEDIATE).get(); - assertTrue(indexResponse.getResult() == DocWriteResponse.Result.CREATED); - assertThat(getTtl("type1", 1), both(lessThanOrEqualTo(firstTtl)).and(greaterThan(secondTtl))); - - // Updating with the default detect_noop without a change to the document doesn't change the ttl. 
- UpdateRequestBuilder update = client().prepareUpdate("test", "type1", "1").setDoc("field1", "value1").setTtl(secondTtl); - assertThat(updateAndGetTtl(update), both(lessThanOrEqualTo(firstTtl)).and(greaterThan(secondTtl))); - - // Updating with the default detect_noop with a change to the document does change the ttl. - update = client().prepareUpdate("test", "type1", "1").setDoc("field1", "value2").setTtl(secondTtl); - assertThat(updateAndGetTtl(update), both(lessThanOrEqualTo(secondTtl)).and(greaterThan(thirdTtl))); - - // Updating with detect_noop=true without a change to the document doesn't change the ttl. - update = client().prepareUpdate("test", "type1", "1").setDoc("field1", "value2").setTtl(secondTtl).setDetectNoop(true); - assertThat(updateAndGetTtl(update), both(lessThanOrEqualTo(secondTtl)).and(greaterThan(thirdTtl))); - - // Updating with detect_noop=false without a change to the document does change the ttl. - update = client().prepareUpdate("test", "type1", "1").setDoc("field1", "value2").setTtl(thirdTtl).setDetectNoop(false); - assertThat(updateAndGetTtl(update), lessThanOrEqualTo(thirdTtl)); - } - - private long updateAndGetTtl(UpdateRequestBuilder update) { - UpdateResponse updateResponse = update.setFields("_ttl").get(); - assertThat(updateResponse.getShardInfo().getFailed(), equalTo(0)); - // You can't actually fetch _ttl from an update so we use a get. - return getTtl(updateResponse.getType(), updateResponse.getId()); - } - - private long getTtl(String type, Object id) { - GetResponse getResponse = client().prepareGet("test", type, id.toString()).setStoredFields("_ttl").execute() - .actionGet(); - return ((Number) getResponse.getField("_ttl").getValue()).longValue(); - } - - private void assertTTLMappingEnabled(String index, String type) throws IOException { - String errMsg = String.format(Locale.ROOT, "Expected ttl field mapping to be enabled for %s/%s", index, type); - - GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings(index).addTypes(type).get(); - Map mappingSource = getMappingsResponse.getMappings().get(index).get(type).getSourceAsMap(); - assertThat(errMsg, mappingSource, hasKey("_ttl")); - String ttlAsString = mappingSource.get("_ttl").toString(); - assertThat(ttlAsString, is(notNullValue())); - assertThat(errMsg, ttlAsString, is("{enabled=true}")); - } -} diff --git a/core/src/test/java/org/elasticsearch/update/TimestampTTLBWIT.java b/core/src/test/java/org/elasticsearch/update/TimestampTTLBWIT.java deleted file mode 100644 index 39f1c774634..00000000000 --- a/core/src/test/java/org/elasticsearch/update/TimestampTTLBWIT.java +++ /dev/null @@ -1,237 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.update; - -import org.elasticsearch.Version; -import org.elasticsearch.action.admin.indices.alias.Alias; -import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.update.UpdateResponse; -import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.engine.DocumentMissingException; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.script.Script; -import org.elasticsearch.script.ScriptType; -import org.elasticsearch.search.SearchHit; -import org.elasticsearch.search.SearchHitField; -import org.elasticsearch.search.sort.SortOrder; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.InternalSettingsPlugin; - -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.Map; - -import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; -import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; -import static org.hamcrest.Matchers.allOf; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThan; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.hamcrest.Matchers.lessThanOrEqualTo; - -public class TimestampTTLBWIT extends ESIntegTestCase { - - @Override - protected Collection> nodePlugins() { - return Arrays.asList( - UpdateIT.FieldIncrementScriptPlugin.class, - UpdateIT.ExtractContextInSourceScriptPlugin.class, - UpdateIT.PutFieldValuesScriptPlugin.class, - InternalSettingsPlugin.class - ); - } - - public void testSort() throws Exception { - XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("_timestamp").field("enabled", true).endObject() - .endObject().endObject(); - assertAcked(prepareCreate("test") - .setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0.id) - .addMapping("type", mapping)); - ensureGreen(); - final int numDocs = randomIntBetween(10, 20); - IndexRequestBuilder[] indexReqs = new IndexRequestBuilder[numDocs]; - for (int i = 0; i < numDocs; ++i) { - indexReqs[i] = client().prepareIndex("test", "type", Integer.toString(i)).setTimestamp(Integer.toString(randomInt(1000))) - .setSource(); - } - indexRandom(true, indexReqs); - - SortOrder order = randomFrom(SortOrder.values()); - - SearchResponse searchResponse = client().prepareSearch() - .setQuery(matchAllQuery()) - .setSize(randomIntBetween(1, numDocs + 5)) - .addSort("_timestamp", order) - .addStoredField("_timestamp") - .execute().actionGet(); - assertNoFailures(searchResponse); - SearchHit[] hits = searchResponse.getHits().hits(); - Long previousTs = order == SortOrder.ASC ? 0 : Long.MAX_VALUE; - for (int i = 0; i < hits.length; ++i) { - SearchHitField timestampField = hits[i].getFields().get("_timestamp"); - Long timestamp = timestampField.getValue(); - assertThat(previousTs, order == SortOrder.ASC ? 
lessThanOrEqualTo(timestamp) : greaterThanOrEqualTo(timestamp)); - previousTs = timestamp; - } - } - - public void testUpdate() throws Exception { - assertAcked(prepareCreate("test").addAlias(new Alias("alias")) - .setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0.id) - .addMapping("type1", XContentFactory.jsonBuilder() - .startObject() - .startObject("type1") - .startObject("_timestamp").field("enabled", true).endObject() - .startObject("_ttl").field("enabled", true).endObject() - .endObject() - .endObject())); - - ensureGreen(); - - try { - client().prepareUpdate(indexOrAlias(), "type1", "1") - .setScript(new Script(ScriptType.INLINE, "field_inc", "field", Collections.emptyMap())).execute().actionGet(); - fail(); - } catch (DocumentMissingException e) { - // all is well - } - - // check TTL is kept after an update without TTL - client().prepareIndex("test", "type1", "2").setSource("field", 1).setTTL(86400000L).setRefreshPolicy(IMMEDIATE).get(); - GetResponse getResponse = client().prepareGet("test", "type1", "2").setStoredFields("_ttl").execute().actionGet(); - long ttl = ((Number) getResponse.getField("_ttl").getValue()).longValue(); - assertThat(ttl, greaterThan(0L)); - client().prepareUpdate(indexOrAlias(), "type1", "2") - .setScript(new Script(ScriptType.INLINE, "field_inc", "field", Collections.emptyMap())).execute().actionGet(); - getResponse = client().prepareGet("test", "type1", "2").setStoredFields("_ttl").execute().actionGet(); - ttl = ((Number) getResponse.getField("_ttl").getValue()).longValue(); - assertThat(ttl, greaterThan(0L)); - - // check TTL update - client().prepareUpdate(indexOrAlias(), "type1", "2") - .setScript(new Script(ScriptType.INLINE, "put_values", "", - Collections.singletonMap("_ctx", Collections.singletonMap("_ttl", 3600000)))).execute().actionGet(); - getResponse = client().prepareGet("test", "type1", "2").setStoredFields("_ttl").execute().actionGet(); - ttl = ((Number) getResponse.getField("_ttl").getValue()).longValue(); - assertThat(ttl, greaterThan(0L)); - assertThat(ttl, lessThanOrEqualTo(3600000L)); - - // check timestamp update - client().prepareIndex("test", "type1", "3").setSource("field", 1).setRefreshPolicy(IMMEDIATE).get(); - client().prepareUpdate(indexOrAlias(), "type1", "3") - .setScript(new Script(ScriptType.INLINE, "put_values", "", - Collections.singletonMap("_ctx", Collections.singletonMap("_timestamp", "2009-11-15T14:12:12")))).execute() - .actionGet(); - getResponse = client().prepareGet("test", "type1", "3").setStoredFields("_timestamp").execute().actionGet(); - long timestamp = ((Number) getResponse.getField("_timestamp").getValue()).longValue(); - assertThat(timestamp, equalTo(1258294332000L)); - } - - public void testContextVariables() throws Exception { - assertAcked(prepareCreate("test").addAlias(new Alias("alias")) - .setSettings(IndexMetaData.SETTING_VERSION_CREATED, Version.V_2_3_0.id) - .addMapping("type1", XContentFactory.jsonBuilder() - .startObject() - .startObject("type1") - .startObject("_timestamp").field("enabled", true).endObject() - .startObject("_ttl").field("enabled", true).endObject() - .endObject() - .endObject()) - .addMapping("subtype1", XContentFactory.jsonBuilder() - .startObject() - .startObject("subtype1") - .startObject("_parent").field("type", "type1").endObject() - .startObject("_timestamp").field("enabled", true).endObject() - .startObject("_ttl").field("enabled", true).endObject() - .endObject() - .endObject()) - ); - ensureGreen(); - - // Index some documents - long timestamp = 
System.currentTimeMillis(); - client().prepareIndex() - .setIndex("test") - .setType("type1") - .setId("parentId1") - .setTimestamp(String.valueOf(timestamp-1)) - .setSource("field1", 0, "content", "bar") - .execute().actionGet(); - - long ttl = 10000; - client().prepareIndex() - .setIndex("test") - .setType("subtype1") - .setId("id1") - .setParent("parentId1") - .setRouting("routing1") - .setTimestamp(String.valueOf(timestamp)) - .setTTL(ttl) - .setSource("field1", 1, "content", "foo") - .execute().actionGet(); - - // Update the first object and note context variables values - UpdateResponse updateResponse = client().prepareUpdate("test", "subtype1", "id1") - .setRouting("routing1") - .setScript(new Script(ScriptType.INLINE, "extract_ctx", "", Collections.emptyMap())) - .execute().actionGet(); - - assertEquals(2, updateResponse.getVersion()); - - GetResponse getResponse = client().prepareGet("test", "subtype1", "id1").setRouting("routing1").execute().actionGet(); - Map updateContext = (Map) getResponse.getSourceAsMap().get("update_context"); - assertEquals("test", updateContext.get("_index")); - assertEquals("subtype1", updateContext.get("_type")); - assertEquals("id1", updateContext.get("_id")); - assertEquals(1, updateContext.get("_version")); - assertEquals("parentId1", updateContext.get("_parent")); - assertEquals("routing1", updateContext.get("_routing")); - assertThat(((Integer) updateContext.get("_ttl")).longValue(), allOf(greaterThanOrEqualTo(ttl-3000), lessThanOrEqualTo(ttl))); - - // Idem with the second object - updateResponse = client().prepareUpdate("test", "type1", "parentId1") - .setScript(new Script(ScriptType.INLINE, "extract_ctx", "", Collections.emptyMap())) - .execute().actionGet(); - - assertEquals(2, updateResponse.getVersion()); - - getResponse = client().prepareGet("test", "type1", "parentId1").execute().actionGet(); - updateContext = (Map) getResponse.getSourceAsMap().get("update_context"); - assertEquals("test", updateContext.get("_index")); - assertEquals("type1", updateContext.get("_type")); - assertEquals("parentId1", updateContext.get("_id")); - assertEquals(1, updateContext.get("_version")); - assertNull(updateContext.get("_parent")); - assertNull(updateContext.get("_routing")); - assertNull(updateContext.get("_ttl")); - } - - private static String indexOrAlias() { - return randomBoolean() ? 
"test" : "alias"; - } -} diff --git a/core/src/test/java/org/elasticsearch/update/UpdateIT.java b/core/src/test/java/org/elasticsearch/update/UpdateIT.java index a3903023edf..2c98b393642 100644 --- a/core/src/test/java/org/elasticsearch/update/UpdateIT.java +++ b/core/src/test/java/org/elasticsearch/update/UpdateIT.java @@ -743,24 +743,19 @@ public class UpdateIT extends ESIntegTestCase { ensureGreen(); // Index some documents - long timestamp = System.currentTimeMillis(); client().prepareIndex() .setIndex("test") .setType("type1") .setId("parentId1") - .setTimestamp(String.valueOf(timestamp-1)) .setSource("field1", 0, "content", "bar") .execute().actionGet(); - long ttl = 10000; client().prepareIndex() .setIndex("test") .setType("subtype1") .setId("id1") .setParent("parentId1") .setRouting("routing1") - .setTimestamp(String.valueOf(timestamp)) - .setTTL(ttl) .setSource("field1", 1, "content", "foo") .execute().actionGet(); diff --git a/core/src/test/resources/indices/bwc/index-5.0.2.zip b/core/src/test/resources/indices/bwc/index-5.0.2.zip new file mode 100644 index 00000000000..3ad5ebb8e5e Binary files /dev/null and b/core/src/test/resources/indices/bwc/index-5.0.2.zip differ diff --git a/core/src/test/resources/indices/bwc/missing-checksum-repo-2.3.4.zip b/core/src/test/resources/indices/bwc/missing-checksum-repo-2.3.4.zip deleted file mode 100644 index 9590f8dbd66..00000000000 Binary files a/core/src/test/resources/indices/bwc/missing-checksum-repo-2.3.4.zip and /dev/null differ diff --git a/core/src/test/resources/indices/bwc/repo-5.0.2.zip b/core/src/test/resources/indices/bwc/repo-5.0.2.zip new file mode 100644 index 00000000000..8a7784e25f1 Binary files /dev/null and b/core/src/test/resources/indices/bwc/repo-5.0.2.zip differ diff --git a/core/src/test/resources/indices/bwc/index-2.0.0-beta1.zip b/core/src/test/resources/indices/bwc/unsupported-2.0.0-beta1.zip similarity index 100% rename from core/src/test/resources/indices/bwc/index-2.0.0-beta1.zip rename to core/src/test/resources/indices/bwc/unsupported-2.0.0-beta1.zip diff --git a/core/src/test/resources/indices/bwc/index-2.0.0-beta2.zip b/core/src/test/resources/indices/bwc/unsupported-2.0.0-beta2.zip similarity index 100% rename from core/src/test/resources/indices/bwc/index-2.0.0-beta2.zip rename to core/src/test/resources/indices/bwc/unsupported-2.0.0-beta2.zip diff --git a/core/src/test/resources/indices/bwc/index-2.0.0-rc1.zip b/core/src/test/resources/indices/bwc/unsupported-2.0.0-rc1.zip similarity index 100% rename from core/src/test/resources/indices/bwc/index-2.0.0-rc1.zip rename to core/src/test/resources/indices/bwc/unsupported-2.0.0-rc1.zip diff --git a/core/src/test/resources/indices/bwc/index-2.0.0.zip b/core/src/test/resources/indices/bwc/unsupported-2.0.0.zip similarity index 100% rename from core/src/test/resources/indices/bwc/index-2.0.0.zip rename to core/src/test/resources/indices/bwc/unsupported-2.0.0.zip diff --git a/core/src/test/resources/indices/bwc/index-2.0.1.zip b/core/src/test/resources/indices/bwc/unsupported-2.0.1.zip similarity index 100% rename from core/src/test/resources/indices/bwc/index-2.0.1.zip rename to core/src/test/resources/indices/bwc/unsupported-2.0.1.zip diff --git a/core/src/test/resources/indices/bwc/index-2.0.2.zip b/core/src/test/resources/indices/bwc/unsupported-2.0.2.zip similarity index 100% rename from core/src/test/resources/indices/bwc/index-2.0.2.zip rename to core/src/test/resources/indices/bwc/unsupported-2.0.2.zip diff --git 
a/core/src/test/resources/indices/bwc/index-2.1.0.zip b/core/src/test/resources/indices/bwc/unsupported-2.1.0.zip similarity index 100% rename from core/src/test/resources/indices/bwc/index-2.1.0.zip rename to core/src/test/resources/indices/bwc/unsupported-2.1.0.zip diff --git a/core/src/test/resources/indices/bwc/index-2.1.1.zip b/core/src/test/resources/indices/bwc/unsupported-2.1.1.zip similarity index 100% rename from core/src/test/resources/indices/bwc/index-2.1.1.zip rename to core/src/test/resources/indices/bwc/unsupported-2.1.1.zip diff --git a/core/src/test/resources/indices/bwc/index-2.1.2.zip b/core/src/test/resources/indices/bwc/unsupported-2.1.2.zip similarity index 100% rename from core/src/test/resources/indices/bwc/index-2.1.2.zip rename to core/src/test/resources/indices/bwc/unsupported-2.1.2.zip diff --git a/core/src/test/resources/indices/bwc/index-2.2.0.zip b/core/src/test/resources/indices/bwc/unsupported-2.2.0.zip similarity index 100% rename from core/src/test/resources/indices/bwc/index-2.2.0.zip rename to core/src/test/resources/indices/bwc/unsupported-2.2.0.zip diff --git a/core/src/test/resources/indices/bwc/index-2.2.1.zip b/core/src/test/resources/indices/bwc/unsupported-2.2.1.zip similarity index 100% rename from core/src/test/resources/indices/bwc/index-2.2.1.zip rename to core/src/test/resources/indices/bwc/unsupported-2.2.1.zip diff --git a/core/src/test/resources/indices/bwc/index-2.2.2.zip b/core/src/test/resources/indices/bwc/unsupported-2.2.2.zip similarity index 100% rename from core/src/test/resources/indices/bwc/index-2.2.2.zip rename to core/src/test/resources/indices/bwc/unsupported-2.2.2.zip diff --git a/core/src/test/resources/indices/bwc/index-2.3.0.zip b/core/src/test/resources/indices/bwc/unsupported-2.3.0.zip similarity index 100% rename from core/src/test/resources/indices/bwc/index-2.3.0.zip rename to core/src/test/resources/indices/bwc/unsupported-2.3.0.zip diff --git a/core/src/test/resources/indices/bwc/index-2.3.1.zip b/core/src/test/resources/indices/bwc/unsupported-2.3.1.zip similarity index 100% rename from core/src/test/resources/indices/bwc/index-2.3.1.zip rename to core/src/test/resources/indices/bwc/unsupported-2.3.1.zip diff --git a/core/src/test/resources/indices/bwc/index-2.3.2.zip b/core/src/test/resources/indices/bwc/unsupported-2.3.2.zip similarity index 100% rename from core/src/test/resources/indices/bwc/index-2.3.2.zip rename to core/src/test/resources/indices/bwc/unsupported-2.3.2.zip diff --git a/core/src/test/resources/indices/bwc/index-2.3.3.zip b/core/src/test/resources/indices/bwc/unsupported-2.3.3.zip similarity index 100% rename from core/src/test/resources/indices/bwc/index-2.3.3.zip rename to core/src/test/resources/indices/bwc/unsupported-2.3.3.zip diff --git a/core/src/test/resources/indices/bwc/index-2.3.4.zip b/core/src/test/resources/indices/bwc/unsupported-2.3.4.zip similarity index 100% rename from core/src/test/resources/indices/bwc/index-2.3.4.zip rename to core/src/test/resources/indices/bwc/unsupported-2.3.4.zip diff --git a/core/src/test/resources/indices/bwc/index-2.3.5.zip b/core/src/test/resources/indices/bwc/unsupported-2.3.5.zip similarity index 100% rename from core/src/test/resources/indices/bwc/index-2.3.5.zip rename to core/src/test/resources/indices/bwc/unsupported-2.3.5.zip diff --git a/core/src/test/resources/indices/bwc/index-2.4.0.zip b/core/src/test/resources/indices/bwc/unsupported-2.4.0.zip similarity index 100% rename from core/src/test/resources/indices/bwc/index-2.4.0.zip rename 
to core/src/test/resources/indices/bwc/unsupported-2.4.0.zip diff --git a/core/src/test/resources/indices/bwc/index-2.4.1.zip b/core/src/test/resources/indices/bwc/unsupported-2.4.1.zip similarity index 100% rename from core/src/test/resources/indices/bwc/index-2.4.1.zip rename to core/src/test/resources/indices/bwc/unsupported-2.4.1.zip diff --git a/core/src/test/resources/indices/bwc/index-2.4.2.zip b/core/src/test/resources/indices/bwc/unsupported-2.4.2.zip similarity index 100% rename from core/src/test/resources/indices/bwc/index-2.4.2.zip rename to core/src/test/resources/indices/bwc/unsupported-2.4.2.zip diff --git a/core/src/test/resources/indices/bwc/repo-2.0.0-beta1.zip b/core/src/test/resources/indices/bwc/unsupportedrepo-2.0.0-beta1.zip similarity index 100% rename from core/src/test/resources/indices/bwc/repo-2.0.0-beta1.zip rename to core/src/test/resources/indices/bwc/unsupportedrepo-2.0.0-beta1.zip diff --git a/core/src/test/resources/indices/bwc/repo-2.0.0-beta2.zip b/core/src/test/resources/indices/bwc/unsupportedrepo-2.0.0-beta2.zip similarity index 100% rename from core/src/test/resources/indices/bwc/repo-2.0.0-beta2.zip rename to core/src/test/resources/indices/bwc/unsupportedrepo-2.0.0-beta2.zip diff --git a/core/src/test/resources/indices/bwc/repo-2.0.0-rc1.zip b/core/src/test/resources/indices/bwc/unsupportedrepo-2.0.0-rc1.zip similarity index 100% rename from core/src/test/resources/indices/bwc/repo-2.0.0-rc1.zip rename to core/src/test/resources/indices/bwc/unsupportedrepo-2.0.0-rc1.zip diff --git a/core/src/test/resources/indices/bwc/repo-2.0.0.zip b/core/src/test/resources/indices/bwc/unsupportedrepo-2.0.0.zip similarity index 100% rename from core/src/test/resources/indices/bwc/repo-2.0.0.zip rename to core/src/test/resources/indices/bwc/unsupportedrepo-2.0.0.zip diff --git a/core/src/test/resources/indices/bwc/repo-2.0.1.zip b/core/src/test/resources/indices/bwc/unsupportedrepo-2.0.1.zip similarity index 100% rename from core/src/test/resources/indices/bwc/repo-2.0.1.zip rename to core/src/test/resources/indices/bwc/unsupportedrepo-2.0.1.zip diff --git a/core/src/test/resources/indices/bwc/repo-2.0.2.zip b/core/src/test/resources/indices/bwc/unsupportedrepo-2.0.2.zip similarity index 100% rename from core/src/test/resources/indices/bwc/repo-2.0.2.zip rename to core/src/test/resources/indices/bwc/unsupportedrepo-2.0.2.zip diff --git a/core/src/test/resources/indices/bwc/repo-2.1.0.zip b/core/src/test/resources/indices/bwc/unsupportedrepo-2.1.0.zip similarity index 100% rename from core/src/test/resources/indices/bwc/repo-2.1.0.zip rename to core/src/test/resources/indices/bwc/unsupportedrepo-2.1.0.zip diff --git a/core/src/test/resources/indices/bwc/repo-2.1.1.zip b/core/src/test/resources/indices/bwc/unsupportedrepo-2.1.1.zip similarity index 100% rename from core/src/test/resources/indices/bwc/repo-2.1.1.zip rename to core/src/test/resources/indices/bwc/unsupportedrepo-2.1.1.zip diff --git a/core/src/test/resources/indices/bwc/repo-2.1.2.zip b/core/src/test/resources/indices/bwc/unsupportedrepo-2.1.2.zip similarity index 100% rename from core/src/test/resources/indices/bwc/repo-2.1.2.zip rename to core/src/test/resources/indices/bwc/unsupportedrepo-2.1.2.zip diff --git a/core/src/test/resources/indices/bwc/repo-2.2.0.zip b/core/src/test/resources/indices/bwc/unsupportedrepo-2.2.0.zip similarity index 100% rename from core/src/test/resources/indices/bwc/repo-2.2.0.zip rename to core/src/test/resources/indices/bwc/unsupportedrepo-2.2.0.zip diff --git 
a/core/src/test/resources/indices/bwc/repo-2.2.1.zip b/core/src/test/resources/indices/bwc/unsupportedrepo-2.2.1.zip similarity index 100% rename from core/src/test/resources/indices/bwc/repo-2.2.1.zip rename to core/src/test/resources/indices/bwc/unsupportedrepo-2.2.1.zip diff --git a/core/src/test/resources/indices/bwc/repo-2.2.2.zip b/core/src/test/resources/indices/bwc/unsupportedrepo-2.2.2.zip similarity index 100% rename from core/src/test/resources/indices/bwc/repo-2.2.2.zip rename to core/src/test/resources/indices/bwc/unsupportedrepo-2.2.2.zip diff --git a/core/src/test/resources/indices/bwc/repo-2.3.0.zip b/core/src/test/resources/indices/bwc/unsupportedrepo-2.3.0.zip similarity index 100% rename from core/src/test/resources/indices/bwc/repo-2.3.0.zip rename to core/src/test/resources/indices/bwc/unsupportedrepo-2.3.0.zip diff --git a/core/src/test/resources/indices/bwc/repo-2.3.1.zip b/core/src/test/resources/indices/bwc/unsupportedrepo-2.3.1.zip similarity index 100% rename from core/src/test/resources/indices/bwc/repo-2.3.1.zip rename to core/src/test/resources/indices/bwc/unsupportedrepo-2.3.1.zip diff --git a/core/src/test/resources/indices/bwc/repo-2.3.2.zip b/core/src/test/resources/indices/bwc/unsupportedrepo-2.3.2.zip similarity index 100% rename from core/src/test/resources/indices/bwc/repo-2.3.2.zip rename to core/src/test/resources/indices/bwc/unsupportedrepo-2.3.2.zip diff --git a/core/src/test/resources/indices/bwc/repo-2.3.3.zip b/core/src/test/resources/indices/bwc/unsupportedrepo-2.3.3.zip similarity index 100% rename from core/src/test/resources/indices/bwc/repo-2.3.3.zip rename to core/src/test/resources/indices/bwc/unsupportedrepo-2.3.3.zip diff --git a/core/src/test/resources/indices/bwc/repo-2.3.4.zip b/core/src/test/resources/indices/bwc/unsupportedrepo-2.3.4.zip similarity index 100% rename from core/src/test/resources/indices/bwc/repo-2.3.4.zip rename to core/src/test/resources/indices/bwc/unsupportedrepo-2.3.4.zip diff --git a/core/src/test/resources/indices/bwc/repo-2.3.5.zip b/core/src/test/resources/indices/bwc/unsupportedrepo-2.3.5.zip similarity index 100% rename from core/src/test/resources/indices/bwc/repo-2.3.5.zip rename to core/src/test/resources/indices/bwc/unsupportedrepo-2.3.5.zip diff --git a/core/src/test/resources/indices/bwc/repo-2.4.0.zip b/core/src/test/resources/indices/bwc/unsupportedrepo-2.4.0.zip similarity index 100% rename from core/src/test/resources/indices/bwc/repo-2.4.0.zip rename to core/src/test/resources/indices/bwc/unsupportedrepo-2.4.0.zip diff --git a/core/src/test/resources/indices/bwc/repo-2.4.1.zip b/core/src/test/resources/indices/bwc/unsupportedrepo-2.4.1.zip similarity index 100% rename from core/src/test/resources/indices/bwc/repo-2.4.1.zip rename to core/src/test/resources/indices/bwc/unsupportedrepo-2.4.1.zip diff --git a/core/src/test/resources/indices/bwc/repo-2.4.2.zip b/core/src/test/resources/indices/bwc/unsupportedrepo-2.4.2.zip similarity index 100% rename from core/src/test/resources/indices/bwc/repo-2.4.2.zip rename to core/src/test/resources/indices/bwc/unsupportedrepo-2.4.2.zip diff --git a/core/src/test/resources/org/elasticsearch/action/admin/indices/template/logstash-5.0.template.json b/core/src/test/resources/org/elasticsearch/action/admin/indices/template/logstash-5.0.template.json index fc6406c457a..1cabbeb3d0c 100644 --- a/core/src/test/resources/org/elasticsearch/action/admin/indices/template/logstash-5.0.template.json +++ 
b/core/src/test/resources/org/elasticsearch/action/admin/indices/template/logstash-5.0.template.json @@ -1,5 +1,6 @@ { "template" : "logstash-*", + "version" : 50001, "settings" : { "index.refresh_interval" : "5s" }, @@ -8,11 +9,11 @@ "_all" : {"enabled" : true, "norms" : false}, "dynamic_templates" : [ { "message_field" : { - "match" : "message", + "path_match" : "message", "match_mapping_type" : "string", "mapping" : { - "type" : "string", "index" : "analyzed", "norms" : false, - "fielddata" : { "format" : "disabled" } + "type" : "text", + "norms" : false } } }, { diff --git a/distribution/licenses/joda-time-2.9.5.jar.sha1 b/distribution/licenses/joda-time-2.9.5.jar.sha1 deleted file mode 100644 index ecf1c781556..00000000000 --- a/distribution/licenses/joda-time-2.9.5.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5f01da7306363fad2028b916f3eab926262de928 \ No newline at end of file diff --git a/distribution/src/main/packaging/scripts/postinst b/distribution/src/main/packaging/scripts/postinst index 3d5eeeedd8f..d9f7e6ae126 100644 --- a/distribution/src/main/packaging/scripts/postinst +++ b/distribution/src/main/packaging/scripts/postinst @@ -52,10 +52,17 @@ case "$1" in esac # to pick up /usr/lib/sysctl.d/elasticsearch.conf -if command -v /usr/lib/systemd/systemd-sysctl > /dev/null; then - /usr/lib/systemd/systemd-sysctl -elif command -v /lib/systemd/systemd-sysctl > /dev/null; then - /lib/systemd/systemd-sysctl +if [ "${ES_SKIP_SET_KERNEL_PARAMETERS:-false}" == "false" ]; then + if command -v /usr/lib/systemd/systemd-sysctl > /dev/null; then + /usr/lib/systemd/systemd-sysctl + elif command -v /lib/systemd/systemd-sysctl > /dev/null; then + /lib/systemd/systemd-sysctl + fi +elif [ "$ES_SKIP_SET_KERNEL_PARAMETERS" == "true" ]; then + echo "skipping setting kernel parameters" +else + echo "unrecognized value [$ES_SKIP_SET_KERNEL_PARAMETERS] for ES_SKIP_SET_KERNEL_PARAMETERS; must [false] (default) or [true]" + exit 1 fi if [ "x$IS_UPGRADE" != "xtrue" ]; then diff --git a/distribution/src/main/resources/config/jvm.options b/distribution/src/main/resources/config/jvm.options index 37c4d5b3c93..11ec1cd66dc 100644 --- a/distribution/src/main/resources/config/jvm.options +++ b/distribution/src/main/resources/config/jvm.options @@ -47,9 +47,12 @@ ## basic -# force the server VM +# force the server VM (remove on 32-bit client JVMs) -server +# explicitly set the stack size (reduce to 320k on 32-bit client JVMs) +-Xss1m + # set to headless, just in case -Djava.awt.headless=true diff --git a/docs/build.gradle b/docs/build.gradle index ec9800bab0f..891a8400e4c 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -92,6 +92,7 @@ buildRestTests.expectedUnconvertedCandidates = [ 'reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc', 'reference/analysis/tokenfilters/stop-tokenfilter.asciidoc', 'reference/analysis/tokenfilters/synonym-tokenfilter.asciidoc', + 'reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc', 'reference/analysis/tokenfilters/word-delimiter-tokenfilter.asciidoc', 'reference/cat/recovery.asciidoc', 'reference/cat/shards.asciidoc', @@ -343,3 +344,28 @@ buildRestTests.doFirst { buildRestTests.setups['bank'] = buildRestTests.setups['bank'].replace('#bank_data#', accounts) } + +buildRestTests.setups['range_index'] = ''' + - do : + indices.create: + index: range_index + body: + settings: + number_of_shards: 2 + number_of_replicas: 1 + mappings: + my_type: + properties: + expected_attendees: + type: integer_range + time_frame: + type: date_range + format: yyyy-MM-dd 
HH:mm:ss||yyyy-MM-dd||epoch_millis + - do: + bulk: + index: range_index + type: my_type + refresh: true + body: | + {"index":{"_id": 1}} + {"expected_attendees": {"gte": 10, "lte": 20}, "time_frame": {"gte": "2015-10-31 12:00:00", "lte": "2015-11-01"}}''' diff --git a/docs/community-clients/index.asciidoc b/docs/community-clients/index.asciidoc index 26520aedabf..6905e284fb5 100644 --- a/docs/community-clients/index.asciidoc +++ b/docs/community-clients/index.asciidoc @@ -241,6 +241,9 @@ The following project appears to be abandoned: [[smalltalk]] == Smalltalk +* https://github.com/newapplesho/elasticsearch-smalltalk[elasticsearch-smalltalk] - + Pharo Smalltalk client for Elasticsearch + * http://ss3.gemstone.com/ss/Elasticsearch.html[Elasticsearch] - Smalltalk client for Elasticsearch diff --git a/docs/java-api/aggregations/bucket/datehistogram-aggregation.asciidoc b/docs/java-api/aggregations/bucket/datehistogram-aggregation.asciidoc index 9617fbc50f7..99b871730e6 100644 --- a/docs/java-api/aggregations/bucket/datehistogram-aggregation.asciidoc +++ b/docs/java-api/aggregations/bucket/datehistogram-aggregation.asciidoc @@ -16,7 +16,7 @@ AggregationBuilder aggregation = AggregationBuilders .dateHistogram("agg") .field("dateOfBirth") - .interval(DateHistogramInterval.YEAR); + .dateHistogramInterval(DateHistogramInterval.YEAR); -------------------------------------------------- Or if you want to set an interval of 10 days: @@ -27,7 +27,7 @@ AggregationBuilder aggregation = AggregationBuilders .dateHistogram("agg") .field("dateOfBirth") - .interval(DateHistogramInterval.days(10)); + .dateHistogramInterval(DateHistogramInterval.days(10)); -------------------------------------------------- diff --git a/docs/java-api/aggregations/bucket/filters-aggregation.asciidoc b/docs/java-api/aggregations/bucket/filters-aggregation.asciidoc index 1734bff6a8a..0b782304dac 100644 --- a/docs/java-api/aggregations/bucket/filters-aggregation.asciidoc +++ b/docs/java-api/aggregations/bucket/filters-aggregation.asciidoc @@ -14,8 +14,9 @@ Here is an example on how to create the aggregation request: -------------------------------------------------- AggregationBuilder aggregation = AggregationBuilders - .filters("agg", new KeyedFilter("men", QueryBuilders.termQuery("gender", "male")), - new KeyedFilter("women", QueryBuilders.termQuery("gender", "female"))); + .filters("agg", + new FiltersAggregator.KeyedFilter("men", QueryBuilders.termQuery("gender", "male")), + new FiltersAggregator.KeyedFilter("women", QueryBuilders.termQuery("gender", "female"))); -------------------------------------------------- diff --git a/docs/java-api/aggregations/bucket/histogram-aggregation.asciidoc b/docs/java-api/aggregations/bucket/histogram-aggregation.asciidoc index bc1803e8617..28e9cd3ecd0 100644 --- a/docs/java-api/aggregations/bucket/histogram-aggregation.asciidoc +++ b/docs/java-api/aggregations/bucket/histogram-aggregation.asciidoc @@ -36,7 +36,7 @@ Histogram agg = sr.getAggregations().get("agg"); // For each entry for (Histogram.Bucket entry : agg.getBuckets()) { - Long key = (Long) entry.getKey(); // Key + Number key = (Number) entry.getKey(); // Key long docCount = entry.getDocCount(); // Doc count logger.info("key [{}], doc_count [{}]", key, docCount); diff --git a/docs/java-api/aggregations/bucket/reverse-nested-aggregation.asciidoc b/docs/java-api/aggregations/bucket/reverse-nested-aggregation.asciidoc index 688e9d7d075..635b0e8cf77 100644 --- a/docs/java-api/aggregations/bucket/reverse-nested-aggregation.asciidoc +++ 
b/docs/java-api/aggregations/bucket/reverse-nested-aggregation.asciidoc @@ -14,7 +14,7 @@ Here is an example on how to create the aggregation request: -------------------------------------------------- AggregationBuilder aggregation = AggregationBuilders - .nested("agg").path("resellers") + .nested("agg", "resellers") .subAggregation( AggregationBuilders .terms("name").field("resellers.name") diff --git a/docs/java-api/aggregations/metrics/avg-aggregation.asciidoc b/docs/java-api/aggregations/metrics/avg-aggregation.asciidoc index 74c35c0898e..511cbabf5c8 100644 --- a/docs/java-api/aggregations/metrics/avg-aggregation.asciidoc +++ b/docs/java-api/aggregations/metrics/avg-aggregation.asciidoc @@ -12,7 +12,7 @@ Here is an example on how to create the aggregation request: [source,java] -------------------------------------------------- -MetricsAggregationBuilder aggregation = +AvgAggregationBuilder aggregation = AggregationBuilders .avg("agg") .field("height"); diff --git a/docs/java-api/aggregations/metrics/cardinality-aggregation.asciidoc b/docs/java-api/aggregations/metrics/cardinality-aggregation.asciidoc index edde64c1ed2..8a854e553f4 100644 --- a/docs/java-api/aggregations/metrics/cardinality-aggregation.asciidoc +++ b/docs/java-api/aggregations/metrics/cardinality-aggregation.asciidoc @@ -12,7 +12,7 @@ Here is an example on how to create the aggregation request: [source,java] -------------------------------------------------- -MetricsAggregationBuilder aggregation = +CardinalityAggregationBuilder aggregation = AggregationBuilders .cardinality("agg") .field("tags"); diff --git a/docs/java-api/aggregations/metrics/extendedstats-aggregation.asciidoc b/docs/java-api/aggregations/metrics/extendedstats-aggregation.asciidoc index 20d8db036d9..8f2f12ede68 100644 --- a/docs/java-api/aggregations/metrics/extendedstats-aggregation.asciidoc +++ b/docs/java-api/aggregations/metrics/extendedstats-aggregation.asciidoc @@ -12,7 +12,7 @@ Here is an example on how to create the aggregation request: [source,java] -------------------------------------------------- -MetricsAggregationBuilder aggregation = +ExtendedStatsAggregationBuilder aggregation = AggregationBuilders .extendedStats("agg") .field("height"); diff --git a/docs/java-api/aggregations/metrics/geobounds-aggregation.asciidoc b/docs/java-api/aggregations/metrics/geobounds-aggregation.asciidoc index ef91d0b7000..ecffabbd583 100644 --- a/docs/java-api/aggregations/metrics/geobounds-aggregation.asciidoc +++ b/docs/java-api/aggregations/metrics/geobounds-aggregation.asciidoc @@ -13,7 +13,7 @@ Here is an example on how to create the aggregation request: [source,java] -------------------------------------------------- GeoBoundsBuilder aggregation = - AggregationBuilders + GeoBoundsAggregationBuilder .geoBounds("agg") .field("address.location") .wrapLongitude(true); diff --git a/docs/java-api/aggregations/metrics/max-aggregation.asciidoc b/docs/java-api/aggregations/metrics/max-aggregation.asciidoc index 765e8c9eaa5..9bd39369842 100644 --- a/docs/java-api/aggregations/metrics/max-aggregation.asciidoc +++ b/docs/java-api/aggregations/metrics/max-aggregation.asciidoc @@ -12,7 +12,7 @@ Here is an example on how to create the aggregation request: [source,java] -------------------------------------------------- -MetricsAggregationBuilder aggregation = +MaxAggregationBuilder aggregation = AggregationBuilders .max("agg") .field("height"); diff --git a/docs/java-api/aggregations/metrics/min-aggregation.asciidoc 
b/docs/java-api/aggregations/metrics/min-aggregation.asciidoc index 53df26897aa..0205cae44d8 100644 --- a/docs/java-api/aggregations/metrics/min-aggregation.asciidoc +++ b/docs/java-api/aggregations/metrics/min-aggregation.asciidoc @@ -12,7 +12,7 @@ Here is an example on how to create the aggregation request: [source,java] -------------------------------------------------- -MetricsAggregationBuilder aggregation = +MinAggregationBuilder aggregation = AggregationBuilders .min("agg") .field("height"); diff --git a/docs/java-api/aggregations/metrics/percentile-aggregation.asciidoc b/docs/java-api/aggregations/metrics/percentile-aggregation.asciidoc index a01c291e3ee..ad54fbf5a46 100644 --- a/docs/java-api/aggregations/metrics/percentile-aggregation.asciidoc +++ b/docs/java-api/aggregations/metrics/percentile-aggregation.asciidoc @@ -12,7 +12,7 @@ Here is an example on how to create the aggregation request: [source,java] -------------------------------------------------- -MetricsAggregationBuilder aggregation = +PercentilesAggregationBuilder aggregation = AggregationBuilders .percentiles("agg") .field("height"); @@ -22,7 +22,7 @@ You can provide your own percentiles instead of using defaults: [source,java] -------------------------------------------------- -MetricsAggregationBuilder aggregation = +PercentilesAggregationBuilder aggregation = AggregationBuilders .percentiles("agg") .field("height") diff --git a/docs/java-api/aggregations/metrics/percentile-rank-aggregation.asciidoc b/docs/java-api/aggregations/metrics/percentile-rank-aggregation.asciidoc index 5cb615b8a76..a846d59f820 100644 --- a/docs/java-api/aggregations/metrics/percentile-rank-aggregation.asciidoc +++ b/docs/java-api/aggregations/metrics/percentile-rank-aggregation.asciidoc @@ -12,11 +12,11 @@ Here is an example on how to create the aggregation request: [source,java] -------------------------------------------------- -MetricsAggregationBuilder aggregation = +PercentileRanksAggregationBuilder aggregation = AggregationBuilders .percentileRanks("agg") .field("height") - .percentiles(1.24, 1.91, 2.22); + .values(1.24, 1.91, 2.22); -------------------------------------------------- diff --git a/docs/java-api/aggregations/metrics/scripted-metric-aggregation.asciidoc b/docs/java-api/aggregations/metrics/scripted-metric-aggregation.asciidoc index 6b084c13ea8..4ac9701a2f4 100644 --- a/docs/java-api/aggregations/metrics/scripted-metric-aggregation.asciidoc +++ b/docs/java-api/aggregations/metrics/scripted-metric-aggregation.asciidoc @@ -11,21 +11,20 @@ Here is an example on how to create the aggregation request: [source,java] -------------------------------------------------- -MetricsAggregationBuilder aggregation = - AggregationBuilders - .scriptedMetric("agg") - .initScript("_agg['heights'] = []") - .mapScript(new Script("if (doc['gender'].value == \"male\") " + - "{ _agg.heights.add(doc['height'].value) } " + - "else " + - "{ _agg.heights.add(-1 * doc['height'].value) }")); +ScriptedMetricAggregationBuilder aggregation = AggregationBuilders + .scriptedMetric("agg") + .initScript(new Script("_agg['heights'] = []")) + .mapScript(new Script("if (doc['gender'].value == \"male\") " + + "{ _agg.heights.add(doc['height'].value) } " + + "else " + + "{ _agg.heights.add(-1 * doc['height'].value) }")); -------------------------------------------------- You can also specify a `combine` script which will be executed on each shard: [source,java] -------------------------------------------------- -MetricsAggregationBuilder aggregation = 
+ScriptedMetricAggregationBuilder aggregation = AggregationBuilders .scriptedMetric("agg") .initScript(new Script("_agg['heights'] = []")) @@ -40,7 +39,7 @@ You can also specify a `reduce` script which will be executed on the node which [source,java] -------------------------------------------------- -MetricsAggregationBuilder aggregation = +ScriptedMetricAggregationBuilder aggregation = AggregationBuilders .scriptedMetric("agg") .initScript(new Script("_agg['heights'] = []")) diff --git a/docs/java-api/aggregations/metrics/stats-aggregation.asciidoc b/docs/java-api/aggregations/metrics/stats-aggregation.asciidoc index 094c372966b..260d9c01cb9 100644 --- a/docs/java-api/aggregations/metrics/stats-aggregation.asciidoc +++ b/docs/java-api/aggregations/metrics/stats-aggregation.asciidoc @@ -12,7 +12,7 @@ Here is an example on how to create the aggregation request: [source,java] -------------------------------------------------- -MetricsAggregationBuilder aggregation = +StatsAggregationBuilder aggregation = AggregationBuilders .stats("agg") .field("height"); diff --git a/docs/java-api/aggregations/metrics/sum-aggregation.asciidoc b/docs/java-api/aggregations/metrics/sum-aggregation.asciidoc index 6aea336ff72..453616916d7 100644 --- a/docs/java-api/aggregations/metrics/sum-aggregation.asciidoc +++ b/docs/java-api/aggregations/metrics/sum-aggregation.asciidoc @@ -12,7 +12,7 @@ Here is an example on how to create the aggregation request: [source,java] -------------------------------------------------- -MetricsAggregationBuilder aggregation = +SumAggregationBuilder aggregation = AggregationBuilders .sum("agg") .field("height"); diff --git a/docs/java-api/aggregations/metrics/valuecount-aggregation.asciidoc b/docs/java-api/aggregations/metrics/valuecount-aggregation.asciidoc index 1acf7cea481..b180d22af33 100644 --- a/docs/java-api/aggregations/metrics/valuecount-aggregation.asciidoc +++ b/docs/java-api/aggregations/metrics/valuecount-aggregation.asciidoc @@ -12,7 +12,7 @@ Here is an example on how to create the aggregation request: [source,java] -------------------------------------------------- -MetricsAggregationBuilder aggregation = +ValueCountAggregationBuilder aggregation = AggregationBuilders .count("agg") .field("height"); diff --git a/docs/java-api/query-dsl/compound-queries.asciidoc b/docs/java-api/query-dsl/compound-queries.asciidoc index 6d7e5e19b74..b93e3b694a5 100644 --- a/docs/java-api/query-dsl/compound-queries.asciidoc +++ b/docs/java-api/query-dsl/compound-queries.asciidoc @@ -37,15 +37,9 @@ implemented with scripting. Return documents which match a `positive` query, but reduce the score of documents which also match a `negative` query. -<>:: - -Execute one query for the specified indices, and another for other indices. 
- include::constant-score-query.asciidoc[] include::bool-query.asciidoc[] include::dis-max-query.asciidoc[] include::function-score-query.asciidoc[] include::boosting-query.asciidoc[] -include::indices-query.asciidoc[] - diff --git a/docs/java-api/query-dsl/function-score-query.asciidoc b/docs/java-api/query-dsl/function-score-query.asciidoc index 0915814ae1b..9e731edb8ba 100644 --- a/docs/java-api/query-dsl/function-score-query.asciidoc +++ b/docs/java-api/query-dsl/function-score-query.asciidoc @@ -19,6 +19,7 @@ FilterFunctionBuilder[] functions = { new FunctionScoreQueryBuilder.FilterFunctionBuilder( exponentialDecayFunction("age", 0L, 1L)) <3> }; +QueryBuilder qb = QueryBuilders.functionScoreQuery(functions); -------------------------------------------------- <1> Add a first function based on a query <2> And randomize the score based on a given seed diff --git a/docs/java-api/query-dsl/geo-distance-query.asciidoc b/docs/java-api/query-dsl/geo-distance-query.asciidoc index 4209c329f7c..d03ba2017f5 100644 --- a/docs/java-api/query-dsl/geo-distance-query.asciidoc +++ b/docs/java-api/query-dsl/geo-distance-query.asciidoc @@ -7,15 +7,9 @@ See {ref}/query-dsl-geo-distance-query.html[Geo Distance Query] -------------------------------------------------- QueryBuilder qb = geoDistanceQuery("pin.location") <1> .point(40, -70) <2> - .distance(200, DistanceUnit.KILOMETERS) <3> - .optimizeBbox("memory") <4> - .geoDistance(GeoDistance.ARC); <5> - + .distance(200, DistanceUnit.KILOMETERS); <3> -------------------------------------------------- <1> field <2> center point <3> distance from center point -<4> optimize bounding box: `memory`, `indexed` or `none` -<5> distance computation mode: `GeoDistance.SLOPPY_ARC` (default), `GeoDistance.ARC` (slightly more precise but - significantly slower) or `GeoDistance.PLANE` (faster, but inaccurate on long distances and close to the poles) diff --git a/docs/java-api/query-dsl/geo-distance-range-query.asciidoc b/docs/java-api/query-dsl/geo-distance-range-query.asciidoc deleted file mode 100644 index 1abe02a5e81..00000000000 --- a/docs/java-api/query-dsl/geo-distance-range-query.asciidoc +++ /dev/null @@ -1,26 +0,0 @@ -[[java-query-dsl-geo-distance-range-query]] -==== Geo Distance Range Query - -See {ref}/query-dsl-geo-distance-range-query.html[Geo Distance Range Query] - -[source,java] --------------------------------------------------- -QueryBuilder qb = geoDistanceRangeQuery("pin.location", <1> - new GeoPoint(40, -70)) <2> - .from("200km") <3> - .to("400km") <4> - .includeLower(true) <5> - .includeUpper(false) <6> - .optimizeBbox("memory") <7> - .geoDistance(GeoDistance.ARC); <8> --------------------------------------------------- -<1> field -<2> center point -<3> starting distance from center point -<4> ending distance from center point -<5> include lower value means that `from` is `gt` when `false` or `gte` when `true` -<6> include upper value means that `to` is `lt` when `false` or `lte` when `true` -<7> optimize bounding box: `memory`, `indexed` or `none` -<8> distance computation mode: `GeoDistance.SLOPPY_ARC` (default), `GeoDistance.ARC` (slightly more precise but - significantly slower) or `GeoDistance.PLANE` (faster, but inaccurate on long distances and close to the poles) - diff --git a/docs/java-api/query-dsl/geo-polygon-query.asciidoc b/docs/java-api/query-dsl/geo-polygon-query.asciidoc index 1ee344c3098..362e1751bb9 100644 --- a/docs/java-api/query-dsl/geo-polygon-query.asciidoc +++ b/docs/java-api/query-dsl/geo-polygon-query.asciidoc @@ -5,7 +5,7 
@@ See {ref}/query-dsl-geo-polygon-query.html[Geo Polygon Query] [source,java] -------------------------------------------------- -List points = new ArrayList(); <1> +List points = new ArrayList<>(); <1> points.add(new GeoPoint(40, -70)); points.add(new GeoPoint(30, -80)); points.add(new GeoPoint(20, -90)); diff --git a/docs/java-api/query-dsl/geo-queries.asciidoc b/docs/java-api/query-dsl/geo-queries.asciidoc index 55184bde326..10df4ff5e87 100644 --- a/docs/java-api/query-dsl/geo-queries.asciidoc +++ b/docs/java-api/query-dsl/geo-queries.asciidoc @@ -21,11 +21,6 @@ The queries in this group are: Finds document with geo-points within the specified distance of a central point. -<> query:: - - Like the `geo_point` query, but the range starts at a specified distance - from the central point. - <> query:: Find documents with geo-points within the specified polygon. @@ -36,6 +31,4 @@ include::geo-bounding-box-query.asciidoc[] include::geo-distance-query.asciidoc[] -include::geo-distance-range-query.asciidoc[] - include::geo-polygon-query.asciidoc[] diff --git a/docs/java-api/query-dsl/geo-shape-query.asciidoc b/docs/java-api/query-dsl/geo-shape-query.asciidoc index e08410acbdb..62e8ee199c9 100644 --- a/docs/java-api/query-dsl/geo-shape-query.asciidoc +++ b/docs/java-api/query-dsl/geo-shape-query.asciidoc @@ -39,31 +39,32 @@ import org.elasticsearch.common.geo.builders.ShapeBuilder; [source,java] -------------------------------------------------- -GeoShapeQueryBuilder qb = geoShapeQuery( - "pin.location", <1> - ShapeBuilder.newMultiPoint() <2> - .point(0, 0) - .point(0, 10) - .point(10, 10) - .point(10, 0) - .point(0, 0)); -qb.relation(ShapeRelation.WITHIN); <3> +List points = new ArrayList<>(); +points.add(new Coordinate(0, 0)); +points.add(new Coordinate(0, 10)); +points.add(new Coordinate(10, 10)); +points.add(new Coordinate(10, 0)); +points.add(new Coordinate(0, 0)); +QueryBuilder qb = geoShapeQuery( + "pin.location", <1> + ShapeBuilders.newMultiPoint(points) <2> + .relation(ShapeRelation.WITHIN); <3> -------------------------------------------------- <1> field <2> shape -<3> relation can be `ShapeRelation.WITHIN`, `ShapeRelation.INTERSECTS` or `ShapeRelation.DISJOINT` +<3> relation can be `ShapeRelation.CONTAINS`, `ShapeRelation.WITHIN`, `ShapeRelation.INTERSECTS` or `ShapeRelation.DISJOINT` [source,java] -------------------------------------------------- // Using pre-indexed shapes -GeoShapeQueryBuilder qb = geoShapeQuery( - "pin.location", <1> - "DEU", <2> - "countries"); <3> -qb.relation(ShapeRelation.WITHIN)) <4> - .indexedShapeIndex("shapes") <5> - .indexedShapePath("location"); <6> +QueryBuilder qb = geoShapeQuery( + "pin.location", <1> + "DEU", <2> + "countries") <3> + .relation(ShapeRelation.WITHIN)) <4> + .indexedShapeIndex("shapes") <5> + .indexedShapePath("location"); <6> -------------------------------------------------- <1> field <2> The ID of the document that containing the pre-indexed shape. 
diff --git a/docs/java-api/query-dsl/has-child-query.asciidoc b/docs/java-api/query-dsl/has-child-query.asciidoc index 755bb9a820f..f6995227e98 100644 --- a/docs/java-api/query-dsl/has-child-query.asciidoc +++ b/docs/java-api/query-dsl/has-child-query.asciidoc @@ -7,9 +7,11 @@ See {ref}/query-dsl-has-child-query.html[Has Child Query] -------------------------------------------------- QueryBuilder qb = hasChildQuery( "blog_tag", <1> - termQuery("tag","something") <2> + termQuery("tag","something"), <2> + ScoreMode.Avg <3> ); -------------------------------------------------- <1> child type to query against <2> query +<3> score mode can be `ScoreMode.Avg`, `ScoreMode.Max`, `ScoreMode.Min`, `ScoreMode.None` or `ScoreMode.Total` diff --git a/docs/java-api/query-dsl/has-parent-query.asciidoc b/docs/java-api/query-dsl/has-parent-query.asciidoc index 7c984346265..df608613833 100644 --- a/docs/java-api/query-dsl/has-parent-query.asciidoc +++ b/docs/java-api/query-dsl/has-parent-query.asciidoc @@ -7,8 +7,10 @@ See {ref}/query-dsl-has-parent-query.html[Has Parent] -------------------------------------------------- QueryBuilder qb = hasParentQuery( "blog", <1> - termQuery("tag","something") <2> + termQuery("tag","something"), <2> + false <3> ); -------------------------------------------------- <1> parent type to query against <2> query +<3> whether the score from the parent hit should propagate to the child hit diff --git a/docs/java-api/query-dsl/indices-query.asciidoc b/docs/java-api/query-dsl/indices-query.asciidoc deleted file mode 100644 index 4667c5c62ff..00000000000 --- a/docs/java-api/query-dsl/indices-query.asciidoc +++ /dev/null @@ -1,29 +0,0 @@ -[[java-query-dsl-indices-query]] -==== Indices Query - -See {ref}/query-dsl-indices-query.html[Indices Query] - -[source,java] --------------------------------------------------- -// Using another query when no match for the main one -QueryBuilder qb = indicesQuery( - termQuery("tag", "wow"), <1> - "index1", "index2" <2> - ).noMatchQuery(termQuery("tag", "kow")); <3> --------------------------------------------------- -<1> query to be executed on selected indices -<2> selected indices -<3> query to be executed on non matching indices - -[source,java] --------------------------------------------------- -// Using all (match all) or none (match no documents) -QueryBuilder qb = indicesQuery( - termQuery("tag", "wow"), <1> - "index1", "index2" <2> - ).noMatchQuery("all"); <3> --------------------------------------------------- -<1> query to be executed on selected indices -<2> selected indices -<3> `none` (to match no documents), and `all` (to match all documents). Defaults to `all`.
- diff --git a/docs/java-api/query-dsl/mlt-query.asciidoc b/docs/java-api/query-dsl/mlt-query.asciidoc index 6be6cb7de2c..7104fe5efa0 100644 --- a/docs/java-api/query-dsl/mlt-query.asciidoc +++ b/docs/java-api/query-dsl/mlt-query.asciidoc @@ -1,5 +1,5 @@ [[java-query-dsl-mlt-query]] -==== More Like This Query (mlt) +==== More Like This Query See: * {ref}/query-dsl-mlt-query.html[More Like This Query] diff --git a/docs/java-api/query-dsl/nested-query.asciidoc b/docs/java-api/query-dsl/nested-query.asciidoc index 69fa1082c19..d6da597eecd 100644 --- a/docs/java-api/query-dsl/nested-query.asciidoc +++ b/docs/java-api/query-dsl/nested-query.asciidoc @@ -9,10 +9,10 @@ QueryBuilder qb = nestedQuery( "obj1", <1> boolQuery() <2> .must(matchQuery("obj1.name", "blue")) - .must(rangeQuery("obj1.count").gt(5)) - ) - .scoreMode(ScoreMode.Avg); <3> + .must(rangeQuery("obj1.count").gt(5)), + ScoreMode.Avg <3> + ); -------------------------------------------------- <1> path to nested document <2> your query. Any fields referenced inside the query must use the complete path (fully qualified). -<3> score mode could be `max`, `total`, `avg` (default) or `none` +<3> score mode could be `ScoreMode.Max`, `ScoreMode.Min`, `ScoreMode.Total`, `ScoreMode.Avg` or `ScoreMode.None` diff --git a/docs/java-api/query-dsl/script-query.asciidoc b/docs/java-api/query-dsl/script-query.asciidoc index 5d30cab4180..5378a6ae37e 100644 --- a/docs/java-api/query-dsl/script-query.asciidoc +++ b/docs/java-api/query-dsl/script-query.asciidoc @@ -25,10 +25,10 @@ You can use it then with: -------------------------------------------------- QueryBuilder qb = scriptQuery( new Script( - "myscript", <1> - ScriptType.FILE, <2> - "painless", <3> - ImmutableMap.of("param1", 5)) <4> + "myscript", <1> + ScriptType.FILE, <2> + "painless", <3> + Collections.singletonMap("param1", 5)) <4> ); -------------------------------------------------- <1> Script name diff --git a/docs/java-api/query-dsl/span-containing-query.asciidoc b/docs/java-api/query-dsl/span-containing-query.asciidoc index 81859eb93f3..a9f762f929d 100644 --- a/docs/java-api/query-dsl/span-containing-query.asciidoc +++ b/docs/java-api/query-dsl/span-containing-query.asciidoc @@ -7,7 +7,7 @@ See {ref}/query-dsl-span-containing-query.html[Span Containing Query] -------------------------------------------------- QueryBuilder qb = spanContainingQuery( spanNearQuery(spanTermQuery("field1","bar"), 5) <1> - .clause(spanTermQuery("field1","baz")) + .addClause(spanTermQuery("field1","baz")) .inOrder(true), spanTermQuery("field1","foo")); <2> -------------------------------------------------- diff --git a/docs/java-api/query-dsl/span-near-query.asciidoc b/docs/java-api/query-dsl/span-near-query.asciidoc index d18d2d74958..6e811d59581 100644 --- a/docs/java-api/query-dsl/span-near-query.asciidoc +++ b/docs/java-api/query-dsl/span-near-query.asciidoc @@ -8,13 +8,11 @@ See {ref}/query-dsl-span-near-query.html[Span Near Query] QueryBuilder qb = spanNearQuery( spanTermQuery("field","value1"), <1> 12) <2> - .clause(spanTermQuery("field","value2")) <1> - .clause(spanTermQuery("field","value3")) <1> - .inOrder(false) <3> - .collectPayloads(false); <4> + .addClause(spanTermQuery("field","value2")) <1> + .addClause(spanTermQuery("field","value3")) <1> + .inOrder(false); <3> -------------------------------------------------- <1> span term queries <2> slop factor: the maximum number of intervening unmatched positions <3> whether matches are required to be in-order -<4> collect payloads or not diff --git 
a/docs/java-api/query-dsl/span-or-query.asciidoc b/docs/java-api/query-dsl/span-or-query.asciidoc index 61f72a24fcf..54f566c4770 100644 --- a/docs/java-api/query-dsl/span-or-query.asciidoc +++ b/docs/java-api/query-dsl/span-or-query.asciidoc @@ -6,9 +6,9 @@ See {ref}/query-dsl-span-or-query.html[Span Or Query] [source,java] -------------------------------------------------- QueryBuilder qb = spanOrQuery( - spanTermQuery("field","value1")) <1> - .clause(spanTermQuery("field","value2")) <1> - .clause(spanTermQuery("field","value3")); <1> + spanTermQuery("field","value1")) <1> + .addClause(spanTermQuery("field","value2")) <1> + .addClause(spanTermQuery("field","value3")); <1> -------------------------------------------------- <1> span term queries diff --git a/docs/java-api/query-dsl/span-within-query.asciidoc b/docs/java-api/query-dsl/span-within-query.asciidoc index 345dabd8c12..0e8752785fb 100644 --- a/docs/java-api/query-dsl/span-within-query.asciidoc +++ b/docs/java-api/query-dsl/span-within-query.asciidoc @@ -7,7 +7,7 @@ See {ref}/query-dsl-span-within-query.html[Span Within Query] -------------------------------------------------- QueryBuilder qb = spanWithinQuery( spanNearQuery(spanTermQuery("field1", "bar"), 5) <1> - .clause(spanTermQuery("field1", "baz")) + .addClause(spanTermQuery("field1", "baz")) .inOrder(true), spanTermQuery("field1", "foo")); <2> -------------------------------------------------- diff --git a/docs/java-api/query-dsl/special-queries.asciidoc b/docs/java-api/query-dsl/special-queries.asciidoc index 31db47ce636..4e4d59a6d4a 100644 --- a/docs/java-api/query-dsl/special-queries.asciidoc +++ b/docs/java-api/query-dsl/special-queries.asciidoc @@ -9,12 +9,6 @@ This group contains queries which do not fit into the other groups: This query finds documents which are similar to the specified text, document, or collection of documents. -<>:: - -The `template` query accepts a Mustache template (either inline, indexed, or -from a file), and a map of parameters, and combines the two to generate the -final query to execute. - <>:: This query allows a script to act as a filter. Also see the @@ -26,8 +20,6 @@ This query finds percolator queries based on documents. 
include::mlt-query.asciidoc[] -include::template-query.asciidoc[] - include::script-query.asciidoc[] include::percolate-query.asciidoc[] diff --git a/docs/java-api/query-dsl/template-query.asciidoc b/docs/java-api/query-dsl/template-query.asciidoc deleted file mode 100644 index dfba0d63a63..00000000000 --- a/docs/java-api/query-dsl/template-query.asciidoc +++ /dev/null @@ -1,89 +0,0 @@ -[[java-query-dsl-template-query]] -==== Template Query - -See {ref}/search-template.html[Search Template] documentation - -In order to use the `template` query from the Java API -the lang-mustache module dependency should be on the classpath and -the transport client should be loaded with the lang-mustache plugin: - -[source,java] --------------------------------------------------- -TransportClient transportClient = TransportClient.builder() - .settings(Settings.builder().put("node.name", "node")) - .addPlugin(MustachePlugin.class) - .build(); -transportClient.addTransportAddress( - new InetSocketTransportAddress(new InetSocketAddress(InetAddresses.forString("127.0.0.1"), 9300)) -); --------------------------------------------------- - -Define your template parameters as a `Map`: - -[source,java] --------------------------------------------------- -Map template_params = new HashMap<>(); -template_params.put("param_gender", "male"); --------------------------------------------------- - -You can use your stored search templates in `config/scripts`. -For example, if you have a file named `config/scripts/template_gender.mustache` containing: - -[source,js] --------------------------------------------------- -{ - "template" : { - "query" : { - "match" : { - "gender" : "{{param_gender}}" - } - } - } -} --------------------------------------------------- -// NOTCONSOLE - -Define your template query: - -[source,java] --------------------------------------------------- -QueryBuilder qb = new TemplateQueryBuilder( - "gender_template", <1> - ScriptService.ScriptType.FILE, <2> - template_params); <3> --------------------------------------------------- -<1> template name -<2> template stored on disk in `gender_template.mustache` -<3> parameters - -You can also store your template in the cluster state: - -[source,java] --------------------------------------------------- -client.admin().cluster().preparePutStoredScript() - .setScriptLang("mustache") - .setId("template_gender") - .setSource(new BytesArray( - "{\n" + - " \"template\" : {\n" + - " \"query\" : {\n" + - " \"match\" : {\n" + - " \"gender\" : \"{{param_gender}}\"\n" + - " }\n" + - " }\n" + - " }\n" + - "}")).get(); --------------------------------------------------- - -To execute a stored templates, use `ScriptService.ScriptType.STORED`: - -[source,java] --------------------------------------------------- -QueryBuilder qb = new TemplateQueryBuilder( - "gender_template", <1> - ScriptType.STORED, <2> - template_params); <3> --------------------------------------------------- -<1> template name -<2> template stored in the cluster state -<3> parameters diff --git a/docs/java-api/search.asciidoc b/docs/java-api/search.asciidoc index 2da24e93c22..4b858105a26 100644 --- a/docs/java-api/search.asciidoc +++ b/docs/java-api/search.asciidoc @@ -21,8 +21,7 @@ SearchResponse response = client.prepareSearch("index1", "index2") .setQuery(QueryBuilders.termQuery("multi", "test")) // Query .setPostFilter(QueryBuilders.rangeQuery("age").from(12).to(18)) // Filter .setFrom(0).setSize(60).setExplain(true) - .execute() - .actionGet(); + .get(); 
-------------------------------------------------- Note that all parameters are optional. Here is the smallest search call @@ -31,7 +30,7 @@ you can write: [source,java] -------------------------------------------------- // MatchAll on the whole cluster with all default options -SearchResponse response = client.prepareSearch().execute().actionGet(); +SearchResponse response = client.prepareSearch().get(); -------------------------------------------------- NOTE: Although the Java API defines the additional search types QUERY_AND_FETCH and @@ -58,7 +57,7 @@ SearchResponse scrollResp = client.prepareSearch(test) .addSort(FieldSortBuilder.DOC_FIELD_NAME, SortOrder.ASC) .setScroll(new TimeValue(60000)) .setQuery(qb) - .setSize(100).execute().actionGet(); //max of 100 hits will be returned for each scroll + .setSize(100).get(); //max of 100 hits will be returned for each scroll //Scroll until no hits are returned do { for (SearchHit hit : scrollResp.getHits().getHits()) { @@ -85,7 +84,7 @@ SearchRequestBuilder srb2 = client MultiSearchResponse sr = client.prepareMultiSearch() .add(srb1) .add(srb2) - .execute().actionGet(); + .get(); // You will get all individual responses from MultiSearchResponse#getResponses() long nbHits = 0; @@ -113,7 +112,7 @@ SearchResponse sr = client.prepareSearch() .field("birth") .dateHistogramInterval(DateHistogramInterval.YEAR) ) - .execute().actionGet(); + .get(); // Get your facet results Terms agg1 = sr.getAggregations().get("agg1"); @@ -142,3 +141,115 @@ if (sr.isTerminatedEarly()) { } -------------------------------------------------- <1> Finish after 1000 docs + +[[java-search-template]] +=== Search Template + +See {ref}/search-template.html[Search Template] documentation + +Define your template parameters as a `Map`: + +[source,java] +-------------------------------------------------- +Map template_params = new HashMap<>(); +template_params.put("param_gender", "male"); +-------------------------------------------------- + +You can use your stored search templates in `config/scripts`. +For example, if you have a file named `config/scripts/template_gender.mustache` containing: + +[source,js] +-------------------------------------------------- +{ + "template" : { + "query" : { + "match" : { + "gender" : "{{param_gender}}" + } + } + } +} +-------------------------------------------------- +// NOTCONSOLE + +Create your search template request: + +[source,java] +-------------------------------------------------- +SearchResponse sr = new SearchTemplateRequestBuilder(client) + .setScript("template_gender") <1> + .setScriptType(ScriptService.ScriptType.FILE) <2> + .setScriptParams(template_params) <3> + .setRequest(new SearchRequest()) <4> + .get() <5> + .getResponse(); <6> +-------------------------------------------------- +<1> template name +<2> template stored on disk in `gender_template.mustache` +<3> parameters +<4> set the execution context (ie. 
define the index name here) +<5> execute and get the template response +<6> get from the template response the search response itself + +You can also store your template in the cluster state: + +[source,java] +-------------------------------------------------- +client.admin().cluster().preparePutStoredScript() + .setScriptLang("mustache") + .setId("template_gender") + .setSource(new BytesArray( + "{\n" + + " \"template\" : {\n" + + " \"query\" : {\n" + + " \"match\" : {\n" + + " \"gender\" : \"{{param_gender}}\"\n" + + " }\n" + + " }\n" + + " }\n" + + "}")).get(); +-------------------------------------------------- + +To execute a stored template, use `ScriptService.ScriptType.STORED`: + +[source,java] +-------------------------------------------------- +SearchResponse sr = new SearchTemplateRequestBuilder(client) + .setScript("template_gender") <1> + .setScriptType(ScriptService.ScriptType.STORED) <2> + .setScriptParams(template_params) <3> + .setRequest(new SearchRequest()) <4> + .get() <5> + .getResponse(); <6> +-------------------------------------------------- +<1> template name +<2> template stored in the cluster state +<3> parameters +<4> set the execution context (ie. define the index name here) +<5> execute and get the template response +<6> get from the template response the search response itself + +You can also execute inline templates: + +[source,java] +-------------------------------------------------- +sr = new SearchTemplateRequestBuilder(client) + .setScript("{\n" + <1> + " \"query\" : {\n" + + " \"match\" : {\n" + + " \"gender\" : \"{{param_gender}}\"\n" + + " }\n" + + " }\n" + + "}") + .setScriptType(ScriptService.ScriptType.INLINE) <2> + .setScriptParams(template_params) <3> + .setRequest(new SearchRequest()) <4> + .get() <5> + .getResponse(); <6> +-------------------------------------------------- +<1> template name +<2> template is passed inline +<3> parameters +<4> set the execution context (ie. define the index name here) +<5> execute and get the template response +<6> get from the template response the search response itself diff --git a/docs/reference/aggregations/metrics/max-aggregation.asciidoc b/docs/reference/aggregations/metrics/max-aggregation.asciidoc index 8cfc0bd998e..5527c08dd27 100644 --- a/docs/reference/aggregations/metrics/max-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/max-aggregation.asciidoc @@ -3,6 +3,10 @@ A `single-value` metrics aggregation that keeps track and returns the maximum value among the numeric values extracted from the aggregated documents. These values can be extracted either from specific numeric fields in the documents, or be generated by a provided script. +NOTE: The `min` and `max` aggregation operate on the `double` representation of +the data. As a consequence, the result may be approximate when running on longs +whose absolute value is greater than +2^53+. + Computing the max price value across all documents [source,js] diff --git a/docs/reference/aggregations/metrics/min-aggregation.asciidoc b/docs/reference/aggregations/metrics/min-aggregation.asciidoc index 819d70343fb..8cdd6f9e7db 100644 --- a/docs/reference/aggregations/metrics/min-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/min-aggregation.asciidoc @@ -3,6 +3,10 @@ A `single-value` metrics aggregation that keeps track and returns the minimum value among numeric values extracted from the aggregated documents. These values can be extracted either from specific numeric fields in the documents, or be generated by a provided script.
+NOTE: The `min` and `max` aggregation operate on the `double` representation of +the data. As a consequence, the result may be approximate when running on longs +whose absolute value is greater than +2^53+. + Computing the min price value across all documents: [source,js] diff --git a/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc b/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc index 2a629a88d64..f0ada57b37f 100644 --- a/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc @@ -110,7 +110,7 @@ For more details on specifying scripts see <> analyzers. A _character filter_ receives the original text as a stream of characters and can transform the stream by adding, removing, or changing characters. For -instance, a character filter could be used to convert Arabic numerals -(٠‎١٢٣٤٥٦٧٨‎٩‎) into their Latin equivalents (0123456789), or to strip HTML +instance, a character filter could be used to convert Hindu-Arabic numerals +(٠‎١٢٣٤٥٦٧٨‎٩‎) into their Arabic-Latin equivalents (0123456789), or to strip HTML elements like `` from the stream. An analyzer may have *zero or more* <>, diff --git a/docs/reference/analysis/tokenfilters.asciidoc b/docs/reference/analysis/tokenfilters.asciidoc index 89cce11a615..227947fb45e 100644 --- a/docs/reference/analysis/tokenfilters.asciidoc +++ b/docs/reference/analysis/tokenfilters.asciidoc @@ -47,6 +47,8 @@ include::tokenfilters/phonetic-tokenfilter.asciidoc[] include::tokenfilters/synonym-tokenfilter.asciidoc[] +include::tokenfilters/synonym-graph-tokenfilter.asciidoc[] + include::tokenfilters/compound-word-tokenfilter.asciidoc[] include::tokenfilters/reverse-tokenfilter.asciidoc[] diff --git a/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc new file mode 100644 index 00000000000..16758ad6ad2 --- /dev/null +++ b/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc @@ -0,0 +1,152 @@ +[[analysis-synonym-graph-tokenfilter]] +=== Synonym Graph Token Filter + +experimental[] + +The `synonym_graph` token filter allows to easily handle synonyms, +including multi-word synonyms correctly during the analysis process. + +In order to properly handle multi-word synonyms this token filter +creates a "graph token stream" during processing. For more information +on this topic and it's various complexities, please read +http://blog.mikemccandless.com/2012/04/lucenes-tokenstreams-are-actually.html[Lucene's TokenStreams are actually graphs!] +by Michael McCandless. + +["NOTE",id="synonym-graph-index-note"] +=============================== +This token filter is designed to be used as part of a search analyzer +only. If you want to apply synonyms during indexing please use the +standard <>. +=============================== + +["NOTE",id="synonym-graph-query-note"] +=============================== +The graph token stream created by this token filter requires special +query handling. Currently only the <> and +<> queries can do this. Using +it with any other type of analyzed query will potentially result in +incorrect search results. +=============================== + +Synonyms are configured using a configuration file. 
+Here is an example: + +[source,js] +-------------------------------------------------- +{ + "index" : { + "analysis" : { + "analyzer" : { + "search_synonyms" : { + "tokenizer" : "whitespace", + "filter" : ["graph_synonyms"] + } + }, + "filter" : { + "graph_synonyms" : { + "type" : "synonym_graph", + "synonyms_path" : "analysis/synonym.txt" + } + } + } + } +} +-------------------------------------------------- + +The above configures a `search_synonyms` filter, with a path of +`analysis/synonym.txt` (relative to the `config` location). The +`search_synonyms` analyzer is then configured with the filter. +Additional settings are: `ignore_case` (defaults to `false`), and +`expand` (defaults to `true`). + +The `tokenizer` parameter controls the tokenizers that will be used to +tokenize the synonym, and defaults to the `whitespace` tokenizer. + +Two synonym formats are supported: Solr, WordNet. + +[float] +==== Solr synonyms + +The following is a sample format of the file: + +[source,js] +-------------------------------------------------- +# Blank lines and lines starting with pound are comments. + +# Explicit mappings match any token sequence on the LHS of "=>" +# and replace with all alternatives on the RHS. These types of mappings +# ignore the expand parameter in the schema. +# Examples: +i-pod, i pod => ipod, +sea biscuit, sea biscit => seabiscuit + +# Equivalent synonyms may be separated with commas and give +# no explicit mapping. In this case the mapping behavior will +# be taken from the expand parameter in the schema. This allows +# the same synonym file to be used in different synonym handling strategies. +# Examples: +ipod, i-pod, i pod +foozball , foosball +universe , cosmos +lol, laughing out loud + +# If expand==true, "ipod, i-pod, i pod" is equivalent +# to the explicit mapping: +ipod, i-pod, i pod => ipod, i-pod, i pod +# If expand==false, "ipod, i-pod, i pod" is equivalent +# to the explicit mapping: +ipod, i-pod, i pod => ipod + +# Multiple synonym mapping entries are merged. +foo => foo bar +foo => baz +# is equivalent to +foo => foo bar, baz +-------------------------------------------------- + +You can also define synonyms for the filter directly in the +configuration file (note use of `synonyms` instead of `synonyms_path`): + +[source,js] +-------------------------------------------------- +{ + "filter" : { + "synonym" : { + "type" : "synonym_graph", + "synonyms" : [ + "lol, laughing out loud", + "universe, cosmos" + ] + } + } +} +-------------------------------------------------- + +However, it is recommended to define large synonym sets in a file using +`synonyms_path`, because specifying them inline increases cluster size unnecessarily. + +[float] +==== WordNet synonyms + +Synonyms based on http://wordnet.princeton.edu/[WordNet] format can be +declared using `format`: + +[source,js] +-------------------------------------------------- +{ + "filter" : { + "synonym" : { + "type" : "synonym_graph", + "format" : "wordnet", + "synonyms" : [ + "s(100000001,1,'abstain',v,1,0).", + "s(100000001,2,'refrain',v,1,0).", + "s(100000001,3,'desist',v,1,0)." + ] + } + } +} +-------------------------------------------------- + +Using `synonyms_path` to define WordNet synonyms in a file is supported +as well.
diff --git a/docs/reference/docs/multi-termvectors.asciidoc b/docs/reference/docs/multi-termvectors.asciidoc index 1c0ff7f9bb7..c3a414211dd 100644 --- a/docs/reference/docs/multi-termvectors.asciidoc +++ b/docs/reference/docs/multi-termvectors.asciidoc @@ -3,7 +3,8 @@ Multi termvectors API allows to get multiple termvectors at once. The documents from which to retrieve the term vectors are specified by an index, -type and id. But the documents could also be artificially provided +type and id. But the documents could also be artificially provided in the request itself. + The response includes a `docs` array with all the fetched termvectors, each element having the structure provided by the <> diff --git a/docs/reference/docs/reindex.asciidoc b/docs/reference/docs/reindex.asciidoc index f9025c378f6..dff3fa066da 100644 --- a/docs/reference/docs/reindex.asciidoc +++ b/docs/reference/docs/reindex.asciidoc @@ -227,6 +227,28 @@ POST _reindex // CONSOLE // TEST[setup:twitter] +The `source` section supports all the elements that are supported in a +<>. For instance only a subset of the +fields from the original documents can be reindexed using source filtering +as follows: + +[source,js] +-------------------------------------------------- +POST _reindex +{ + "source": { + "index": "twitter", + "_source": ["user", "tweet"] + }, + "dest": { + "index": "new_twitter" + } +} +-------------------------------------------------- +// CONSOLE +// TEST[setup:twitter] + + Like `_update_by_query`, `_reindex` supports a script that modifies the document. Unlike `_update_by_query`, the script is allowed to modify the document's metadata. This example bumps the version of the source document: diff --git a/docs/reference/indices/put-mapping.asciidoc b/docs/reference/indices/put-mapping.asciidoc index 5105f82b091..d2e0bf5fd77 100644 --- a/docs/reference/indices/put-mapping.asciidoc +++ b/docs/reference/indices/put-mapping.asciidoc @@ -1,7 +1,7 @@ [[indices-put-mapping]] == Put Mapping -The PUT mapping API allows you to provide type mappings while creating a new index, add a new type to an existing index, or add new +The PUT mapping API allows you to add a new type to an existing index, or add new fields to an existing type: [source,js] @@ -62,6 +62,11 @@ PUT /{index}/_mapping/{type} * `{body}` contains the mapping changes that should be applied. +NOTE: When updating the `_default_` mapping with the +<> API, the new mapping is not merged with +the existing mapping. Instead, the new `_default_` mapping replaces the +existing one. + [[updating-field-mappings]] [float] === Updating field mappings diff --git a/docs/reference/mapping/dynamic/default-mapping.asciidoc b/docs/reference/mapping/dynamic/default-mapping.asciidoc index 8fcc9114dfc..de96667b7e3 100644 --- a/docs/reference/mapping/dynamic/default-mapping.asciidoc +++ b/docs/reference/mapping/dynamic/default-mapping.asciidoc @@ -32,6 +32,11 @@ PUT my_index <2> The `user` type inherits the settings from `_default_`. <3> The `blogpost` type overrides the defaults and enables the <> field. +NOTE: When updating the `_default_` mapping with the +<> API, the new mapping is not merged with +the existing mapping. Instead, the new `_default_` mapping replaces the +existing one. + While the `_default_` mapping can be updated after an index has been created, the new defaults will only affect mapping types that are created afterwards. 
diff --git a/docs/reference/mapping/types.asciidoc b/docs/reference/mapping/types.asciidoc index 8408dc89aea..23b73a6540a 100644 --- a/docs/reference/mapping/types.asciidoc +++ b/docs/reference/mapping/types.asciidoc @@ -12,6 +12,7 @@ string:: <> and <> <>:: `date` <>:: `boolean` <>:: `binary` +<>:: `integer_range`, `float_range`, `long_range`, `double_range`, `date_range` [float] === Complex datatypes @@ -55,6 +56,8 @@ include::types/array.asciidoc[] include::types/binary.asciidoc[] +include::types/range.asciidoc[] + include::types/boolean.asciidoc[] include::types/date.asciidoc[] diff --git a/docs/reference/mapping/types/range.asciidoc b/docs/reference/mapping/types/range.asciidoc new file mode 100644 index 00000000000..aa5943a21fb --- /dev/null +++ b/docs/reference/mapping/types/range.asciidoc @@ -0,0 +1,145 @@ +[[range]] +=== Range datatypes + +The following range types are supported: + +[horizontal] +`integer_range`:: A range of signed 32-bit integers with a minimum value of +-2^31^+ and maximum of +2^31^-1+. +`float_range`:: A range of single-precision 32-bit IEEE 754 floating point values. +`long_range`:: A range of signed 64-bit integers with a minimum value of +-2^63^+ and maximum of +2^63^-1+. +`double_range`:: A range of double-precision 64-bit IEEE 754 floating point values. +`date_range`:: A range of date values represented as unsigned 64-bit integer milliseconds elapsed since system epoch. + +Below is an example of configuring a mapping with various range fields followed by an example that indexes several range types. + +[source,js] +-------------------------------------------------- +PUT range_index +{ + "mappings": { + "my_type": { + "properties": { + "expected_attendees": { + "type": "integer_range" + }, + "time_frame": { + "type": "date_range", <1> + "format": "yyyy-MM-dd HH:mm:ss||yyyy-MM-dd||epoch_millis" + } + } + } + } +} + +PUT range_index/my_type/1 +{ + "expected_attendees" : { <2> + "gte" : 10, + "lte" : 20 + }, + "time_frame" : { <3> + "gte" : "2015-10-31 12:00:00", <4> + "lte" : "2015-11-01" + } +} +-------------------------------------------------- +//CONSOLE + +The following is an example of a `date_range` query over the `date_range` field named "time_frame". + +[source,js] +-------------------------------------------------- +POST range_index/_search +{ + "query" : { + "range" : { + "time_frame" : { <5> + "gte" : "2015-10-31", + "lte" : "2015-11-01", + "relation" : "within" <6> + } + } + } +} +-------------------------------------------------- +// CONSOLE +// TEST[setup:range_index] + +The result produced by the above query. + +[source,js] +-------------------------------------------------- +{ + "took": 13, + "timed_out": false, + "_shards" : { + "total": 2, + "successful": 2, + "failed": 0 + }, + "hits" : { + "total" : 1, + "max_score" : 1.0, + "hits" : [ + { + "_index" : "range_index", + "_type" : "my_type", + "_id" : "1", + "_score" : 1.0, + "_source" : { + "expected_attendees" : { + "gte" : 10, "lte" : 20 + }, + "time_frame" : { + "gte" : "2015-10-31 12:00:00", "lte" : "2015-11-01" + } + } + } + ] + } +} +-------------------------------------------------- +// TESTRESPONSE[s/"took": 13/"took" : $body.took/] + +<1> `date_range` types accept the same field parameters defined by the <> type. +<2> Example indexing a meeting with 10 to 20 attendees. +<3> Date ranges accept the same format as described in <>. +<4> Example date range using date time stamp. This also accepts <> formatting, or "now" for system time. 
+<5> Range queries work the same as described in <>. +<6> Range queries over range <> support a `relation` parameter which can be one of `WITHIN`, `CONTAINS`, + `INTERSECTS` (default). + +[[range-params]] +==== Parameters for range fields + +The following parameters are accepted by range types: + +[horizontal] + +<>:: + + Try to convert strings to numbers and truncate fractions for integers. + Accepts `true` (default) and `false`. + +<>:: + + Mapping field-level query time boosting. Accepts a floating point number, defaults + to `1.0`. + +<>:: + + Whether or not the field value should be included in the + <> field? Accepts `true` or `false`. Defaults + to `false` if <> is set to `false`, or if a parent + <> field sets `include_in_all` to `false`. + Otherwise defaults to `true`. + +<>:: + + Should the field be searchable? Accepts `true` (default) and `false`. + +<>:: + + Whether the field value should be stored and retrievable separately from + the <> field. Accepts `true` or `false` + (default). diff --git a/docs/reference/migration/migrate_6_0/rest.asciidoc b/docs/reference/migration/migrate_6_0/rest.asciidoc index e68682da9ed..5b425666823 100644 --- a/docs/reference/migration/migrate_6_0/rest.asciidoc +++ b/docs/reference/migration/migrate_6_0/rest.asciidoc @@ -17,3 +17,8 @@ The deprecated request parameters and plain text in request body has been remove The `ignore_unavailable` and `allow_no_indices` options are no longer accepted as they could cause undesired results when their values differed from their defaults. + +=== `timestamp` and `ttl` in index requests + +`timestamp` and `ttl` are not accepted anymore as parameters of index/update +requests. diff --git a/docs/reference/migration/migrate_6_0/search.asciidoc b/docs/reference/migration/migrate_6_0/search.asciidoc index cd39141095d..6910d37edaf 100644 --- a/docs/reference/migration/migrate_6_0/search.asciidoc +++ b/docs/reference/migration/migrate_6_0/search.asciidoc @@ -10,6 +10,18 @@ only the strings `"true"` and `"false"` will be parsed into their boolean counterparts. Other strings will cause an error to be thrown. +* The `in` query (a synonym for the `terms` query) has been removed + +* The `geo_bbox` query (a synonym for the `geo_bounding_box` query) has been removed + +* The `mlt` query (a synonym for the `more_like_this` query) has been removed + +* The `fuzzy_match` and `match_fuzzy` queries (synonyms for the `match` query) have been removed + +* The `terms` query now always returns scores equal to `1` and is not subject to + `indices.query.bool.max_clause_count` anymore. + +* The deprecated `indices` query has been removed. ==== Search shards API diff --git a/docs/reference/migration/migrate_6_0/settings.asciidoc b/docs/reference/migration/migrate_6_0/settings.asciidoc index 2dcedc8d219..c2f20de7d21 100644 --- a/docs/reference/migration/migrate_6_0/settings.asciidoc +++ b/docs/reference/migration/migrate_6_0/settings.asciidoc @@ -14,3 +14,9 @@ Store throttling has been removed. As a consequence, the cluster settings and the `index.store.throttle.type` and `index.store.throttle.max_bytes_per_sec` index settings are not recognized anymore. + +==== Store settings + +The `default` `index.store.type` has been removed. If you were using it, we +advise that you simply remove it from your index settings and Elasticsearch +will use the best `store` implementation for your operating system.
diff --git a/docs/reference/modules/cluster/allocation_awareness.asciidoc b/docs/reference/modules/cluster/allocation_awareness.asciidoc index f4e61fb0da1..8ffa4e6b06d 100644 --- a/docs/reference/modules/cluster/allocation_awareness.asciidoc +++ b/docs/reference/modules/cluster/allocation_awareness.asciidoc @@ -104,7 +104,7 @@ cluster.routing.allocation.awareness.attributes: zone Now, if we start 2 nodes with `node.attr.zone` set to `zone1` and create an index with 5 shards and 1 replica. The index will be created, but only the 5 primary -shards will be allocated (with no replicas). Only when we start more shards +shards will be allocated (with no replicas). Only when we start more nodes with `node.attr.zone` set to `zone2` will the replicas be allocated. The `cluster.routing.allocation.awareness.*` settings can all be updated diff --git a/docs/reference/modules/scripting/security.asciidoc b/docs/reference/modules/scripting/security.asciidoc index 072b954b72f..3b40937c2d8 100644 --- a/docs/reference/modules/scripting/security.asciidoc +++ b/docs/reference/modules/scripting/security.asciidoc @@ -218,7 +218,7 @@ Security Policy either: * system wide: `$JAVA_HOME/lib/security/java.policy`, * for just the `elasticsearch` user: `/home/elasticsearch/.java.policy` -* by adding a system property to the <> configuration: `-Djava.security.policy=someURL`, or +* by adding a system property to the <> configuration: `-Djava.security.policy=someURL`, or * via the `ES_JAVA_OPTS` environment variable with `-Djava.security.policy=someURL`: + [source,js] diff --git a/docs/reference/query-dsl/compound-queries.asciidoc b/docs/reference/query-dsl/compound-queries.asciidoc index 07624f02ab7..bee5787df1d 100644 --- a/docs/reference/query-dsl/compound-queries.asciidoc +++ b/docs/reference/query-dsl/compound-queries.asciidoc @@ -37,14 +37,9 @@ implemented with scripting. Return documents which match a `positive` query, but reduce the score of documents which also match a `negative` query. -<>:: - -Execute one query for the specified indices, and another for other indices. - include::constant-score-query.asciidoc[] include::bool-query.asciidoc[] include::dis-max-query.asciidoc[] include::function-score-query.asciidoc[] include::boosting-query.asciidoc[] -include::indices-query.asciidoc[] diff --git a/docs/reference/query-dsl/geo-distance-range-query.asciidoc b/docs/reference/query-dsl/geo-distance-range-query.asciidoc deleted file mode 100644 index c54c50638bc..00000000000 --- a/docs/reference/query-dsl/geo-distance-range-query.asciidoc +++ /dev/null @@ -1,43 +0,0 @@ -[[query-dsl-geo-distance-range-query]] -=== Geo Distance Range Query - -Filters documents that exists within a range from a specific point: - -[source,js] --------------------------------------------------- -GET /_search -{ - "query": { - "bool" : { - "must" : { - "match_all" : {} - }, - "filter" : { - "geo_distance_range" : { - "from" : "200km", - "to" : "400km", - "pin.location" : { - "lat" : 40, - "lon" : -70 - } - } - } - } - } -} --------------------------------------------------- -// CONSOLE - -Supports the same point location parameter and query options as the -<> -filter. And also support the common parameters for range (lt, lte, gt, -gte, from, to, include_upper and include_lower). - -[float] -==== Ignore Unmapped - -When set to `true` the `ignore_unmapped` option will ignore an unmapped field -and will not match any documents for this query. This can be useful when -querying multiple indexes which might have different mappings. 
When set to -`false` (the default value) the query will throw an exception if the field -is not mapped. diff --git a/docs/reference/query-dsl/geo-queries.asciidoc b/docs/reference/query-dsl/geo-queries.asciidoc index 0bdf70f5b8d..e60e4bcf615 100644 --- a/docs/reference/query-dsl/geo-queries.asciidoc +++ b/docs/reference/query-dsl/geo-queries.asciidoc @@ -22,11 +22,6 @@ The queries in this group are: Finds document with geo-points within the specified distance of a central point. -<> query:: - - Like the `geo_point` query, but the range starts at a specified distance - from the central point. - <> query:: Find documents with geo-points within the specified polygon. @@ -38,6 +33,4 @@ include::geo-bounding-box-query.asciidoc[] include::geo-distance-query.asciidoc[] -include::geo-distance-range-query.asciidoc[] - include::geo-polygon-query.asciidoc[] diff --git a/docs/reference/query-dsl/indices-query.asciidoc b/docs/reference/query-dsl/indices-query.asciidoc deleted file mode 100644 index 112a779e3f9..00000000000 --- a/docs/reference/query-dsl/indices-query.asciidoc +++ /dev/null @@ -1,33 +0,0 @@ -[[query-dsl-indices-query]] -=== Indices Query - -deprecated[5.0.0, Search on the '_index' field instead] - -The `indices` query is useful in cases where a search is executed across -multiple indices. It allows to specify a list of index names and an inner -query that is only executed for indices matching names on that list. -For other indices that are searched, but that don't match entries -on the list, the alternative `no_match_query` is executed. - -[source,js] --------------------------------------------------- -GET /_search -{ - "query": { - "indices" : { - "indices" : ["index1", "index2"], - "query" : { "term" : { "tag" : "wow" } }, - "no_match_query" : { "term" : { "tag" : "kow" } } - } - } -} --------------------------------------------------- -// CONSOLE -// TEST[warning:indices query is deprecated. Instead search on the '_index' field] - -You can use the `index` field to provide a single index. - -`no_match_query` can also have "string" value of `none` (to match no -documents), and `all` (to match all). Defaults to `all`. - -`query` is mandatory, as well as `indices` (or `index`). diff --git a/docs/reference/query-dsl/mlt-query.asciidoc b/docs/reference/query-dsl/mlt-query.asciidoc index 8e23afdbb86..e36025980d6 100644 --- a/docs/reference/query-dsl/mlt-query.asciidoc +++ b/docs/reference/query-dsl/mlt-query.asciidoc @@ -1,12 +1,11 @@ [[query-dsl-mlt-query]] === More Like This Query -The More Like This Query (MLT Query) finds documents that are "like" a given +The More Like This Query finds documents that are "like" a given set of documents. In order to do so, MLT selects a set of representative terms of these input documents, forms a query using these terms, executes the query and returns the results. The user controls the input documents, how the terms -should be selected and how the query is formed. `more_like_this` can be -shortened to `mlt` deprecated[5.0.0,use `more_like_this` instead]. +should be selected and how the query is formed. The simplest use case consists of asking for documents that are similar to a provided piece of text. 
Here, we are asking for all movies that have some text diff --git a/docs/reference/query-dsl/percolate-query.asciidoc b/docs/reference/query-dsl/percolate-query.asciidoc index f2eb8fb34ef..c726fe4445f 100644 --- a/docs/reference/query-dsl/percolate-query.asciidoc +++ b/docs/reference/query-dsl/percolate-query.asciidoc @@ -341,6 +341,8 @@ This will yield the following response. -------------------------------------------------- // TESTRESPONSE[s/"took": 7,/"took": "$body.took",/] +<1> The terms from each query have been highlighted in the document. + Instead of the query in the search request highlighting the percolator hits, the percolator queries are highlighting the document defined in the `percolate` query. diff --git a/docs/reference/query-dsl/query-string-query.asciidoc b/docs/reference/query-dsl/query-string-query.asciidoc index 28d7be357c9..2dcfcde1ca0 100644 --- a/docs/reference/query-dsl/query-string-query.asciidoc +++ b/docs/reference/query-dsl/query-string-query.asciidoc @@ -85,8 +85,8 @@ for exact matching. Look <> for a comprehensive example. |`split_on_whitespace` |Whether query text should be split on whitespace prior to analysis. - Instead the queryparser would parse around only real 'operators'. - Default to `false`. +Instead the query parser would parse around only real 'operators'. Defaults to `false`. +This option cannot be set to `false` if `autoGeneratePhraseQueries` is set to `true`. |`all_fields` | Perform the query on all fields detected in the mapping that can be queried. Will be used by default when the `_all` field is disabled and no diff --git a/docs/reference/query-dsl/terms-query.asciidoc b/docs/reference/query-dsl/terms-query.asciidoc index e00c18bb56f..60ce0283a12 100644 --- a/docs/reference/query-dsl/terms-query.asciidoc +++ b/docs/reference/query-dsl/terms-query.asciidoc @@ -9,19 +9,12 @@ Filters documents that have fields that match any of the provided terms GET /_search { "query": { - "constant_score" : { - "filter" : { - "terms" : { "user" : ["kimchy", "elasticsearch"]} - } - } + "terms" : { "user" : ["kimchy", "elasticsearch"]} } } -------------------------------------------------- // CONSOLE -The `terms` query is also aliased with `in` as the filter name for -simpler usage deprecated[5.0.0,use `terms` instead]. - [float] [[query-dsl-terms-lookup]] ===== Terms lookup mechanism diff --git a/docs/reference/redirects.asciidoc b/docs/reference/redirects.asciidoc index d6870f81a10..ce01086b2ac 100644 --- a/docs/reference/redirects.asciidoc +++ b/docs/reference/redirects.asciidoc @@ -163,13 +163,6 @@ The `ids` filter has been replaced by the <>. It behaves as a query in ``query context'' and as a filter in ``filter context'' (see <>). -[role="exclude",id="query-dsl-indices-filter"] -=== Indices Filter - -The `indices` filter has been replaced by the <>. It behaves -as a query in ``query context'' and as a filter in ``filter context'' (see -<>). - [role="exclude",id="query-dsl-match-all-filter"] === Match All Filter @@ -254,6 +247,15 @@ The `fuzzy_like_this_field` or `flt_field` query has been removed. Instead use the <> parameter with the <> or the <>. +[role="exclude",id="query-dsl-geo-distance-range-query"] +=== Geo Distance Range Query + +The `geo_distance_range` query has been removed. Instead use the +<> with pagination +or the +<> +depending on your needs.
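As a rough sketch of the suggested replacement (reusing the `pin.location` field and coordinates from the removed example; the field name and distances are illustrative, not prescriptive), a `geo_distance` query keeps documents within the outer radius, while a `geo_distance` aggregation buckets them into the old 200km to 400km ring:

[source,js]
--------------------------------------------------
GET /_search
{
  "query": {
    "bool": {
      "filter": {
        "geo_distance": {
          "distance": "400km",
          "pin.location": { "lat": 40, "lon": -70 }
        }
      }
    }
  },
  "aggs": {
    "rings": {
      "geo_distance": {
        "field": "pin.location",
        "origin": { "lat": 40, "lon": -70 },
        "unit": "km",
        "ranges": [ { "from": 200, "to": 400 } ]
      }
    }
  }
}
--------------------------------------------------
// CONSOLE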
+ [role="exclude",id="query-dsl-geohash-cell-query"] === Geohash Cell Query diff --git a/docs/reference/search/explain.asciidoc b/docs/reference/search/explain.asciidoc index 1291af702f5..7185ddbb9b5 100644 --- a/docs/reference/search/explain.asciidoc +++ b/docs/reference/search/explain.asciidoc @@ -30,52 +30,102 @@ This will yield the following result: [source,js] -------------------------------------------------- { - "_index" : "twitter", - "_type" : "tweet", - "_id" : "0", - "matched" : true, - "explanation" : { - "value" : 1.55077, - "description" : "sum of:", - "details" : [ { - "value" : 1.55077, - "description" : "weight(message:elasticsearch in 0) [PerFieldSimilarity], result of:", - "details" : [ { - "value" : 1.55077, - "description" : "score(doc=0,freq=1.0 = termFreq=1.0\n), product of:", - "details" : [ { - "value" : 1.3862944, - "description" : "idf(docFreq=1, docCount=5)", - "details" : [ ] - }, { - "value" : 1.1186441, - "description" : "tfNorm, computed from:", - "details" : [ - { "value" : 1.0, "description" : "termFreq=1.0", "details" : [ ] }, - { "value" : 1.2, "description" : "parameter k1", "details" : [ ] }, - { "value" : 0.75, "description" : "parameter b", "details" : [ ] }, - { "value" : 5.4, "description" : "avgFieldLength", "details" : [ ] }, - { "value" : 4.0, "description" : "fieldLength", "details" : [ ] } - ] - } ] - } ] - }, { - "value" : 0.0, - "description" : "match on required clause, product of:", - "details" : [ { - "value" : 0.0, - "description" : "# clause", - "details" : [ ] - }, { - "value" : 1.0, - "description" : "*:*, product of:", - "details" : [ - { "value" : 1.0, "description" : "boost", "details" : [ ] }, - { "value" : 1.0, "description" : "queryNorm", "details" : [ ] } - ] - } ] - } ] - } + "_index": "twitter", + "_type": "tweet", + "_id": "0", + "matched": true, + "explanation": { + "value": 1.55077, + "description": "sum of:", + "details": [ + { + "value": 1.55077, + "description": "weight(message:elasticsearch in 0) [PerFieldSimilarity], result of:", + "details": [ + { + "value": 1.55077, + "description": "score(doc=0,freq=1.0 = termFreq=1.0\n), product of:", + "details": [ + { + "value": 1.3862944, + "description": "idf, computed as log(1 + (docCount - docFreq + 0.5) / (docFreq + 0.5)) from:", + "details": [ + { + "value": 1.0, + "description": "docFreq", + "details": [] + }, + { + "value": 5.0, + "description": "docCount", + "details": [] + } + ] + }, + { + "value": 1.1186441, + "description": "tfNorm, computed as (freq * (k1 + 1)) / (freq + k1 * (1 - b + b * fieldLength / avgFieldLength)) from:", + "details": [ + { + "value": 1.0, + "description": "termFreq=1.0", + "details": [] + }, + { + "value": 1.2, + "description": "parameter k1", + "details": [] + }, + { + "value": 0.75, + "description": "parameter b", + "details": [] + }, + { + "value": 5.4, + "description": "avgFieldLength", + "details": [] + }, + { + "value": 4.0, + "description": "fieldLength", + "details": [] + } + ] + } + ] + } + ] + }, + { + "value": 0.0, + "description": "match on required clause, product of:", + "details": [ + { + "value": 0.0, + "description": "# clause", + "details": [] + }, + { + "value": 1.0, + "description": "*:*, product of:", + "details": [ + { + "value": 1.0, + "description": "boost", + "details": [] + }, + { + "value": 1.0, + "description": "queryNorm", + "details": [] + } + ] + } + ] + } + ] + } } -------------------------------------------------- // TESTRESPONSE diff --git a/docs/reference/search/suggesters/term-suggest.asciidoc 
b/docs/reference/search/suggesters/term-suggest.asciidoc index ff5cd38cf21..f76b17e0ed2 100644 --- a/docs/reference/search/suggesters/term-suggest.asciidoc +++ b/docs/reference/search/suggesters/term-suggest.asciidoc @@ -45,7 +45,7 @@ doesn't take the query into account that is part of request. + ** `missing`: Only provide suggestions for suggest text terms that are not in the index. This is the default. - ** `popular`: Only suggest suggestions that occur in more docs then + ** `popular`: Only suggest suggestions that occur in more docs than the original suggest text term. ** `always`: Suggest any matching suggestions based on terms in the suggest text. diff --git a/docs/reference/setup.asciidoc b/docs/reference/setup.asciidoc index ae3c5b3beb4..2608f461dba 100644 --- a/docs/reference/setup.asciidoc +++ b/docs/reference/setup.asciidoc @@ -34,6 +34,10 @@ refuse to start if a known-bad version of Java is used. The version of Java that Elasticsearch will use can be configured by setting the `JAVA_HOME` environment variable. +NOTE: Elasticsearch ships with default configuration for running Elasticsearch on 64-bit server JVMs. If you are using a 32-bit client JVM, +you must remove `-server` from <> and if you are using any 32-bit JVM you should reconfigure the thread stack size +from `-Xss1m` to `-Xss320k`. + -- include::setup/install.asciidoc[] diff --git a/docs/reference/setup/install/deb.asciidoc b/docs/reference/setup/install/deb.asciidoc index c1d0425b430..6ca635ce06e 100644 --- a/docs/reference/setup/install/deb.asciidoc +++ b/docs/reference/setup/install/deb.asciidoc @@ -96,6 +96,7 @@ Examine +/etc/apt/sources.list.d/elasticsearch-{major-version}.list+ for the dup endif::[] +include::skip-set-kernel-parameters.asciidoc[] [[install-deb]] ==== Download and install the Debian package manually diff --git a/docs/reference/setup/install/rpm.asciidoc b/docs/reference/setup/install/rpm.asciidoc index acf9d6f2418..b737fbff127 100644 --- a/docs/reference/setup/install/rpm.asciidoc +++ b/docs/reference/setup/install/rpm.asciidoc @@ -112,6 +112,8 @@ sudo rpm --install elasticsearch-{version}.rpm endif::[] +include::skip-set-kernel-parameters.asciidoc[] + include::init-systemd.asciidoc[] [[rpm-running-init]] diff --git a/docs/reference/setup/install/skip-set-kernel-parameters.asciidoc b/docs/reference/setup/install/skip-set-kernel-parameters.asciidoc new file mode 100644 index 00000000000..56406fce987 --- /dev/null +++ b/docs/reference/setup/install/skip-set-kernel-parameters.asciidoc @@ -0,0 +1,2 @@ +NOTE: On systemd-based distributions, the installation scripts will attempt to set kernel parameters (e.g., +`vm.max_map_count`); you can skip this by setting the environment variable `ES_SKIP_SET_KERNEL_PARAMETERS` to `true`. diff --git a/docs/reference/setup/install/windows.asciidoc b/docs/reference/setup/install/windows.asciidoc index d33558b913f..37d06e18d68 100644 --- a/docs/reference/setup/install/windows.asciidoc +++ b/docs/reference/setup/install/windows.asciidoc @@ -119,11 +119,6 @@ Using JAVA_HOME (64-bit): "c:\jvm\jdk1.8" The service 'elasticsearch-service-x64' has been installed. -------------------------------------------------- -NOTE: The service installer requires that the thread stack size setting -be configured in jvm.options *before* you install the service. On -32-bit Windows, you should add `-Xss320k` to the jvm.options file, and -on 64-bit Windows you should add `-Xss1m` to the jvm.options file. 
- NOTE: While a JRE can be used for the Elasticsearch service, due to its use of a client VM (as opposed to a server JVM which offers better performance for long-running applications) its usage is discouraged and a warning will be issued. NOTE: Upgrading (or downgrading) JVM versions does not require the service to be reinstalled. However, upgrading across JVM types (e.g. JRE versus SE) is not supported, and does require the service to be reinstalled. diff --git a/docs/reference/setup/sysconfig/configuring.asciidoc b/docs/reference/setup/sysconfig/configuring.asciidoc index 84d0f832141..72f915cff61 100644 --- a/docs/reference/setup/sysconfig/configuring.asciidoc +++ b/docs/reference/setup/sysconfig/configuring.asciidoc @@ -93,7 +93,7 @@ The systemd service file (`/usr/lib/systemd/system/elasticsearch.service`) contains the limits that are applied by default. To override these, add a file called -`/etc/systemd/system/elasticsearch.service.d/elasticsearch.conf`) and specify +`/etc/systemd/system/elasticsearch.service.d/elasticsearch.conf` and specify any changes in that file, such as: [source,sh] @@ -101,8 +101,8 @@ any changes in that file, such as: LimitMEMLOCK=infinity --------------------------------- -[[es-java-opts]] -==== Setting JVM system properties +[[jvm-options]] +==== Setting JVM options The preferred method of setting Java Virtual Machine options (including system properties and JVM flags) is via the `jvm.options` configuration diff --git a/docs/reference/setup/sysconfig/heap_size.asciidoc b/docs/reference/setup/sysconfig/heap_size.asciidoc index 55fb95bc7e6..30023618f3f 100644 --- a/docs/reference/setup/sysconfig/heap_size.asciidoc +++ b/docs/reference/setup/sysconfig/heap_size.asciidoc @@ -6,7 +6,7 @@ and maximum size of 2 GB. When moving to production, it is important to configure heap size to ensure that Elasticsearch has enough heap available. -Elasticsearch will assign the entire heap specified in <> +Elasticsearch will assign the entire heap specified in <> via the Xms (minimum heap size) and Xmx (maximum heap size) settings. 
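As a minimal sketch (the 2 GB figure is purely illustrative), the corresponding `jvm.options` entries set the minimum and maximum heap to the same value, which is the usual recommendation so the heap never has to be resized at runtime:

[source,txt]
--------------------------------------------------
# jvm.options (sketch): pin minimum and maximum heap to the same size
-Xms2g
-Xmx2g
--------------------------------------------------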
The value for these setting depends on the amount of RAM available on diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameProcessorTests.java index c546f021823..19d791dd864 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameProcessorTests.java @@ -38,7 +38,7 @@ public class DateIndexNameProcessorTests extends ESTestCase { "events-", "y", "yyyyMMdd" ); - IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null, null, + IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, Collections.singletonMap("_field", "2016-04-25T12:24:20.101Z")); processor.execute(document); assertThat(document.getSourceAndMetadata().get("_index"), equalTo("")); @@ -48,7 +48,7 @@ public class DateIndexNameProcessorTests extends ESTestCase { Function function = DateFormat.Tai64n.getFunction(null, DateTimeZone.UTC, null); DateIndexNameProcessor dateProcessor = new DateIndexNameProcessor("_tag", "_field", Collections.singletonList(function), DateTimeZone.UTC, "events-", "m", "yyyyMMdd"); - IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null, null, + IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, Collections.singletonMap("_field", (randomBoolean() ? "@" : "") + "4000000050d506482dbdf024")); dateProcessor.execute(document); assertThat(document.getSourceAndMetadata().get("_index"), equalTo("")); @@ -58,7 +58,7 @@ public class DateIndexNameProcessorTests extends ESTestCase { Function function = DateFormat.UnixMs.getFunction(null, DateTimeZone.UTC, null); DateIndexNameProcessor dateProcessor = new DateIndexNameProcessor("_tag", "_field", Collections.singletonList(function), DateTimeZone.UTC, "events-", "m", "yyyyMMdd"); - IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null, null, + IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, Collections.singletonMap("_field", "1000500")); dateProcessor.execute(document); assertThat(document.getSourceAndMetadata().get("_index"), equalTo("")); @@ -68,7 +68,7 @@ public class DateIndexNameProcessorTests extends ESTestCase { Function function = DateFormat.Unix.getFunction(null, DateTimeZone.UTC, null); DateIndexNameProcessor dateProcessor = new DateIndexNameProcessor("_tag", "_field", Collections.singletonList(function), DateTimeZone.UTC, "events-", "m", "yyyyMMdd"); - IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null, null, + IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, Collections.singletonMap("_field", "1000.5")); dateProcessor.execute(document); assertThat(document.getSourceAndMetadata().get("_index"), equalTo("")); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java index edbc0015699..2baa32b3d51 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java @@ -45,7 +45,7 @@ public class ForEachProcessorTests extends ESTestCase { 
values.add("bar"); values.add("baz"); IngestDocument ingestDocument = new IngestDocument( - "_index", "_type", "_id", null, null, null, null, Collections.singletonMap("values", values) + "_index", "_type", "_id", null, null, Collections.singletonMap("values", values) ); ForEachProcessor processor = new ForEachProcessor( @@ -61,7 +61,7 @@ public class ForEachProcessorTests extends ESTestCase { public void testExecuteWithFailure() throws Exception { IngestDocument ingestDocument = new IngestDocument( - "_index", "_type", "_id", null, null, null, null, Collections.singletonMap("values", Arrays.asList("a", "b", "c")) + "_index", "_type", "_id", null, null, Collections.singletonMap("values", Arrays.asList("a", "b", "c")) ); TestProcessor testProcessor = new TestProcessor(id -> { @@ -101,7 +101,7 @@ public class ForEachProcessorTests extends ESTestCase { values.add(new HashMap<>()); values.add(new HashMap<>()); IngestDocument ingestDocument = new IngestDocument( - "_index", "_type", "_id", null, null, null, null, Collections.singletonMap("values", values) + "_index", "_type", "_id", null, null, Collections.singletonMap("values", values) ); TestProcessor innerProcessor = new TestProcessor(id -> { @@ -132,7 +132,7 @@ public class ForEachProcessorTests extends ESTestCase { document.put("values", values); document.put("flat_values", new ArrayList<>()); document.put("other", "value"); - IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", null, null, null, null, document); + IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", null, null, document); TemplateService ts = TestTemplateService.instance(); ForEachProcessor processor = new ForEachProcessor( @@ -171,7 +171,7 @@ public class ForEachProcessorTests extends ESTestCase { values.add(""); } IngestDocument ingestDocument = new IngestDocument( - "_index", "_type", "_id", null, null, null, null, Collections.singletonMap("values", values) + "_index", "_type", "_id", null, null, Collections.singletonMap("values", values) ); ForEachProcessor processor = new ForEachProcessor("_tag", "values", innerProcessor); @@ -190,7 +190,7 @@ public class ForEachProcessorTests extends ESTestCase { values.add(1); values.add(null); IngestDocument ingestDocument = new IngestDocument( - "_index", "_type", "_id", null, null, null, null, Collections.singletonMap("values", values) + "_index", "_type", "_id", null, null, Collections.singletonMap("values", values) ); TemplateService ts = TestTemplateService.instance(); @@ -221,7 +221,7 @@ public class ForEachProcessorTests extends ESTestCase { source.put("_value", "new_value"); source.put("values", values); IngestDocument ingestDocument = new IngestDocument( - "_index", "_type", "_id", null, null, null, null, source + "_index", "_type", "_id", null, null, source ); TestProcessor processor = new TestProcessor(doc -> doc.setFieldValue("_ingest._value", @@ -252,7 +252,7 @@ public class ForEachProcessorTests extends ESTestCase { values.add(value); IngestDocument ingestDocument = new IngestDocument( - "_index", "_type", "_id", null, null, null, null, Collections.singletonMap("values1", values) + "_index", "_type", "_id", null, null, Collections.singletonMap("values1", values) ); TestProcessor testProcessor = new TestProcessor( diff --git a/modules/lang-expression/licenses/lucene-expressions-6.3.0.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-6.3.0.jar.sha1 deleted file mode 100644 index 8fca696518d..00000000000 --- 
a/modules/lang-expression/licenses/lucene-expressions-6.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f9847cdbdd355f9f96993c4c322d6b453f4e84a8 \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-6.4.0-snapshot-ec38570.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-6.4.0-snapshot-ec38570.jar.sha1 new file mode 100644 index 00000000000..b68a4d5cbd7 --- /dev/null +++ b/modules/lang-expression/licenses/lucene-expressions-6.4.0-snapshot-ec38570.jar.sha1 @@ -0,0 +1 @@ +031d34e0a604a7cbb5c8ba816d49d9f622adaa3f \ No newline at end of file diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java index 36ce55067f4..c78825758e2 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngineService.java @@ -34,7 +34,6 @@ import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.index.mapper.BaseGeoPointFieldMapper; import org.elasticsearch.index.mapper.DateFieldMapper; -import org.elasticsearch.index.mapper.LegacyDateFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.script.ClassPermission; @@ -200,8 +199,7 @@ public class ExpressionScriptEngineService extends AbstractComponent implements } else { valueSource = GeoField.getMethod(fieldData, fieldname, methodname); } - } else if (fieldType instanceof LegacyDateFieldMapper.DateFieldType || - fieldType instanceof DateFieldMapper.DateFieldType) { + } else if (fieldType instanceof DateFieldMapper.DateFieldType) { if (dateAccessor) { // date object if (methodname == null) { diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequest.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequest.java index 93549d1d791..178eb3e290d 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequest.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequest.java @@ -22,7 +22,6 @@ package org.elasticsearch.script.mustache; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.CompositeIndicesRequest; -import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -61,11 +60,6 @@ public class MultiSearchTemplateRequest extends ActionRequest implements Composi return this.requests; } - @Override - public List subRequests() { - return requests; - } - @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateRequest.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateRequest.java index b405d0950e7..8ff30fb0e5b 100644 --- 
a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateRequest.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateRequest.java @@ -21,10 +21,8 @@ package org.elasticsearch.script.mustache; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.CompositeIndicesRequest; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.script.ScriptType; @@ -37,7 +35,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; /** * A request to execute a search based on a search template. */ -public class SearchTemplateRequest extends ActionRequest implements IndicesRequest { +public class SearchTemplateRequest extends ActionRequest implements CompositeIndicesRequest { private SearchRequest request; private boolean simulate = false; @@ -165,14 +163,4 @@ public class SearchTemplateRequest extends ActionRequest implements IndicesReque out.writeMap(scriptParams); } } - - @Override - public String[] indices() { - return request != null ? request.indices() : Strings.EMPTY_ARRAY; - } - - @Override - public IndicesOptions indicesOptions() { - return request != null ? request.indicesOptions() : SearchRequest.DEFAULT_INDICES_OPTIONS; - } } diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequestTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequestTests.java index e3b633006c5..bec7e24d3a4 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequestTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequestTests.java @@ -41,16 +41,16 @@ public class MultiSearchTemplateRequestTests extends ESTestCase { assertThat(request.requests().size(), equalTo(3)); assertThat(request.requests().get(0).getRequest().indices()[0], equalTo("test0")); assertThat(request.requests().get(0).getRequest().indices()[1], equalTo("test1")); - assertThat(request.requests().get(0).indices(), arrayContaining("test0", "test1")); + assertThat(request.requests().get(0).getRequest().indices(), arrayContaining("test0", "test1")); assertThat(request.requests().get(0).getRequest().requestCache(), equalTo(true)); assertThat(request.requests().get(0).getRequest().preference(), nullValue()); - assertThat(request.requests().get(1).indices()[0], equalTo("test2")); - assertThat(request.requests().get(1).indices()[1], equalTo("test3")); + assertThat(request.requests().get(1).getRequest().indices()[0], equalTo("test2")); + assertThat(request.requests().get(1).getRequest().indices()[1], equalTo("test3")); assertThat(request.requests().get(1).getRequest().types()[0], equalTo("type1")); assertThat(request.requests().get(1).getRequest().requestCache(), nullValue()); assertThat(request.requests().get(1).getRequest().preference(), equalTo("_local")); - assertThat(request.requests().get(2).indices()[0], equalTo("test4")); - assertThat(request.requests().get(2).indices()[1], equalTo("test1")); + assertThat(request.requests().get(2).getRequest().indices()[0], equalTo("test4")); + 
assertThat(request.requests().get(2).getRequest().indices()[1], equalTo("test1")); assertThat(request.requests().get(2).getRequest().types()[0], equalTo("type2")); assertThat(request.requests().get(2).getRequest().types()[1], equalTo("type1")); assertThat(request.requests().get(2).getRequest().routing(), equalTo("123")); diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/TemplateQueryBuilderTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/TemplateQueryBuilderTests.java index 7bff3f59842..57b9ad622da 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/TemplateQueryBuilderTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/TemplateQueryBuilderTests.java @@ -104,7 +104,7 @@ public class TemplateQueryBuilderTests extends AbstractQueryTestCase subRequests() { - return requests; - } - private void parsePercolateAction(XContentParser parser, PercolateRequest percolateRequest, boolean allowExplicitIndex) throws IOException { String globalIndex = indices != null && indices.length > 0 ? indices[0] : null; diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java index 383cf959c13..4359205b175 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java @@ -23,6 +23,7 @@ import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.Fields; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.Term; import org.apache.lucene.index.Terms; @@ -357,7 +358,7 @@ public class PercolatorFieldMapper extends FieldMapper { } @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { + protected void parseCreateField(ParseContext context, List fields) throws IOException { throw new UnsupportedOperationException("should not be invoked"); } diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java index 97535b42093..e1589cba14b 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java @@ -226,7 +226,7 @@ public class PercolateQueryBuilderTests extends AbstractQueryTestCase copyMetadata(RequestWrapper request, ScrollableHitSource.Hit doc) { request.setParent(doc.getParent()); copyRouting(request, doc.getRouting()); - - // Comes back as a Long but needs to be a string - Long timestamp = doc.getTimestamp(); - if (timestamp != null) { - request.setTimestamp(timestamp.toString()); - } - Long ttl = doc.getTTL(); - if (ttl != null) { - request.setTtl(ttl); - } return request; } @@ -184,10 +171,6 @@ public abstract class AbstractAsyncBulkIndexByScrollAction source); Map getSource(); @@ -271,20 +254,6 @@ public abstract class AbstractAsyncBulkIndexByScrollAction getSource() { return request.sourceAsMap(); @@ -384,16 +353,6 @@ public abstract class AbstractAsyncBulkIndexByScrollAction getSource() { throw new 
UnsupportedOperationException("unable to get source from action request [" + request.getClass() + "]"); @@ -463,10 +422,6 @@ public abstract class AbstractAsyncBulkIndexByScrollAction request, Object to); - protected abstract void scriptChangedTimestamp(RequestWrapper request, Object to); - - protected abstract void scriptChangedTTL(RequestWrapper request, Object to); - } public enum OpType { diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java index 13c7d1e9a99..f895ec1acc4 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequest.java @@ -402,7 +402,7 @@ public abstract class AbstractBulkByScrollRequest 1) { throw new IllegalArgumentException("Attempting to send sliced reindex-style request to a node that doesn't support " - + "it. Version is [" + out.getVersion() + "] but must be [" + Version.V_5_1_0_UNRELEASED + "]"); + + "it. Version is [" + out.getVersion() + "] but must be [" + Version.V_5_1_1_UNRELEASED + "]"); } } } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkByScrollTask.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkByScrollTask.java index 77f8128e9ac..2662f474bfd 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkByScrollTask.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkByScrollTask.java @@ -186,7 +186,7 @@ public abstract class BulkByScrollTask extends CancellableTask { } public Status(StreamInput in) throws IOException { - if (in.getVersion().onOrAfter(Version.V_5_1_0_UNRELEASED)) { + if (in.getVersion().onOrAfter(Version.V_5_1_1_UNRELEASED)) { sliceId = in.readOptionalVInt(); } else { sliceId = null; @@ -204,7 +204,7 @@ public abstract class BulkByScrollTask extends CancellableTask { requestsPerSecond = in.readFloat(); reasonCancelled = in.readOptionalString(); throttledUntil = new TimeValue(in); - if (in.getVersion().onOrAfter(Version.V_5_1_0_UNRELEASED)) { + if (in.getVersion().onOrAfter(Version.V_5_1_1_UNRELEASED)) { sliceStatuses = in.readList(stream -> stream.readOptionalWriteable(StatusOrException::new)); } else { sliceStatuses = emptyList(); @@ -213,7 +213,7 @@ public abstract class BulkByScrollTask extends CancellableTask { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getVersion().onOrAfter(Version.V_5_1_0_UNRELEASED)) { + if (out.getVersion().onOrAfter(Version.V_5_1_1_UNRELEASED)) { out.writeOptionalVInt(sliceId); } out.writeVLong(total); @@ -229,7 +229,7 @@ public abstract class BulkByScrollTask extends CancellableTask { out.writeFloat(requestsPerSecond); out.writeOptionalString(reasonCancelled); throttledUntil.writeTo(out); - if (out.getVersion().onOrAfter(Version.V_5_1_0_UNRELEASED)) { + if (out.getVersion().onOrAfter(Version.V_5_1_1_UNRELEASED)) { out.writeVInt(sliceStatuses.size()); for (StatusOrException sliceStatus : sliceStatuses) { out.writeOptionalWriteable(sliceStatus); diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java index 7f7ae52b73b..09423b2cca8 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java +++ 
b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java @@ -39,8 +39,6 @@ import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.index.mapper.ParentFieldMapper; import org.elasticsearch.index.mapper.RoutingFieldMapper; -import org.elasticsearch.index.mapper.TTLFieldMapper; -import org.elasticsearch.index.mapper.TimestampFieldMapper; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHitField; import org.elasticsearch.threadpool.ThreadPool; @@ -237,16 +235,6 @@ public class ClientScrollableHitSource extends ScrollableHitSource { return fieldValue(RoutingFieldMapper.NAME); } - @Override - public Long getTimestamp() { - return fieldValue(TimestampFieldMapper.NAME); - } - - @Override - public Long getTTL() { - return fieldValue(TTLFieldMapper.NAME); - } - private T fieldValue(String fieldName) { SearchHitField field = delegate.field(fieldName); return field == null ? null : field.value(); diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java index 40aa745d06a..618db3dfa48 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ReindexRequest.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.CompositeIndicesRequest; -import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.io.stream.StreamInput; @@ -31,11 +30,7 @@ import org.elasticsearch.index.reindex.remote.RemoteInfo; import org.elasticsearch.tasks.TaskId; import java.io.IOException; -import java.util.Arrays; -import java.util.List; -import static java.util.Collections.singletonList; -import static java.util.Collections.unmodifiableList; import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.index.VersionType.INTERNAL; @@ -93,12 +88,6 @@ public class ReindexRequest extends AbstractBulkIndexByScrollRequestnot - * accurate since it returns a prototype {@link IndexRequest} and not the actual requests that will be issued as part of the - * execution of this request. Additionally, scripts can modify the underlying {@link IndexRequest} and change values such as the index, - * type, {@link org.elasticsearch.action.support.IndicesOptions}. In short - only use this for very course reasoning about the request. 
- * - * @return a list comprising of the {@link SearchRequest} and the prototype {@link IndexRequest} - */ - @Override - public List subRequests() { - assert getSearchRequest() != null; - assert getDestination() != null; - if (remoteInfo != null) { - return singletonList(getDestination()); - } - return unmodifiableList(Arrays.asList(getSearchRequest(), getDestination())); - } } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java index 14fd6cb9031..54746bc74b8 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java @@ -96,11 +96,6 @@ public class RestReindexAction extends AbstractBaseReindexRestHandler s.versionType(VersionType.fromString(i)), new ParseField("version_type")); - // These exist just so the user can get a nice validation error: - destParser.declareString(IndexRequest::timestamp, new ParseField("timestamp")); - destParser.declareString((i, ttl) -> i.ttl(parseTimeValue(ttl, TimeValue.timeValueMillis(-1), "ttl").millis()), - new ParseField("ttl")); - PARSER.declareField((p, v, c) -> sourceParser.parse(p, v, c), new ParseField("source"), ValueType.OBJECT); PARSER.declareField((p, v, c) -> destParser.parse(p, v.getDestination(), c), new ParseField("dest"), ValueType.OBJECT); PARSER.declareInt(ReindexRequest::setSize, new ParseField("size")); diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ScrollableHitSource.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ScrollableHitSource.java index 0b4b66222bc..bf13d6d72e2 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ScrollableHitSource.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/ScrollableHitSource.java @@ -187,14 +187,6 @@ public abstract class ScrollableHitSource implements Closeable { * The routing on the hit if there is any or null if there isn't. */ @Nullable String getRouting(); - /** - * The {@code _timestamp} on the hit if one was stored with the hit or null if one wasn't. - */ - @Nullable Long getTimestamp(); - /** - * The {@code _ttl} on the hit if one was set on it or null one wasn't. 
- */ - @Nullable Long getTTL(); } /** @@ -210,8 +202,6 @@ public abstract class ScrollableHitSource implements Closeable { private BytesReference source; private String parent; private String routing; - private Long timestamp; - private Long ttl; public BasicHit(String index, String type, String id, long version) { this.index = index; @@ -269,26 +259,6 @@ public abstract class ScrollableHitSource implements Closeable { this.routing = routing; return this; } - - @Override - public Long getTimestamp() { - return timestamp; - } - - public BasicHit setTimestamp(Long timestamp) { - this.timestamp = timestamp; - return this; - } - - @Override - public Long getTTL() { - return ttl; - } - - public BasicHit setTTL(Long ttl) { - this.ttl = ttl; - return this; - } } /** diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java index 96f9061c216..7ae0d715ed0 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java @@ -56,7 +56,6 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.VersionType; -import org.elasticsearch.index.mapper.TTLFieldMapper; import org.elasticsearch.index.mapper.VersionFieldMapper; import org.elasticsearch.index.reindex.ScrollableHitSource.SearchFailure; import org.elasticsearch.index.reindex.remote.RemoteInfo; @@ -319,8 +318,6 @@ public class TransportReindexAction extends HandledTransportAction request, Object to) { - request.setTimestamp(Objects.toString(to, null)); - } - - @Override - protected void scriptChangedTTL(RequestWrapper request, Object to) { - if (to == null) { - request.setTtl(null); - } else { - request.setTtl(asLong(to, TTLFieldMapper.NAME)); - } - } - private long asLong(Object from, String name) { /* * Stuffing a number into the map will have converted it to diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java index d8ca0441023..009ffabb73b 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportUpdateByQueryAction.java @@ -36,8 +36,6 @@ import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.IndexFieldMapper; import org.elasticsearch.index.mapper.ParentFieldMapper; import org.elasticsearch.index.mapper.RoutingFieldMapper; -import org.elasticsearch.index.mapper.TTLFieldMapper; -import org.elasticsearch.index.mapper.TimestampFieldMapper; import org.elasticsearch.index.mapper.TypeFieldMapper; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; @@ -161,15 +159,6 @@ public class TransportUpdateByQueryAction extends HandledTransportAction request, Object to) { - throw new IllegalArgumentException("Modifying [" + TimestampFieldMapper.NAME + "] not allowed"); - } - - @Override - protected void scriptChangedTTL(RequestWrapper request, Object to) { - throw new IllegalArgumentException("Modifying [" + TTLFieldMapper.NAME + "] not allowed"); - } } } } diff --git 
a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteResponseParsers.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteResponseParsers.java index 4583e4c8c05..9f1467d7cfd 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteResponseParsers.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteResponseParsers.java @@ -88,23 +88,19 @@ final class RemoteResponseParsers { ParseField ttlField = new ParseField("_ttl"); HIT_PARSER.declareString(BasicHit::setRouting, routingField); HIT_PARSER.declareString(BasicHit::setParent, parentField); - HIT_PARSER.declareLong(BasicHit::setTTL, ttlField); - HIT_PARSER.declareLong(BasicHit::setTimestamp, new ParseField("_timestamp")); // Pre-2.0.0 parent and routing come back in "fields" class Fields { String routing; String parent; - long ttl; } ObjectParser fieldsParser = new ObjectParser<>("fields", Fields::new); HIT_PARSER.declareObject((hit, fields) -> { hit.setRouting(fields.routing); hit.setParent(fields.parent); - hit.setTTL(fields.ttl); }, fieldsParser, new ParseField("fields")); fieldsParser.declareString((fields, routing) -> fields.routing = routing, routingField); fieldsParser.declareString((fields, parent) -> fields.parent = parent, parentField); - fieldsParser.declareLong((fields, ttl) -> fields.ttl = ttl, ttlField); + fieldsParser.declareLong((fields, ttl) -> {}, ttlField); // ignore ttls since they have been removed } /** diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexbyScrollActionMetadataTestCase.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexbyScrollActionMetadataTestCase.java index 4cc10334223..cb9ec0c273b 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexbyScrollActionMetadataTestCase.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AbstractAsyncBulkIndexbyScrollActionMetadataTestCase.java @@ -19,10 +19,6 @@ package org.elasticsearch.index.reindex; -import org.elasticsearch.action.index.IndexRequest; - -import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; - public abstract class AbstractAsyncBulkIndexbyScrollActionMetadataTestCase< Request extends AbstractBulkIndexByScrollRequest, Response extends BulkIndexByScrollResponse> @@ -32,17 +28,5 @@ public abstract class AbstractAsyncBulkIndexbyScrollActionMetadataTestCase< return new ScrollableHitSource.BasicHit("index", "type", "id", 0); } - public void testTimestampIsCopied() { - IndexRequest index = new IndexRequest(); - action().copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc().setTimestamp(10L)); - assertEquals("10", index.timestamp()); - } - - public void testTTL() throws Exception { - IndexRequest index = new IndexRequest(); - action().copyMetadata(AbstractAsyncBulkIndexByScrollAction.wrap(index), doc().setTTL(10L)); - assertEquals(timeValueMillis(10), index.ttl()); - } - protected abstract AbstractAsyncBulkIndexByScrollAction action(); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFailureTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFailureTests.java index 9bfa41da7f3..d089f0427c3 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFailureTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFailureTests.java @@ -19,7 +19,6 @@ package 
org.elasticsearch.index.reindex; -import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.bulk.BulkItemResponse.Failure; import org.elasticsearch.action.index.IndexRequestBuilder; @@ -119,28 +118,6 @@ public class ReindexFailureTests extends ReindexTestCase { assumeFalse("Wasn't able to trigger a reindex failure in " + attempt + " attempts.", true); } - public void testSettingTtlIsValidationFailure() throws Exception { - indexDocs(1); - ReindexRequestBuilder copy = reindex().source("source").destination("dest"); - copy.destination().setTTL(123); - try { - copy.get(); - } catch (ActionRequestValidationException e) { - assertThat(e.getMessage(), containsString("setting ttl on destination isn't supported. use scripts instead.")); - } - } - - public void testSettingTimestampIsValidationFailure() throws Exception { - indexDocs(1); - ReindexRequestBuilder copy = reindex().source("source").destination("dest"); - copy.destination().setTimestamp("now"); - try { - copy.get(); - } catch (ActionRequestValidationException e) { - assertThat(e.getMessage(), containsString("setting timestamp on destination isn't supported. use scripts instead.")); - } - } - private void indexDocs(int count) throws Exception { List docs = new ArrayList(count); for (int i = 0; i < count; i++) { diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java index 559d0b54565..30ba03aca76 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java @@ -34,14 +34,6 @@ import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; * Tests some of the validation of {@linkplain ReindexRequest}. See reindex's rest tests for much more. */ public class ReindexRequestTests extends AbstractBulkByScrollRequestTestCase { - public void testTimestampAndTtlNotAllowed() { - ReindexRequest reindex = newRequest(); - reindex.getDestination().ttl("1s").timestamp("now"); - ActionRequestValidationException e = reindex.validate(); - assertEquals("Validation Failed: 1: setting ttl on destination isn't supported. use scripts instead.;" - + "2: setting timestamp on destination isn't supported. 
use scripts instead.;", - e.getMessage()); - } public void testReindexFromRemoteDoesNotSupportSearchQuery() { ReindexRequest reindex = newRequest(); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java index c70b80b8e37..66b681b1494 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexScriptTests.java @@ -26,7 +26,6 @@ import org.elasticsearch.script.ScriptService; import java.util.Map; -import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; import static org.hamcrest.Matchers.containsString; /** @@ -104,32 +103,6 @@ public class ReindexScriptTests extends AbstractAsyncBulkIndexByScrollActionScri assertEquals(routing, index.routing()); } - public void testSetTimestamp() throws Exception { - String timestamp = randomFrom("now", "1234", null); - IndexRequest index = applyScript((Map ctx) -> ctx.put("_timestamp", timestamp)); - assertEquals(timestamp, index.timestamp()); - } - - public void testSetTtl() throws Exception { - Number ttl = randomFrom(new Number[] { null, 1233214, 134143797143L }); - IndexRequest index = applyScript((Map ctx) -> ctx.put("_ttl", ttl)); - if (ttl == null) { - assertEquals(null, index.ttl()); - } else { - assertEquals(timeValueMillis(ttl.longValue()), index.ttl()); - } - } - - public void testSettingTtlToJunkIsAnError() throws Exception { - Object junkTtl = randomFrom(new Object[] { "junk", Math.PI }); - try { - applyScript((Map ctx) -> ctx.put("_ttl", junkTtl)); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("_ttl may only be set to an int or a long but was [")); - assertThat(e.getMessage(), containsString(junkTtl.toString())); - } - } - @Override protected ReindexRequest request() { return new ReindexRequest(new SearchRequest(), new IndexRequest()); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java index 43e5c7ef5a3..8e34df3ec36 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java @@ -47,7 +47,6 @@ import java.util.stream.IntStream; import static java.lang.Math.abs; import static java.util.Collections.emptyList; -import static java.util.Collections.emptyMap; import static java.util.Collections.singletonList; import static java.util.stream.Collectors.toList; import static org.apache.lucene.util.TestUtil.randomSimpleString; @@ -84,7 +83,7 @@ public class RoundTripTests extends ESTestCase { reindex.setSlices(between(2, 1000)); Exception e = expectThrows(IllegalArgumentException.class, () -> roundTrip(Version.V_5_0_0_rc1, reindex, null)); assertEquals("Attempting to send sliced reindex-style request to a node that doesn't support it. " - + "Version is [5.0.0-rc1] but must be [5.1.0]", e.getMessage()); + + "Version is [5.0.0-rc1] but must be [5.1.1]", e.getMessage()); // Try without slices with a version that doesn't support slices. That should work. 
tripped = new ReindexRequest(); @@ -108,7 +107,7 @@ public class RoundTripTests extends ESTestCase { update.setSlices(between(2, 1000)); Exception e = expectThrows(IllegalArgumentException.class, () -> roundTrip(Version.V_5_0_0_rc1, update, null)); assertEquals("Attempting to send sliced reindex-style request to a node that doesn't support it. " - + "Version is [5.0.0-rc1] but must be [5.1.0]", e.getMessage()); + + "Version is [5.0.0-rc1] but must be [5.1.1]", e.getMessage()); // Try without slices with a version that doesn't support slices. That should work. tripped = new UpdateByQueryRequest(); @@ -129,7 +128,7 @@ public class RoundTripTests extends ESTestCase { delete.setSlices(between(2, 1000)); Exception e = expectThrows(IllegalArgumentException.class, () -> roundTrip(Version.V_5_0_0_rc1, delete, null)); assertEquals("Attempting to send sliced reindex-style request to a node that doesn't support it. " - + "Version is [5.0.0-rc1] but must be [5.1.0]", e.getMessage()); + + "Version is [5.0.0-rc1] but must be [5.1.1]", e.getMessage()); // Try without slices with a version that doesn't support slices. That should work. tripped = new DeleteByQueryRequest(); @@ -356,7 +355,7 @@ public class RoundTripTests extends ESTestCase { assertEquals(expected.getRequestsPerSecond(), actual.getRequestsPerSecond(), 0f); assertEquals(expected.getReasonCancelled(), actual.getReasonCancelled()); assertEquals(expected.getThrottledUntil(), actual.getThrottledUntil()); - if (version.onOrAfter(Version.V_5_1_0_UNRELEASED)) { + if (version.onOrAfter(Version.V_5_1_1_UNRELEASED)) { assertThat(actual.getSliceStatuses(), hasSize(expected.getSliceStatuses().size())); for (int i = 0; i < expected.getSliceStatuses().size(); i++) { BulkByScrollTask.StatusOrException sliceStatus = expected.getSliceStatuses().get(i); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWithScriptTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWithScriptTests.java index c5b9d4da64f..5ff54e4e06d 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWithScriptTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryWithScriptTests.java @@ -38,7 +38,7 @@ public class UpdateByQueryWithScriptTests * error message to the user, not some ClassCastException. 
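The test that follows checks that a script writing to a disallowed metadata key fails with a readable message rather than a ClassCastException, and that _timestamp and _ttl are no longer in the writable set. A standalone, hedged sketch of that kind of guard; the class name, method and message wording below are illustrative only:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

final class CtxMetadataGuard {
    // the metadata keys a reindex/update-by-query script may still rewrite after this change
    private static final Set<String> MODIFIABLE =
            new HashSet<>(Arrays.asList("_index", "_type", "_id", "_version", "_parent", "_routing"));

    static void checkMetadataKey(String key) {
        if (MODIFIABLE.contains(key) == false) {
            // fail fast with a clear message instead of letting a bad cast surface later
            throw new IllegalArgumentException("[" + key + "] may not be modified from a script");
        }
    }
}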
*/ Object[] options = new Object[] {"cat", new Object(), 123, new Date(), Math.PI}; - for (String ctxVar: new String[] {"_index", "_type", "_id", "_version", "_parent", "_routing", "_timestamp", "_ttl"}) { + for (String ctxVar: new String[] {"_index", "_type", "_id", "_version", "_parent", "_routing"}) { try { applyScript((Map ctx) -> ctx.put(ctxVar, randomFrom(options))); } catch (IllegalArgumentException e) { diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java index 351f9392656..f898483a124 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java @@ -169,8 +169,6 @@ public class RemoteScrollableHitSourceTests extends ESTestCase { assertEquals("test", r.getHits().get(0).getType()); assertEquals("AVToMiC250DjIiBO3yJ_", r.getHits().get(0).getId()); assertEquals("{\"test\":\"test2\"}", r.getHits().get(0).getSource().utf8ToString()); - assertNull(r.getHits().get(0).getTTL()); - assertNull(r.getHits().get(0).getTimestamp()); assertNull(r.getHits().get(0).getRouting()); called.set(true); }); @@ -189,8 +187,6 @@ public class RemoteScrollableHitSourceTests extends ESTestCase { assertEquals("test", r.getHits().get(0).getType()); assertEquals("AVToMiDL50DjIiBO3yKA", r.getHits().get(0).getId()); assertEquals("{\"test\":\"test3\"}", r.getHits().get(0).getSource().utf8ToString()); - assertNull(r.getHits().get(0).getTTL()); - assertNull(r.getHits().get(0).getTimestamp()); assertNull(r.getHits().get(0).getRouting()); called.set(true); }); @@ -205,8 +201,6 @@ public class RemoteScrollableHitSourceTests extends ESTestCase { sourceWithMockedRemoteCall("scroll_fully_loaded.json").doStartNextScroll("", timeValueMillis(0), r -> { assertEquals("AVToMiDL50DjIiBO3yKA", r.getHits().get(0).getId()); assertEquals("{\"test\":\"test3\"}", r.getHits().get(0).getSource().utf8ToString()); - assertEquals((Long) 1234L, r.getHits().get(0).getTTL()); - assertEquals((Long) 123444L, r.getHits().get(0).getTimestamp()); assertEquals("testrouting", r.getHits().get(0).getRouting()); assertEquals("testparent", r.getHits().get(0).getParent()); called.set(true); @@ -222,8 +216,6 @@ public class RemoteScrollableHitSourceTests extends ESTestCase { sourceWithMockedRemoteCall("scroll_fully_loaded_1_7.json").doStartNextScroll("", timeValueMillis(0), r -> { assertEquals("AVToMiDL50DjIiBO3yKA", r.getHits().get(0).getId()); assertEquals("{\"test\":\"test3\"}", r.getHits().get(0).getSource().utf8ToString()); - assertEquals((Long) 1234L, r.getHits().get(0).getTTL()); - assertNull(r.getHits().get(0).getTimestamp()); // Not available from 1.7 assertEquals("testrouting", r.getHits().get(0).getRouting()); assertEquals("testparent", r.getHits().get(0).getParent()); called.set(true); @@ -248,8 +240,6 @@ public class RemoteScrollableHitSourceTests extends ESTestCase { assertEquals("test", r.getHits().get(0).getType()); assertEquals("AVToMiDL50DjIiBO3yKA", r.getHits().get(0).getId()); assertEquals("{\"test\":\"test3\"}", r.getHits().get(0).getSource().utf8ToString()); - assertNull(r.getHits().get(0).getTTL()); - assertNull(r.getHits().get(0).getTimestamp()); assertNull(r.getHits().get(0).getRouting()); called.set(true); }); diff --git 
a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/20_validation.yaml b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/20_validation.yaml index ffcdb42c86c..68ae83eabd2 100644 --- a/modules/reindex/src/test/resources/rest-api-spec/test/reindex/20_validation.yaml +++ b/modules/reindex/src/test/resources/rest-api-spec/test/reindex/20_validation.yaml @@ -113,42 +113,6 @@ index: dest size: -4 ---- -"can't set ttl": - - do: - index: - index: test - type: test - id: 1 - body: { "text": "test" } - - do: - catch: /setting ttl on destination isn't supported. use scripts instead./ - reindex: - body: - source: - index: test - dest: - index: dest - ttl: 3m - ---- -"can't set timestamp": - - do: - index: - index: test - type: test - id: 1 - body: { "text": "test" } - - do: - catch: /setting timestamp on destination isn't supported. use scripts instead./ - reindex: - body: - source: - index: test - dest: - index: dest - timestamp: "123" - --- "requests_per_second cannot be an empty string": - do: diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java index bb84c47dc17..20dd2d1a9fc 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Transport.java @@ -55,6 +55,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.FutureUtils; @@ -62,7 +63,9 @@ import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.monitor.jvm.JvmInfo; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ConnectTransportException; +import org.elasticsearch.transport.ConnectionProfile; import org.elasticsearch.transport.TcpTransport; +import org.elasticsearch.transport.TransportRequestOptions; import org.elasticsearch.transport.TransportServiceAdapter; import org.elasticsearch.transport.TransportSettings; @@ -202,7 +205,7 @@ public class Netty4Transport extends TcpTransport { bootstrap.handler(getClientChannelInitializer()); - bootstrap.option(ChannelOption.CONNECT_TIMEOUT_MILLIS, Math.toIntExact(connectTimeout.millis())); + bootstrap.option(ChannelOption.CONNECT_TIMEOUT_MILLIS, Math.toIntExact(defaultConnectionProfile.getConnectTimeout().millis())); bootstrap.option(ChannelOption.TCP_NODELAY, TCP_NO_DELAY.get(settings)); bootstrap.option(ChannelOption.SO_KEEPALIVE, TCP_KEEP_ALIVE.get(settings)); @@ -268,8 +271,13 @@ public class Netty4Transport extends TcpTransport { logger.debug("using profile[{}], worker_count[{}], port[{}], bind_host[{}], publish_host[{}], compress[{}], " + "connect_timeout[{}], connections_per_node[{}/{}/{}/{}/{}], receive_predictor[{}->{}]", name, workerCount, settings.get("port"), settings.get("bind_host"), settings.get("publish_host"), compress, - connectTimeout, connectionsPerNodeRecovery, connectionsPerNodeBulk, connectionsPerNodeReg, connectionsPerNodeState, - connectionsPerNodePing, receivePredictorMin, receivePredictorMax); + defaultConnectionProfile.getConnectTimeout(), 
+ defaultConnectionProfile.getNumConnectionsPerType(TransportRequestOptions.Type.RECOVERY), + defaultConnectionProfile.getNumConnectionsPerType(TransportRequestOptions.Type.BULK), + defaultConnectionProfile.getNumConnectionsPerType(TransportRequestOptions.Type.REG), + defaultConnectionProfile.getNumConnectionsPerType(TransportRequestOptions.Type.STATE), + defaultConnectionProfile.getNumConnectionsPerType(TransportRequestOptions.Type.PING), + receivePredictorMin, receivePredictorMax); } final ThreadFactory workerFactory = daemonThreadFactory(this.settings, TRANSPORT_SERVER_WORKER_THREAD_NAME_PREFIX, name); @@ -331,37 +339,24 @@ public class Netty4Transport extends TcpTransport { return channels == null ? 0 : channels.numberOfOpenChannels(); } - protected NodeChannels connectToChannelsLight(DiscoveryNode node) { - InetSocketAddress address = node.getAddress().address(); - ChannelFuture connect = bootstrap.connect(address); - connect.awaitUninterruptibly((long) (connectTimeout.millis() * 1.5)); - if (!connect.isSuccess()) { - throw new ConnectTransportException(node, "connect_timeout[" + connectTimeout + "]", connect.cause()); - } - Channel[] channels = new Channel[1]; - channels[0] = connect.channel(); - channels[0].closeFuture().addListener(new ChannelCloseListener(node)); - NodeChannels nodeChannels = new NodeChannels(channels, channels, channels, channels, channels); - onAfterChannelsConnected(nodeChannels); - return nodeChannels; - } - - protected NodeChannels connectToChannels(DiscoveryNode node) { - final NodeChannels nodeChannels = - new NodeChannels( - new Channel[connectionsPerNodeRecovery], - new Channel[connectionsPerNodeBulk], - new Channel[connectionsPerNodeReg], - new Channel[connectionsPerNodeState], - new Channel[connectionsPerNodePing]); + @Override + protected NodeChannels connectToChannels(DiscoveryNode node, ConnectionProfile profile) { + final Channel[] channels = new Channel[profile.getNumConnections()]; + final NodeChannels nodeChannels = new NodeChannels(channels, profile); boolean success = false; try { - int numConnections = - connectionsPerNodeRecovery + - connectionsPerNodeBulk + - connectionsPerNodeReg + - connectionsPerNodeState + - connectionsPerNodeRecovery; + final int numConnections = channels.length; + final TimeValue connectTimeout; + final Bootstrap bootstrap; + final TimeValue defaultConnectTimeout = defaultConnectionProfile.getConnectTimeout(); + if (profile.getConnectTimeout() != null && profile.getConnectTimeout().equals(defaultConnectTimeout) == false) { + bootstrap = this.bootstrap.clone(this.bootstrap.config().group()); + bootstrap.option(ChannelOption.CONNECT_TIMEOUT_MILLIS, Math.toIntExact(profile.getConnectTimeout().millis())); + connectTimeout = profile.getConnectTimeout(); + } else { + connectTimeout = defaultConnectTimeout; + bootstrap = this.bootstrap; + } final ArrayList connections = new ArrayList<>(numConnections); final InetSocketAddress address = node.getAddress().address(); for (int i = 0; i < numConnections; i++) { @@ -369,27 +364,15 @@ public class Netty4Transport extends TcpTransport { } final Iterator iterator = connections.iterator(); try { - for (Channel[] channels : nodeChannels.getChannelArrays()) { - for (int i = 0; i < channels.length; i++) { - assert iterator.hasNext(); - ChannelFuture future = iterator.next(); - future.awaitUninterruptibly((long) (connectTimeout.millis() * 1.5)); - if (!future.isSuccess()) { - throw new ConnectTransportException(node, "connect_timeout[" + connectTimeout + "]", future.cause()); - } - 
channels[i] = future.channel(); - channels[i].closeFuture().addListener(new ChannelCloseListener(node)); + for (int i = 0; i < channels.length; i++) { + assert iterator.hasNext(); + ChannelFuture future = iterator.next(); + future.awaitUninterruptibly((long) (connectTimeout.millis() * 1.5)); + if (!future.isSuccess()) { + throw new ConnectTransportException(node, "connect_timeout[" + connectTimeout + "]", future.cause()); } - } - if (nodeChannels.recovery.length == 0) { - if (nodeChannels.bulk.length > 0) { - nodeChannels.recovery = nodeChannels.bulk; - } else { - nodeChannels.recovery = nodeChannels.reg; - } - } - if (nodeChannels.bulk.length == 0) { - nodeChannels.bulk = nodeChannels.reg; + channels[i] = future.channel(); + channels[i].closeFuture().addListener(new ChannelCloseListener(node)); } } catch (final RuntimeException e) { for (final ChannelFuture future : Collections.unmodifiableList(connections)) { diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java index 774ac596382..08d825a354d 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java @@ -32,12 +32,11 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.logging.ESLoggerFactory; import java.io.IOException; -import java.io.PrintWriter; -import java.io.StringWriter; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; +import java.util.stream.Collectors; public class Netty4Utils { @@ -122,7 +121,13 @@ public class Netty4Utils { } } - public static void maybeDie(final Throwable cause) throws IOException { + /** + * If the specified cause is an unrecoverable error, this method will rethrow the cause on a separate thread so that it can not be + * caught and bubbles up to the uncaught exception handler. + * + * @param cause the throwable to test + */ + public static void maybeDie(final Throwable cause) { if (cause instanceof Error) { /* * Here be dragons. We want to rethrow this so that it bubbles up to the uncaught exception handler. Yet, Netty wraps too many @@ -131,20 +136,17 @@ public class Netty4Utils { * the exception so as to not lose the original cause during exit, so we give the thread a name based on the previous stack * frame so that at least we know where it came from (in case logging the current stack trace fails). 
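The rewritten connectToChannels above sizes a single channel array from the ConnectionProfile and only clones the shared Bootstrap when the profile carries a non-default connect timeout. A hedged sketch of that timeout selection, assuming Netty 4.1's Bootstrap.clone(EventLoopGroup) and an Elasticsearch-style TimeValue; the helper name is illustrative:

import io.netty.bootstrap.Bootstrap;
import io.netty.channel.ChannelOption;
import org.elasticsearch.common.unit.TimeValue;

final class ProfileBootstrap {
    // return the shared bootstrap, or a clone with only the connect timeout overridden
    static Bootstrap forTimeout(Bootstrap shared, TimeValue defaultTimeout, TimeValue profileTimeout) {
        if (profileTimeout != null && profileTimeout.equals(defaultTimeout) == false) {
            Bootstrap copy = shared.clone(shared.config().group());   // reuse the existing event loop group
            copy.option(ChannelOption.CONNECT_TIMEOUT_MILLIS, Math.toIntExact(profileTimeout.millis()));
            return copy;
        }
        return shared;
    }
}

Cloning keeps per-profile timeouts from leaking into the shared bootstrap used by every other connection.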
*/ - try ( - final StringWriter sw = new StringWriter(); - final PrintWriter pw = new PrintWriter(sw)) { + try { // try to log the current stack trace - Arrays.stream(Thread.currentThread().getStackTrace()).skip(1).map(e -> "\tat " + e).forEach(pw::println); - ESLoggerFactory.getLogger(Netty4Utils.class).error("fatal error on the network layer\n{}", sw.toString()); + final StackTraceElement[] stackTrace = Thread.currentThread().getStackTrace(); + final String formatted = Arrays.stream(stackTrace).skip(1).map(e -> "\tat " + e).collect(Collectors.joining("\n")); + ESLoggerFactory.getLogger(Netty4Utils.class).error("fatal error on the network layer\n{}", formatted); } finally { - final StackTraceElement previous = Thread.currentThread().getStackTrace()[2]; new Thread( - () -> { - throw (Error) cause; - }, - previous.getClassName() + "#" + previous.getMethodName()) - .start(); + () -> { + throw (Error) cause; + }) + .start(); } } } diff --git a/modules/transport-netty4/src/main/plugin-metadata/plugin-security.policy b/modules/transport-netty4/src/main/plugin-metadata/plugin-security.policy index 902bfdee231..ce39869d2fb 100644 --- a/modules/transport-netty4/src/main/plugin-metadata/plugin-security.policy +++ b/modules/transport-netty4/src/main/plugin-metadata/plugin-security.policy @@ -17,8 +17,13 @@ * under the License. */ +grant codeBase "${codebase.netty-common-4.1.6.Final.jar}" { + // for reading the system-wide configuration for the backlog of established sockets + permission java.io.FilePermission "/proc/sys/net/core/somaxconn", "read"; +}; + grant codeBase "${codebase.netty-transport-4.1.6.Final.jar}" { // Netty NioEventLoop wants to change this, because of https://bugs.openjdk.java.net/browse/JDK-6427854 // the bug says it only happened rarely, and that its fixed, but apparently it still happens rarely! 
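The maybeDie() change above drops the PrintWriter plumbing in favour of Collectors.joining and rethrows fatal errors on a fresh thread so they cannot be swallowed by Netty's catch blocks. A self-contained, plain-JDK illustration of both ideas (not the Netty4Utils code itself):

import java.util.Arrays;
import java.util.stream.Collectors;

public final class RethrowFatal {
    public static void main(String[] args) {
        Thread.setDefaultUncaughtExceptionHandler((t, e) ->
                System.err.println("uncaught on [" + t.getName() + "]: " + e));

        // format the current stack trace without a StringWriter/PrintWriter pair
        String formatted = Arrays.stream(Thread.currentThread().getStackTrace())
                .skip(1)                                  // drop the getStackTrace frame itself
                .map(e -> "\tat " + e)
                .collect(Collectors.joining("\n"));
        System.err.println("fatal error on the network layer\n" + formatted);

        // rethrow on a separate thread so no catch block on this thread can intercept it
        Error cause = new OutOfMemoryError("simulated fatal error");
        new Thread(() -> { throw cause; }).start();
    }
}

Running it prints the formatted trace, then the default handler reports the simulated error from the new thread.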
permission java.util.PropertyPermission "sun.nio.ch.bugLevel", "write"; -}; \ No newline at end of file +}; diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpRequestSizeLimitIT.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpRequestSizeLimitIT.java index 0b8b347e30f..d99820bb864 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpRequestSizeLimitIT.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpRequestSizeLimitIT.java @@ -101,7 +101,7 @@ public class Netty4HttpRequestSizeLimitIT extends ESNetty4IntegTestCase { Tuple[] requestUris = new Tuple[1500]; for (int i = 0; i < requestUris.length; i++) { requestUris[i] = Tuple.tuple("/_cluster/settings", - "{ \"transient\": {\"indices.ttl.interval\": \"40s\" } }"); + "{ \"transient\": {\"search.default_search_timeout\": \"40s\" } }"); } HttpServerTransport httpServerTransport = internalCluster().getInstance(HttpServerTransport.class); diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4HeadBodyIsEmptyIT.java b/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4HeadBodyIsEmptyIT.java index 8716f59ee00..037229f0972 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4HeadBodyIsEmptyIT.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4HeadBodyIsEmptyIT.java @@ -19,5 +19,53 @@ package org.elasticsearch.rest; -public class Netty4HeadBodyIsEmptyIT extends HeadBodyIsEmptyIntegTestCase { +import org.apache.http.entity.StringEntity; +import org.elasticsearch.client.Response; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.hamcrest.Matcher; + +import java.io.IOException; +import java.util.Map; + +import static java.util.Collections.emptyMap; +import static java.util.Collections.singletonMap; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; + +public class Netty4HeadBodyIsEmptyIT extends ESRestTestCase { + + public void testHeadRoot() throws IOException { + headTestCase("/", emptyMap(), greaterThan(0)); + headTestCase("/", singletonMap("pretty", ""), greaterThan(0)); + headTestCase("/", singletonMap("pretty", "true"), greaterThan(0)); + } + + private void createTestDoc() throws IOException { + client().performRequest("PUT", "test/test/1", emptyMap(), new StringEntity("{\"test\": \"test\"}")); + } + + public void testDocumentExists() throws IOException { + createTestDoc(); + headTestCase("test/test/1", emptyMap(), equalTo(0)); + headTestCase("test/test/1", singletonMap("pretty", "true"), equalTo(0)); + } + + public void testIndexExists() throws IOException { + createTestDoc(); + headTestCase("test", emptyMap(), equalTo(0)); + headTestCase("test", singletonMap("pretty", "true"), equalTo(0)); + } + + public void testTypeExists() throws IOException { + createTestDoc(); + headTestCase("test/test", emptyMap(), equalTo(0)); + headTestCase("test/test", singletonMap("pretty", "true"), equalTo(0)); + } + + private void headTestCase(String url, Map params, Matcher matcher) throws IOException { + Response response = client().performRequest("HEAD", url, params); + assertEquals(200, response.getStatusLine().getStatusCode()); + assertThat(Integer.valueOf(response.getHeader("Content-Length")), matcher); + assertNull("HEAD requests shouldn't have a response body but " + url + " did", response.getEntity()); + } } diff --git 
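Netty4HeadBodyIsEmptyIT above pins down the contract that a HEAD response advertises a Content-Length but ships no body. The same invariant expressed with plain JDK HTTP against a locally running node; the URL is a placeholder and this is not the test code:

import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;

public final class HeadHasNoBody {
    public static void main(String[] args) throws Exception {
        HttpURLConnection conn = (HttpURLConnection) new URL("http://localhost:9200/").openConnection();
        conn.setRequestMethod("HEAD");
        int status = conn.getResponseCode();
        String contentLength = conn.getHeaderField("Content-Length");   // should be present and > 0 for "/"
        int bodyBytes = 0;
        try (InputStream in = conn.getInputStream()) {
            while (in.read() != -1) {
                bodyBytes++;                                            // expected to stay 0 for HEAD
            }
        }
        System.out.printf("status=%d, Content-Length=%s, body bytes=%d%n", status, contentLength, bodyBytes);
    }
}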
a/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.3.0.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.3.0.jar.sha1 deleted file mode 100644 index 22ff33fee84..00000000000 --- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -467d808656db028faa3cbc86d386dbf6164a835c \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.4.0-snapshot-ec38570.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.4.0-snapshot-ec38570.jar.sha1 new file mode 100644 index 00000000000..7ee6c4f0787 --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.4.0-snapshot-ec38570.jar.sha1 @@ -0,0 +1 @@ +0850319baf063c5ee54aecabeaddb95efde8711b \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.3.0.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.3.0.jar.sha1 deleted file mode 100644 index 13226a0d4be..00000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bea02277bff7fa0f4d93e6abca94eaf0eec9c84f \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.4.0-snapshot-ec38570.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.4.0-snapshot-ec38570.jar.sha1 new file mode 100644 index 00000000000..c66710ea344 --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.4.0-snapshot-ec38570.jar.sha1 @@ -0,0 +1 @@ +3a2af1d2e80b9901b3e950f5ac1b6cd1eb408fd3 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.3.0.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.3.0.jar.sha1 deleted file mode 100644 index 5a57464512f..00000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -657a1409f539b4a20b5487496a8e4471b33902fd \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.4.0-snapshot-ec38570.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.4.0-snapshot-ec38570.jar.sha1 new file mode 100644 index 00000000000..06702f8d87e --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.4.0-snapshot-ec38570.jar.sha1 @@ -0,0 +1 @@ +7e9243da1482f88a91bd5239316b571259d24341 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.3.0.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.3.0.jar.sha1 deleted file mode 100644 index eab2257293c..00000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -47792194b04e8cd61c3667da50a38adae257b19a \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.4.0-snapshot-ec38570.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.4.0-snapshot-ec38570.jar.sha1 new file mode 100644 index 00000000000..561a46f2a2c --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.4.0-snapshot-ec38570.jar.sha1 @@ -0,0 +1 @@ +2ead714733bb3cc90e9792d76021497946d5af09 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.3.0.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.3.0.jar.sha1 deleted file mode 100644 index bba7a9bc273..00000000000 --- 
a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bcf535520b92821cf04486031214d35d7405571c \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.4.0-snapshot-ec38570.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.4.0-snapshot-ec38570.jar.sha1 new file mode 100644 index 00000000000..c3b55dc76a1 --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.4.0-snapshot-ec38570.jar.sha1 @@ -0,0 +1 @@ +9a8f3b58e6c672276331f54b5c3be49c8014ec5c \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-6.3.0.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-6.3.0.jar.sha1 deleted file mode 100644 index e136d57854a..00000000000 --- a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-6.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -82ed82174fae75f93741b8418046bc94e50434f8 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-6.4.0-snapshot-ec38570.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-6.4.0-snapshot-ec38570.jar.sha1 new file mode 100644 index 00000000000..3fbc82d91c1 --- /dev/null +++ b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-6.4.0-snapshot-ec38570.jar.sha1 @@ -0,0 +1 @@ +763b3144b9bc53328e923242a3c6614903ee2d7e \ No newline at end of file diff --git a/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java b/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java index 418d81fcf1e..1f5055f1429 100644 --- a/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java +++ b/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java @@ -23,10 +23,10 @@ import java.io.IOException; import java.util.List; import java.util.Map; -import org.apache.lucene.document.Field; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.StoredField; import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.IndexableField; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; @@ -144,7 +144,7 @@ public class Murmur3FieldMapper extends FieldMapper { } @Override - protected void parseCreateField(ParseContext context, List fields) + protected void parseCreateField(ParseContext context, List fields) throws IOException { final Object value; if (context.externalValueSet()) { diff --git a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java index 72930344bbf..af5886bb6f9 100644 --- a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java +++ b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperTests.java @@ -55,7 +55,7 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase { DocumentMapperParser parser; @Before - public void before() { + public void setup() { indexService = createIndex("test"); mapperRegistry = new MapperRegistry( Collections.singletonMap(Murmur3FieldMapper.CONTENT_TYPE, new Murmur3FieldMapper.TypeParser()), diff --git 
a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperUpgradeTests.java b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperUpgradeTests.java deleted file mode 100644 index c632e139955..00000000000 --- a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapperUpgradeTests.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.mapper.murmur3; - -import org.apache.lucene.util.LuceneTestCase; -import org.apache.lucene.util.TestUtil; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.plugin.mapper.MapperMurmur3Plugin; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.metrics.cardinality.Cardinality; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.InternalTestCluster; -import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; - -import java.io.IOException; -import java.io.InputStream; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.Collection; -import java.util.Collections; -import java.util.concurrent.ExecutionException; - -@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) -@LuceneTestCase.SuppressFileSystems("ExtrasFS") -public class Murmur3FieldMapperUpgradeTests extends ESIntegTestCase { - - @Override - protected Collection> nodePlugins() { - return Collections.singleton(MapperMurmur3Plugin.class); - } - - public void testUpgradeOldMapping() throws IOException, ExecutionException, InterruptedException { - final String indexName = "index-mapper-murmur3-2.0.0"; - final String indexUUID = "1VzJds59TTK7lRu17W0mcg"; - InternalTestCluster.Async master = internalCluster().startNodeAsync(); - Path unzipDir = createTempDir(); - Path unzipDataDir = unzipDir.resolve("data"); - Path backwardsIndex = getBwcIndicesPath().resolve(indexName + ".zip"); - try (InputStream stream = Files.newInputStream(backwardsIndex)) { - TestUtil.unzip(stream, unzipDir); - } - assertTrue(Files.exists(unzipDataDir)); - - Path dataPath = createTempDir(); - Settings settings = Settings.builder() - .put(Environment.PATH_DATA_SETTING.getKey(), dataPath) - .build(); - final String node = internalCluster().startDataOnlyNode(settings); // workaround for dangling index loading issue when node is master - Path[] nodePaths = internalCluster().getInstance(NodeEnvironment.class, node).nodeDataPaths(); - assertEquals(1, nodePaths.length); - 
dataPath = nodePaths[0].resolve(NodeEnvironment.INDICES_FOLDER); - assertFalse(Files.exists(dataPath)); - Path src = unzipDataDir.resolve(indexName + "/nodes/0/indices"); - Files.move(src, dataPath); - Files.move(dataPath.resolve(indexName), dataPath.resolve(indexUUID)); - - master.get(); - // force reloading dangling indices with a cluster state republish - client().admin().cluster().prepareReroute().get(); - ensureGreen(indexName); - final SearchResponse countResponse = client().prepareSearch(indexName).setSize(0).get(); - ElasticsearchAssertions.assertHitCount(countResponse, 3L); - - final SearchResponse cardinalityResponse = client().prepareSearch(indexName).addAggregation( - AggregationBuilders.cardinality("card").field("foo.hash")).get(); - Cardinality cardinality = cardinalityResponse.getAggregations().get("card"); - assertEquals(3L, cardinality.getValue()); - } -} diff --git a/plugins/mapper-murmur3/src/test/resources/indices/bwc/index-mapper-murmur3-2.0.0.zip b/plugins/mapper-murmur3/src/test/resources/indices/bwc/index-mapper-murmur3-2.0.0.zip deleted file mode 100644 index 0b69aac180b..00000000000 Binary files a/plugins/mapper-murmur3/src/test/resources/indices/bwc/index-mapper-murmur3-2.0.0.zip and /dev/null differ diff --git a/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java b/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java index a9fb9049b04..a12d5be1fde 100644 --- a/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java +++ b/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java @@ -19,14 +19,13 @@ package org.elasticsearch.index.mapper.size; -import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.IndexableField; import org.elasticsearch.Version; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.mapper.EnabledAttributeMapper; -import org.elasticsearch.index.mapper.LegacyIntegerFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; @@ -49,40 +48,20 @@ public class SizeFieldMapper extends MetadataFieldMapper { public static final MappedFieldType SIZE_FIELD_TYPE = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER); - public static final MappedFieldType LEGACY_SIZE_FIELD_TYPE = - LegacyIntegerFieldMapper.Defaults.FIELD_TYPE.clone(); static { SIZE_FIELD_TYPE.setStored(true); SIZE_FIELD_TYPE.setName(NAME); SIZE_FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); SIZE_FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); + SIZE_FIELD_TYPE.setHasDocValues(true); SIZE_FIELD_TYPE.freeze(); - - LEGACY_SIZE_FIELD_TYPE.setStored(true); - LEGACY_SIZE_FIELD_TYPE.setNumericPrecisionStep(LegacyIntegerFieldMapper.Defaults.PRECISION_STEP_32_BIT); - LEGACY_SIZE_FIELD_TYPE.setName(NAME); - LEGACY_SIZE_FIELD_TYPE.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); - LEGACY_SIZE_FIELD_TYPE.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER); - LEGACY_SIZE_FIELD_TYPE.freeze(); } } private static MappedFieldType defaultFieldType(Version indexCreated) { - MappedFieldType defaultFieldType; - if (indexCreated.before(Version.V_5_0_0_alpha2)) { - defaultFieldType = Defaults.LEGACY_SIZE_FIELD_TYPE.clone(); - // doc_values are 
disabled for bwc with indices created before V_5_0_0_alpha4 - defaultFieldType.setHasDocValues(false); - } else { - defaultFieldType = Defaults.SIZE_FIELD_TYPE.clone(); - if (indexCreated.onOrBefore(Version.V_5_0_0_alpha4)) { - // doc_values are disabled for bwc with indices created before V_5_0_0_alpha4 - defaultFieldType.setHasDocValues(false); - } else { - defaultFieldType.setHasDocValues(true); - } - } + MappedFieldType defaultFieldType = Defaults.SIZE_FIELD_TYPE.clone(); + defaultFieldType.setHasDocValues(true); return defaultFieldType; } @@ -176,19 +155,15 @@ public class SizeFieldMapper extends MetadataFieldMapper { } @Override - protected void parseCreateField(ParseContext context, List fields) throws IOException { + protected void parseCreateField(ParseContext context, List fields) throws IOException { if (!enabledState.enabled) { return; } final int value = context.sourceToParse().source().length(); - if (Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha2)) { - fields.add(new LegacyIntegerFieldMapper.CustomIntegerNumericField(value, fieldType())); - } else { - boolean indexed = fieldType().indexOptions() != IndexOptions.NONE; - boolean docValued = fieldType().hasDocValues(); - boolean stored = fieldType().stored(); - fields.addAll(NumberFieldMapper.NumberType.INTEGER.createFields(name(), value, indexed, docValued, stored)); - } + boolean indexed = fieldType().indexOptions() != IndexOptions.NONE; + boolean docValued = fieldType().hasDocValues(); + boolean stored = fieldType().stored(); + fields.addAll(NumberFieldMapper.NumberType.INTEGER.createFields(name(), value, indexed, docValued, stored)); } @Override diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeFieldMapperUpgradeTests.java b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeFieldMapperUpgradeTests.java deleted file mode 100644 index 7cbce102c57..00000000000 --- a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeFieldMapperUpgradeTests.java +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
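With the legacy path removed, SizeFieldMapper above always feeds the source length through NumberType.INTEGER.createFields with indexing, doc values and stored value enabled. In plain Lucene terms that amounts to roughly the following three fields per document; a hedged sketch, since the production code builds them through the mapper infrastructure rather than directly:

import org.apache.lucene.document.Document;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.document.StoredField;

final class SizeFieldSketch {
    static Document sizeFields(int sourceLength) {
        Document doc = new Document();
        doc.add(new IntPoint("_size", sourceLength));                     // point index for range queries
        doc.add(new SortedNumericDocValuesField("_size", sourceLength));  // doc values for sorting/aggregations
        doc.add(new StoredField("_size", sourceLength));                  // stored copy for retrieval
        return doc;
    }
}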
- */ - -package org.elasticsearch.index.mapper.size; - -import org.apache.lucene.util.LuceneTestCase; -import org.apache.lucene.util.TestUtil; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.plugin.mapper.MapperSizePlugin; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.search.SearchHit; -import org.elasticsearch.search.SearchHitField; -import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.InternalTestCluster; -import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; - -import java.io.IOException; -import java.io.InputStream; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.Collection; -import java.util.Collections; -import java.util.Map; -import java.util.concurrent.ExecutionException; - -@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 0) -@LuceneTestCase.SuppressFileSystems("ExtrasFS") -public class SizeFieldMapperUpgradeTests extends ESIntegTestCase { - - @Override - protected Collection> nodePlugins() { - return Collections.singleton(MapperSizePlugin.class); - } - - public void testUpgradeOldMapping() throws IOException, ExecutionException, InterruptedException { - final String indexName = "index-mapper-size-2.0.0"; - final String indexUUID = "ENCw7sG0SWuTPcH60bHheg"; - InternalTestCluster.Async master = internalCluster().startNodeAsync(); - Path unzipDir = createTempDir(); - Path unzipDataDir = unzipDir.resolve("data"); - Path backwardsIndex = getBwcIndicesPath().resolve(indexName + ".zip"); - try (InputStream stream = Files.newInputStream(backwardsIndex)) { - TestUtil.unzip(stream, unzipDir); - } - assertTrue(Files.exists(unzipDataDir)); - - Path dataPath = createTempDir(); - Settings settings = Settings.builder() - .put(Environment.PATH_DATA_SETTING.getKey(), dataPath) - .build(); - // workaround for dangling index loading issue when node is master - final String node = internalCluster().startDataOnlyNode(settings); - Path[] nodePaths = internalCluster().getInstance(NodeEnvironment.class, node).nodeDataPaths(); - assertEquals(1, nodePaths.length); - dataPath = nodePaths[0].resolve(NodeEnvironment.INDICES_FOLDER); - assertFalse(Files.exists(dataPath)); - Path src = unzipDataDir.resolve(indexName + "/nodes/0/indices"); - Files.move(src, dataPath); - Files.move(dataPath.resolve(indexName), dataPath.resolve(indexUUID)); - master.get(); - // force reloading dangling indices with a cluster state republish - client().admin().cluster().prepareReroute().get(); - ensureGreen(indexName); - final SearchResponse countResponse = client().prepareSearch(indexName).setSize(0).get(); - ElasticsearchAssertions.assertHitCount(countResponse, 3L); - - final SearchResponse sizeResponse = client().prepareSearch(indexName) - .addStoredField("_source") - .addStoredField("_size") - .get(); - ElasticsearchAssertions.assertHitCount(sizeResponse, 3L); - for (SearchHit hit : sizeResponse.getHits().getHits()) { - String source = hit.getSourceAsString(); - assertNotNull(source); - Map fields = hit.getFields(); - assertTrue(fields.containsKey("_size")); - Number size = fields.get("_size").getValue(); - assertNotNull(size); - assertEquals(source.length(), size.longValue()); - } - } - -} diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java 
b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java index 5d832aa1518..3fa5300fb43 100644 --- a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java +++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java @@ -31,7 +31,6 @@ import org.elasticsearch.index.IndexService; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.DocumentMapper; -import org.elasticsearch.index.mapper.LegacyNumberFieldMapper; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.mapper.MappedFieldType; @@ -115,41 +114,4 @@ public class SizeMappingTests extends ESSingleNodeTestCase { assertThat(docMapper.metadataMapper(SizeFieldMapper.class).enabled(), is(false)); } - public void testBWCMapper() throws Exception { - { - // IntPoint && docvalues=true for V_5_0_0_alpha5 - IndexService service = createIndex("foo", Settings.EMPTY, "bar", "_size", "enabled=true"); - DocumentMapper docMapper = service.mapperService().documentMapper("bar"); - SizeFieldMapper mapper = docMapper.metadataMapper(SizeFieldMapper.class); - assertThat(mapper.enabled(), is(true)); - MappedFieldType ft = mapper.fieldType(); - assertThat(ft.hasDocValues(), is(true)); - assertThat(mapper.fieldType(), instanceOf(NumberFieldMapper.NumberFieldType.class)); - } - - { - // IntPoint with docvalues=false if version > V_5_0_0_alpha2 && version < V_5_0_0_beta1 - IndexService service = createIndex("foo2", - Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_0_0_alpha4.id).build(), - "bar", "_size", "enabled=true"); - DocumentMapper docMapper = service.mapperService().documentMapper("bar"); - SizeFieldMapper mapper = docMapper.metadataMapper(SizeFieldMapper.class); - assertThat(mapper.enabled(), is(true)); - assertThat(mapper.fieldType().hasDocValues(), is(false)); - assertThat(mapper.fieldType(), instanceOf(NumberFieldMapper.NumberFieldType.class)); - } - - { - // LegacyIntField with docvalues=false if version < V_5_0_0_alpha2 - IndexService service = createIndex("foo3", - Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_0_0_alpha1.id).build(), - "bar", "_size", "enabled=true"); - DocumentMapper docMapper = service.mapperService().documentMapper("bar"); - SizeFieldMapper mapper = docMapper.metadataMapper(SizeFieldMapper.class); - assertThat(mapper.enabled(), is(true)); - assertThat(mapper.fieldType().hasDocValues(), is(false)); - assertThat(mapper.fieldType(), instanceOf(LegacyNumberFieldMapper.NumberFieldType.class)); - } - } - } diff --git a/plugins/mapper-size/src/test/resources/indices/bwc/index-mapper-size-2.0.0.zip b/plugins/mapper-size/src/test/resources/indices/bwc/index-mapper-size-2.0.0.zip deleted file mode 100644 index 0a74f835c3e..00000000000 Binary files a/plugins/mapper-size/src/test/resources/indices/bwc/index-mapper-size-2.0.0.zip and /dev/null differ diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/ingest/IngestDocumentMustacheIT.java b/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/ingest/IngestDocumentMustacheIT.java index 345ccb0ddcd..94acfb61646 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/ingest/IngestDocumentMustacheIT.java +++ 
b/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/ingest/IngestDocumentMustacheIT.java @@ -33,7 +33,7 @@ public class IngestDocumentMustacheIT extends AbstractScriptTestCase { public void testAccessMetaDataViaTemplate() { Map document = new HashMap<>(); document.put("foo", "bar"); - IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, null, document); + IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, document); ingestDocument.setFieldValue(templateService.compile("field1"), ValueSource.wrap("1 {{foo}}", templateService)); assertThat(ingestDocument.getFieldValue("field1", String.class), equalTo("1 bar")); @@ -48,7 +48,7 @@ public class IngestDocumentMustacheIT extends AbstractScriptTestCase { innerObject.put("baz", "hello baz"); innerObject.put("qux", Collections.singletonMap("fubar", "hello qux and fubar")); document.put("foo", innerObject); - IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, null, document); + IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, document); ingestDocument.setFieldValue(templateService.compile("field1"), ValueSource.wrap("1 {{foo.bar}} {{foo.baz}} {{foo.qux.fubar}}", templateService)); assertThat(ingestDocument.getFieldValue("field1", String.class), equalTo("1 hello bar hello baz hello qux and fubar")); @@ -67,7 +67,7 @@ public class IngestDocumentMustacheIT extends AbstractScriptTestCase { list.add(value); list.add(null); document.put("list2", list); - IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, null, document); + IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, document); ingestDocument.setFieldValue(templateService.compile("field1"), ValueSource.wrap("1 {{list1.0}} {{list2.0}}", templateService)); assertThat(ingestDocument.getFieldValue("field1", String.class), equalTo("1 foo {field=value}")); } @@ -77,7 +77,7 @@ public class IngestDocumentMustacheIT extends AbstractScriptTestCase { Map ingestMap = new HashMap<>(); ingestMap.put("timestamp", "bogus_timestamp"); document.put("_ingest", ingestMap); - IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, null, document); + IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, document); ingestDocument.setFieldValue(templateService.compile("ingest_timestamp"), ValueSource.wrap("{{_ingest.timestamp}} and {{_source._ingest.timestamp}}", templateService)); assertThat(ingestDocument.getFieldValue("ingest_timestamp", String.class), diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/ingest/ValueSourceMustacheIT.java b/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/ingest/ValueSourceMustacheIT.java index 884897885ca..38f17d6bada 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/ingest/ValueSourceMustacheIT.java +++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/ingest/ValueSourceMustacheIT.java @@ -64,7 +64,7 @@ public class ValueSourceMustacheIT extends AbstractScriptTestCase { } public void testAccessSourceViaTemplate() { - IngestDocument ingestDocument = new IngestDocument("marvel", "type", "id", null, null, null, null, new HashMap<>()); + IngestDocument ingestDocument = new IngestDocument("marvel", "type", "id", null, null, new HashMap<>()); 
assertThat(ingestDocument.hasField("marvel"), is(false)); ingestDocument.setFieldValue(templateService.compile("{{_index}}"), ValueSource.wrap("{{_index}}", templateService)); assertThat(ingestDocument.getFieldValue("marvel", String.class), equalTo("marvel")); diff --git a/qa/smoke-test-multinode/src/test/java/org/elasticsearch/smoketest/SmokeTestMultiNodeClientYamlTestSuiteIT.java b/qa/smoke-test-multinode/src/test/java/org/elasticsearch/smoketest/SmokeTestMultiNodeClientYamlTestSuiteIT.java index 1fe8cfeb9d5..225e12f65fd 100644 --- a/qa/smoke-test-multinode/src/test/java/org/elasticsearch/smoketest/SmokeTestMultiNodeClientYamlTestSuiteIT.java +++ b/qa/smoke-test-multinode/src/test/java/org/elasticsearch/smoketest/SmokeTestMultiNodeClientYamlTestSuiteIT.java @@ -22,12 +22,15 @@ package org.elasticsearch.smoketest; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; +import org.apache.lucene.util.TimeUnits; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import java.io.IOException; +@TimeoutSuite(millis = 40 * TimeUnits.MINUTE) // some of the windows test VMs are slow as hell public class SmokeTestMultiNodeClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public SmokeTestMultiNodeClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { diff --git a/qa/vagrant/versions b/qa/vagrant/versions index 2ea69c82e4b..c04bbb7dd3b 100644 --- a/qa/vagrant/versions +++ b/qa/vagrant/versions @@ -1,2 +1,3 @@ 5.0.0 5.0.1 +5.0.2 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yaml index bc4dc82243f..f53c88bcfca 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/10_basic.yaml @@ -1,24 +1,3 @@ ---- -setup: - # Disable the disk threshold decider so the test doesn't fail if the disk is - # > 85% full - - do: - cluster.put_settings: - body: - persistent: - cluster.routing.allocation.disk.threshold_enabled: false - flat_settings: true - ---- -teardown: - # Reset the disk threshold decider so we don't leave anything behind. 
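The setup/teardown removed below toggled the disk threshold decider around the shrink test via cluster.put_settings. For reference, the Java-API equivalent of that on/off pattern is sketched here under the assumption of an ESIntegTestCase-style client() handle; it is not part of this change:

// disable the decider before the test...
client().admin().cluster().prepareUpdateSettings()
        .setPersistentSettings(Settings.builder()
                .put("cluster.routing.allocation.disk.threshold_enabled", false)
                .build())
        .get();
// ...and reset it afterwards by nulling the key so nothing is left behind
client().admin().cluster().prepareUpdateSettings()
        .setPersistentSettings(Settings.builder()
                .putNull("cluster.routing.allocation.disk.threshold_enabled")
                .build())
        .get();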
- - do: - cluster.put_settings: - body: - persistent: - cluster.routing.allocation.disk.threshold_enabled: null - flat_settings: true - --- "Shrink index via API": # creates an index with one document solely allocated on the master node diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search_shards/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/search_shards/10_basic.yaml index 4f7d91f42af..f94ba86d914 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search_shards/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search_shards/10_basic.yaml @@ -31,26 +31,38 @@ field: type: text aliases: - test_alias_1: {} - test_alias_2: + test_alias_no_filter: {} + test_alias_filter_1: filter: term: - field : value + field : value1 + test_alias_filter_2: + filter: + term: + field : value2 - do: search_shards: - index: test_alias_1 + index: test_alias_no_filter - length: { shards: 1 } - match: { shards.0.0.index: test_index } - is_true: indices.test_index - is_false: indices.test_index.filter + - is_false: indices.test_index.aliases - do: search_shards: - index: test_alias_2 + index: test_alias_filter_1 - length: { shards: 1 } - match: { shards.0.0.index: test_index } - - match: { indices.test_index: {filter: { term : { field: { value: value, boost: 1.0}}}}} + - match: { indices.test_index: {aliases: [test_alias_filter_1], filter: { term : { field: { value: value1, boost: 1.0}}}}} + - do: + search_shards: + index: ["test_alias_filter_1","test_alias_filter_2"] + + - length: { shards: 1 } + - match: { shards.0.0.index: test_index } + - match: { indices.test_index: {aliases: [test_alias_filter_1, test_alias_filter_2], filter: { bool: { should : [{ term : { field: { value: value1, boost: 1.0}}}, { term : { field: { value: value2, boost: 1.0}}}], adjust_pure_negative: true, boost: 1.0, disable_coord: false }}}} diff --git a/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java b/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java index 8d1f42d5b57..5f76ac0030d 100644 --- a/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java +++ b/test/framework/src/main/java/org/elasticsearch/common/util/MockBigArrays.java @@ -48,7 +48,6 @@ public class MockBigArrays extends BigArrays { */ private static final boolean TRACK_ALLOCATIONS = false; - private static final Set INSTANCES = Collections.synchronizedSet(Collections.newSetFromMap(new WeakHashMap())); private static final ConcurrentMap ACQUIRED_ARRAYS = new ConcurrentHashMap<>(); public static void ensureAllArraysAreReleased() throws Exception { @@ -88,7 +87,6 @@ public class MockBigArrays extends BigArrays { seed = 0; } random = new Random(seed); - INSTANCES.add(this); } @@ -247,15 +245,13 @@ public class MockBigArrays extends BigArrays { private abstract static class AbstractArrayWrapper { - final BigArray in; - boolean clearOnResize; - AtomicReference originalRelease; + final boolean clearOnResize; + private final AtomicReference originalRelease; - AbstractArrayWrapper(BigArray in, boolean clearOnResize) { - ACQUIRED_ARRAYS.put(this, TRACK_ALLOCATIONS ? new RuntimeException() : Boolean.TRUE); - this.in = in; + AbstractArrayWrapper(boolean clearOnResize) { this.clearOnResize = clearOnResize; - originalRelease = new AtomicReference<>(); + this.originalRelease = new AtomicReference<>(); + ACQUIRED_ARRAYS.put(this, TRACK_ALLOCATIONS ? 
new RuntimeException() : Boolean.TRUE); } protected abstract BigArray getDelegate(); @@ -267,7 +263,7 @@ public class MockBigArrays extends BigArrays { } public long ramBytesUsed() { - return in.ramBytesUsed(); + return getDelegate().ramBytesUsed(); } public void close() { @@ -286,7 +282,7 @@ public class MockBigArrays extends BigArrays { private final ByteArray in; ByteArrayWrapper(ByteArray in, boolean clearOnResize) { - super(in, clearOnResize); + super(clearOnResize); this.in = in; } @@ -336,7 +332,7 @@ public class MockBigArrays extends BigArrays { private final IntArray in; IntArrayWrapper(IntArray in, boolean clearOnResize) { - super(in, clearOnResize); + super(clearOnResize); this.in = in; } @@ -381,7 +377,7 @@ public class MockBigArrays extends BigArrays { private final LongArray in; LongArrayWrapper(LongArray in, boolean clearOnResize) { - super(in, clearOnResize); + super(clearOnResize); this.in = in; } @@ -427,7 +423,7 @@ public class MockBigArrays extends BigArrays { private final FloatArray in; FloatArrayWrapper(FloatArray in, boolean clearOnResize) { - super(in, clearOnResize); + super(clearOnResize); this.in = in; } @@ -472,7 +468,7 @@ public class MockBigArrays extends BigArrays { private final DoubleArray in; DoubleArrayWrapper(DoubleArray in, boolean clearOnResize) { - super(in, clearOnResize); + super(clearOnResize); this.in = in; } @@ -517,7 +513,7 @@ public class MockBigArrays extends BigArrays { private final ObjectArray in; ObjectArrayWrapper(ObjectArray in) { - super(in, false); + super(false); this.in = in; } diff --git a/test/framework/src/main/java/org/elasticsearch/ingest/RandomDocumentPicks.java b/test/framework/src/main/java/org/elasticsearch/ingest/RandomDocumentPicks.java index efdf10d5a5d..f1c19710850 100644 --- a/test/framework/src/main/java/org/elasticsearch/ingest/RandomDocumentPicks.java +++ b/test/framework/src/main/java/org/elasticsearch/ingest/RandomDocumentPicks.java @@ -144,15 +144,7 @@ public final class RandomDocumentPicks { if (random.nextBoolean()) { parent = randomString(random); } - String timestamp = null; - if (random.nextBoolean()) { - timestamp = randomString(random); - } - String ttl = null; - if (random.nextBoolean()) { - ttl = randomString(random); - } - return new IngestDocument(index, type, id, routing, parent, timestamp, ttl, source); + return new IngestDocument(index, type, id, routing, parent, source); } public static Map randomSource(Random random) { diff --git a/test/framework/src/main/java/org/elasticsearch/rest/HeadBodyIsEmptyIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/rest/HeadBodyIsEmptyIntegTestCase.java deleted file mode 100644 index 0e43814b75c..00000000000 --- a/test/framework/src/main/java/org/elasticsearch/rest/HeadBodyIsEmptyIntegTestCase.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.rest; - -import org.apache.http.entity.StringEntity; -import org.elasticsearch.client.Response; -import org.elasticsearch.test.rest.ESRestTestCase; -import org.hamcrest.Matcher; - -import java.io.IOException; -import java.io.UnsupportedEncodingException; -import java.util.Map; - -import static java.util.Collections.emptyMap; -import static java.util.Collections.singletonMap; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.Matchers.greaterThan; - -/** - * Tests that HTTP HEAD requests don't respond with a body. - */ -public class HeadBodyIsEmptyIntegTestCase extends ESRestTestCase { - public void testHeadRoot() throws IOException { - headTestCase("/", emptyMap(), greaterThan(0)); - headTestCase("/", singletonMap("pretty", ""), greaterThan(0)); - headTestCase("/", singletonMap("pretty", "true"), greaterThan(0)); - } - - private void createTestDoc() throws UnsupportedEncodingException, IOException { - client().performRequest("PUT", "test/test/1", emptyMap(), new StringEntity("{\"test\": \"test\"}")); - } - - public void testDocumentExists() throws IOException { - createTestDoc(); - headTestCase("test/test/1", emptyMap(), equalTo(0)); - headTestCase("test/test/1", singletonMap("pretty", "true"), equalTo(0)); - } - - public void testIndexExists() throws IOException { - createTestDoc(); - headTestCase("test", emptyMap(), equalTo(0)); - headTestCase("test", singletonMap("pretty", "true"), equalTo(0)); - } - - public void testTypeExists() throws IOException { - createTestDoc(); - headTestCase("test/test", emptyMap(), equalTo(0)); - headTestCase("test/test", singletonMap("pretty", "true"), equalTo(0)); - } - - private void headTestCase(String url, Map params, Matcher matcher) throws IOException { - Response response = client().performRequest("HEAD", url, params); - assertEquals(200, response.getStatusLine().getStatusCode()); - assertThat(Integer.valueOf(response.getHeader("Content-Length")), matcher); - assertNull("HEAD requests shouldn't have a response body but " + url + " did", response.getEntity()); - } -} diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java index e65e0ab4de7..a24017a994a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java @@ -73,7 +73,6 @@ import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.fielddata.IndexFieldDataService; -import org.elasticsearch.index.mapper.LatLonPointFieldMapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; @@ -138,17 +137,19 @@ public abstract class AbstractQueryTestCase> public static final String STRING_FIELD_NAME = "mapped_string"; protected static final String STRING_FIELD_NAME_2 = "mapped_string_2"; protected static final String INT_FIELD_NAME = "mapped_int"; + protected static final String INT_RANGE_FIELD_NAME = "mapped_int_range"; protected static final String DOUBLE_FIELD_NAME = "mapped_double"; protected static final String BOOLEAN_FIELD_NAME = "mapped_boolean"; 
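
For orientation, the INT_RANGE_FIELD_NAME constant added just above (together with the DATE_RANGE_FIELD_NAME that follows) is wired into the test mappings further down in this change through PutMappingRequest.buildFromSimplifiedDef. A minimal sketch of that simplified key/value mapping form, trimmed to the two new range fields; the wrapper class and method names are illustrative only, not part of the change:

    import java.io.IOException;

    import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
    import org.elasticsearch.common.compress.CompressedXContent;

    class RangeFieldMappingSketch {
        // Sketch only: map the two new test fields with the same simplified
        // "fieldName, type=..." pairs used by AbstractQueryTestCase.
        static CompressedXContent rangeFieldMappings(String type) throws IOException {
            return new CompressedXContent(
                PutMappingRequest.buildFromSimplifiedDef(type,
                    "mapped_int_range", "type=integer_range",
                    "mapped_date_range", "type=date_range").string());
        }
    }
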
protected static final String DATE_FIELD_NAME = "mapped_date"; + protected static final String DATE_RANGE_FIELD_NAME = "mapped_date_range"; protected static final String OBJECT_FIELD_NAME = "mapped_object"; protected static final String GEO_POINT_FIELD_NAME = "mapped_geo_point"; - protected static final String LEGACY_GEO_POINT_FIELD_MAPPING = "type=geo_point,lat_lon=true,geohash=true,geohash_prefix=true"; protected static final String GEO_SHAPE_FIELD_NAME = "mapped_geo_shape"; - protected static final String[] MAPPED_FIELD_NAMES = new String[]{STRING_FIELD_NAME, INT_FIELD_NAME, DOUBLE_FIELD_NAME, - BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, OBJECT_FIELD_NAME, GEO_POINT_FIELD_NAME, GEO_SHAPE_FIELD_NAME}; - protected static final String[] MAPPED_LEAF_FIELD_NAMES = new String[]{STRING_FIELD_NAME, INT_FIELD_NAME, DOUBLE_FIELD_NAME, - BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, GEO_POINT_FIELD_NAME}; + protected static final String[] MAPPED_FIELD_NAMES = new String[]{STRING_FIELD_NAME, INT_FIELD_NAME, INT_RANGE_FIELD_NAME, + DOUBLE_FIELD_NAME, BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, DATE_RANGE_FIELD_NAME, OBJECT_FIELD_NAME, GEO_POINT_FIELD_NAME, + GEO_SHAPE_FIELD_NAME}; + protected static final String[] MAPPED_LEAF_FIELD_NAMES = new String[]{STRING_FIELD_NAME, INT_FIELD_NAME, INT_RANGE_FIELD_NAME, + DOUBLE_FIELD_NAME, BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, DATE_RANGE_FIELD_NAME, GEO_POINT_FIELD_NAME, }; private static final int NUMBER_OF_TESTQUERIES = 20; private static ServiceHolder serviceHolder; @@ -827,7 +828,7 @@ public abstract class AbstractQueryTestCase> value = randomIntBetween(0, 10); break; case DOUBLE_FIELD_NAME: - value = randomDouble() * 10; + value = 1 + randomDouble() * 9; break; case BOOLEAN_FIELD_NAME: value = randomBoolean(); @@ -1108,19 +1109,18 @@ public abstract class AbstractQueryTestCase> }); indicesQueriesRegistry = searchModule.getQueryParserRegistry(); - String geoFieldMapping = (idxSettings.getIndexVersionCreated().before(LatLonPointFieldMapper.LAT_LON_FIELD_VERSION)) ? 
- LEGACY_GEO_POINT_FIELD_MAPPING : "type=geo_point"; - for (String type : currentTypes) { mapperService.merge(type, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(type, STRING_FIELD_NAME, "type=text", STRING_FIELD_NAME_2, "type=keyword", INT_FIELD_NAME, "type=integer", + INT_RANGE_FIELD_NAME, "type=integer_range", DOUBLE_FIELD_NAME, "type=double", BOOLEAN_FIELD_NAME, "type=boolean", DATE_FIELD_NAME, "type=date", + DATE_RANGE_FIELD_NAME, "type=date_range", OBJECT_FIELD_NAME, "type=object", - GEO_POINT_FIELD_NAME, geoFieldMapping, + GEO_POINT_FIELD_NAME, "type=geo_point", GEO_SHAPE_FIELD_NAME, "type=geo_shape" ).string()), MapperService.MergeReason.MAPPING_UPDATE, false); // also add mappings for two inner field in the object field diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java index 1a7aac925f3..bc5aa63bf82 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESBackcompatTestCase.java @@ -142,7 +142,7 @@ public abstract class ESBackcompatTestCase extends ESIntegTestCase { } CompatibilityVersion annotation = clazz.getAnnotation(CompatibilityVersion.class); if (annotation != null) { - return Version.smallest(Version.fromId(annotation.version()), compatibilityVersion(clazz.getSuperclass())); + return Version.min(Version.fromId(annotation.version()), compatibilityVersion(clazz.getSuperclass())); } return compatibilityVersion(clazz.getSuperclass()); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 5098f7a3d8d..2c87684b3d8 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -1986,19 +1986,17 @@ public abstract class ESIntegTestCase extends ESTestCase { @Before - public final void before() throws Exception { - + public final void setupTestCluster() throws Exception { if (runTestScopeLifecycle()) { - printTestMessage("setup"); + printTestMessage("setting up"); beforeInternal(); + printTestMessage("all set up"); } - printTestMessage("starting"); } @After - public final void after() throws Exception { - printTestMessage("finished"); + public final void cleanUpCluster() throws Exception { // Deleting indices is going to clear search contexts implicitly so we // need to check that there are no more in-flight search contexts before // we remove indices diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 2c05ff99901..b98a305c7cf 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -245,9 +245,17 @@ public abstract class ESTestCase extends LuceneTestCase { Requests.INDEX_CONTENT_TYPE = XContentType.JSON; } + @Before + public final void before() { + logger.info("[{}]: before test", getTestName()); + } + @After - public final void ensureCleanedUp() throws Exception { + public final void after() throws Exception { checkStaticState(); + ensureAllSearchContextsReleased(); + ensureCheckIndexPassed(); + logger.info("[{}]: after test", getTestName()); } private static final List statusData = new ArrayList<>(); @@ -293,7 +301,6 @@ public 
abstract class ESTestCase extends LuceneTestCase { } // this must be a separate method from other ensure checks above so suite scoped integ tests can call...TODO: fix that - @After public final void ensureAllSearchContextsReleased() throws Exception { assertBusy(() -> MockSearchService.assertNoInFlightContext()); } @@ -309,7 +316,6 @@ public abstract class ESTestCase extends LuceneTestCase { checkIndexFailed = false; } - @After public final void ensureCheckIndexPassed() throws Exception { assertFalse("at least one shard failed CheckIndex", checkIndexFailed); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/IndexSettingsModule.java b/test/framework/src/main/java/org/elasticsearch/test/IndexSettingsModule.java index dcb91ed441e..97fe039681b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/IndexSettingsModule.java +++ b/test/framework/src/main/java/org/elasticsearch/test/IndexSettingsModule.java @@ -21,7 +21,6 @@ package org.elasticsearch.test; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.inject.AbstractModule; -import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -67,7 +66,7 @@ public class IndexSettingsModule extends AbstractModule { if (setting.length > 0) { settingSet.addAll(Arrays.asList(setting)); } - return new IndexSettings(metaData, nodeSettings, (idx) -> Regex.simpleMatch(idx, metaData.getIndex().getName()), new IndexScopedSettings(Settings.EMPTY, settingSet)); + return new IndexSettings(metaData, nodeSettings, new IndexScopedSettings(Settings.EMPTY, settingSet)); } public static IndexSettings newIndexSettings(Index index, Settings settings, IndexScopedSettings indexScopedSettings) { @@ -77,7 +76,7 @@ public class IndexSettingsModule extends AbstractModule { .put(settings) .build(); IndexMetaData metaData = IndexMetaData.builder(index.getName()).settings(build).build(); - return new IndexSettings(metaData, Settings.EMPTY, (idx) -> Regex.simpleMatch(idx, metaData.getIndex().getName()), indexScopedSettings); + return new IndexSettings(metaData, Settings.EMPTY, indexScopedSettings); } public static IndexSettings newIndexSettings(final IndexMetaData indexMetaData, Setting... 
setting) { @@ -85,7 +84,6 @@ public class IndexSettingsModule extends AbstractModule { if (setting.length > 0) { settingSet.addAll(Arrays.asList(setting)); } - return new IndexSettings(indexMetaData, Settings.EMPTY, (idx) -> Regex.simpleMatch(idx, indexMetaData.getIndex().getName()), - new IndexScopedSettings(Settings.EMPTY, settingSet)); + return new IndexSettings(indexMetaData, Settings.EMPTY, new IndexScopedSettings(Settings.EMPTY, settingSet)); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index 37946f9e790..0b121b4aa6a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -294,9 +294,10 @@ public final class InternalTestCluster extends TestCluster { } logger.info("Setup InternalTestCluster [{}] with seed [{}] using [{}] dedicated masters, " + - "[{}] (data) nodes and [{}] coord only nodes", + "[{}] (data) nodes and [{}] coord only nodes (min_master_nodes are [{}])", clusterName, SeedUtils.formatSeed(clusterSeed), - numSharedDedicatedMasterNodes, numSharedDataNodes, numSharedCoordOnlyNodes); + numSharedDedicatedMasterNodes, numSharedDataNodes, numSharedCoordOnlyNodes, + autoManageMinMasterNodes ? "auto-managed" : "manual"); this.nodeConfigurationSource = nodeConfigurationSource; Builder builder = Settings.builder(); if (random.nextInt(5) == 0) { // sometimes set this diff --git a/test/framework/src/main/java/org/elasticsearch/test/disruption/IntermittentLongGCDisruption.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/IntermittentLongGCDisruption.java index caab35e4b42..c49ba55e2e6 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/disruption/IntermittentLongGCDisruption.java +++ b/test/framework/src/main/java/org/elasticsearch/test/disruption/IntermittentLongGCDisruption.java @@ -39,19 +39,6 @@ public class IntermittentLongGCDisruption extends LongGCDisruption { final long delayDurationMax; - public IntermittentLongGCDisruption(Random random) { - this(null, random); - } - - public IntermittentLongGCDisruption(String disruptedNode, Random random) { - this(disruptedNode, random, 100, 200, 300, 20000); - } - - public IntermittentLongGCDisruption(String disruptedNode, Random random, long intervalBetweenDelaysMin, - long intervalBetweenDelaysMax, long delayDurationMin, long delayDurationMax) { - this(random, disruptedNode, intervalBetweenDelaysMin, intervalBetweenDelaysMax, delayDurationMin, delayDurationMax); - } - public IntermittentLongGCDisruption(Random random, String disruptedNode, long intervalBetweenDelaysMin, long intervalBetweenDelaysMax, long delayDurationMin, long delayDurationMax) { super(random, disruptedNode); @@ -88,19 +75,15 @@ public class IntermittentLongGCDisruption extends LongGCDisruption { } private void simulateLongGC(final TimeValue duration) throws InterruptedException { - final String disruptionNodeCopy = disruptedNode; - if (disruptionNodeCopy == null) { - return; - } - logger.info("node [{}] goes into GC for for [{}]", disruptionNodeCopy, duration); + logger.info("node [{}] goes into GC for for [{}]", disruptedNode, duration); final Set nodeThreads = new HashSet<>(); try { - while (stopNodeThreads(disruptionNodeCopy, nodeThreads)) ; + while (stopNodeThreads(nodeThreads)) ; if (!nodeThreads.isEmpty()) { Thread.sleep(duration.millis()); } } finally { - logger.info("node [{}] resumes 
from GC", disruptionNodeCopy); + logger.info("node [{}] resumes from GC", disruptedNode); resumeThreads(nodeThreads); } } @@ -109,13 +92,13 @@ public class IntermittentLongGCDisruption extends LongGCDisruption { @Override public void run() { - while (disrupting && disruptedNode != null) { + while (disrupting) { try { TimeValue duration = new TimeValue(delayDurationMin + random.nextInt((int) (delayDurationMax - delayDurationMin))); simulateLongGC(duration); duration = new TimeValue(intervalBetweenDelaysMin + random.nextInt((int) (intervalBetweenDelaysMax - intervalBetweenDelaysMin))); - if (disrupting && disruptedNode != null) { + if (disrupting) { Thread.sleep(duration.millis()); } } catch (InterruptedException e) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruption.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruption.java index 6985d2dcf17..98349086df5 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruption.java +++ b/test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruption.java @@ -19,11 +19,15 @@ package org.elasticsearch.test.disruption; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.test.InternalTestCluster; +import java.lang.management.ManagementFactory; +import java.lang.management.ThreadInfo; +import java.lang.management.ThreadMXBean; import java.util.Arrays; import java.util.Random; import java.util.Set; @@ -41,11 +45,16 @@ public class LongGCDisruption extends SingleNodeDisruption { // logging has shared JVM locks - we may suspend a thread and block other nodes from doing their thing Pattern.compile("logging\\.log4j"), // security manager is shared across all nodes AND it uses synced hashmaps interanlly - Pattern.compile("java\\.lang\\.SecurityManager") + Pattern.compile("java\\.lang\\.SecurityManager"), + // SecureRandom instance from SecureRandomHolder class is shared by all nodes + Pattern.compile("java\\.security\\.SecureRandom") }; + private static final ThreadMXBean threadBean = ManagementFactory.getThreadMXBean(); + protected final String disruptedNode; private Set suspendedThreads; + private Thread blockDetectionThread; public LongGCDisruption(Random random, String disruptedNode) { super(random); @@ -60,7 +69,7 @@ public class LongGCDisruption extends SingleNodeDisruption { suspendedThreads = ConcurrentHashMap.newKeySet(); final String currentThreadName = Thread.currentThread().getName(); - assert currentThreadName.contains("[" + disruptedNode + "]") == false : + assert isDisruptedNodeThread(currentThreadName) == false : "current thread match pattern. thread name: " + currentThreadName + ", node: " + disruptedNode; // we spawn a background thread to protect against deadlock which can happen // if there are shared resources between caller thread and and suspended threads @@ -75,7 +84,7 @@ public class LongGCDisruption extends SingleNodeDisruption { @Override protected void doRun() throws Exception { // keep trying to stop threads, until no new threads are discovered. 
- while (stopNodeThreads(disruptedNode, suspendedThreads)) { + while (stopNodeThreads(suspendedThreads)) { if (Thread.interrupted()) { return; } @@ -95,13 +104,52 @@ public class LongGCDisruption extends SingleNodeDisruption { } if (stoppingThread.isAlive()) { logger.warn("failed to stop node [{}]'s threads within [{}] millis. Stopping thread stack trace:\n {}" - , disruptedNode, getStoppingTimeoutInMillis(), stackTrace(stoppingThread)); + , disruptedNode, getStoppingTimeoutInMillis(), stackTrace(stoppingThread.getStackTrace())); stoppingThread.interrupt(); // best effort; throw new RuntimeException("stopping node threads took too long"); } + // block detection checks if other threads are blocked waiting on an object that is held by one + // of the threads that was suspended + if (isBlockDetectionSupported()) { + blockDetectionThread = new Thread(new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + if (e instanceof InterruptedException == false) { + throw new AssertionError("unexpected exception in blockDetectionThread", e); + } + } + + @Override + protected void doRun() throws Exception { + while (Thread.currentThread().isInterrupted() == false) { + ThreadInfo[] threadInfos = threadBean.dumpAllThreads(true, true); + for (ThreadInfo threadInfo : threadInfos) { + if (isDisruptedNodeThread(threadInfo.getThreadName()) == false && + threadInfo.getLockOwnerName() != null && + isDisruptedNodeThread(threadInfo.getLockOwnerName())) { + + // find ThreadInfo object of the blocking thread (if available) + ThreadInfo blockingThreadInfo = null; + for (ThreadInfo otherThreadInfo : threadInfos) { + if (otherThreadInfo.getThreadId() == threadInfo.getLockOwnerId()) { + blockingThreadInfo = otherThreadInfo; + break; + } + } + onBlockDetected(threadInfo, blockingThreadInfo); + } + } + Thread.sleep(getBlockDetectionIntervalInMillis()); + } + } + }); + blockDetectionThread.setName(currentThreadName + "[LongGCDisruption][blockDetection]"); + blockDetectionThread.start(); + } success = true; } finally { if (success == false) { + stopBlockDetection(); // resume threads if failed resumeThreads(suspendedThreads); suspendedThreads = null; @@ -112,18 +160,35 @@ public class LongGCDisruption extends SingleNodeDisruption { } } - private String stackTrace(Thread thread) { - return Arrays.stream(thread.getStackTrace()).map(Object::toString).collect(Collectors.joining("\n")); + public boolean isDisruptedNodeThread(String threadName) { + return threadName.contains("[" + disruptedNode + "]"); + } + + private String stackTrace(StackTraceElement[] stackTraceElements) { + return Arrays.stream(stackTraceElements).map(Object::toString).collect(Collectors.joining("\n")); } @Override public synchronized void stopDisrupting() { + stopBlockDetection(); if (suspendedThreads != null) { resumeThreads(suspendedThreads); suspendedThreads = null; } } + private void stopBlockDetection() { + if (blockDetectionThread != null) { + try { + blockDetectionThread.interrupt(); // best effort + blockDetectionThread.join(getStoppingTimeoutInMillis()); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + blockDetectionThread = null; + } + } + @Override public void removeAndEnsureHealthy(InternalTestCluster cluster) { removeFromCluster(cluster); @@ -144,7 +209,7 @@ public class LongGCDisruption extends SingleNodeDisruption { */ @SuppressWarnings("deprecation") // stops/resumes threads intentionally @SuppressForbidden(reason = "stops/resumes threads intentionally") - protected boolean 
stopNodeThreads(String node, Set nodeThreads) { + protected boolean stopNodeThreads(Set nodeThreads) { Thread[] allThreads = null; while (allThreads == null) { allThreads = new Thread[Thread.activeCount()]; @@ -154,16 +219,15 @@ public class LongGCDisruption extends SingleNodeDisruption { } } boolean liveThreadsFound = false; - final String nodeThreadNamePart = "[" + node + "]"; for (Thread thread : allThreads) { if (thread == null) { continue; } - String name = thread.getName(); - if (name.contains(nodeThreadNamePart)) { + String threadName = thread.getName(); + if (isDisruptedNodeThread(threadName)) { if (thread.isAlive() && nodeThreads.add(thread)) { liveThreadsFound = true; - logger.trace("stopping thread [{}]", name); + logger.trace("stopping thread [{}]", threadName); thread.suspend(); // double check the thread is not in a shared resource like logging. If so, let it go and come back.. boolean safe = true; @@ -178,7 +242,7 @@ public class LongGCDisruption extends SingleNodeDisruption { } } if (!safe) { - logger.trace("resuming thread [{}] as it is in a critical section", name); + logger.trace("resuming thread [{}] as it is in a critical section", threadName); thread.resume(); nodeThreads.remove(thread); } @@ -198,6 +262,28 @@ public class LongGCDisruption extends SingleNodeDisruption { return TimeValue.timeValueSeconds(30).getMillis(); } + public boolean isBlockDetectionSupported() { + return threadBean.isObjectMonitorUsageSupported() && threadBean.isSynchronizerUsageSupported(); + } + + // for testing + protected long getBlockDetectionIntervalInMillis() { + return 3000L; + } + + // for testing + protected void onBlockDetected(ThreadInfo blockedThread, @Nullable ThreadInfo blockingThread) { + String blockedThreadStackTrace = stackTrace(blockedThread.getStackTrace()); + String blockingThreadStackTrace = blockingThread != null ? 
+ stackTrace(blockingThread.getStackTrace()) : "not available"; + throw new AssertionError("Thread [" + blockedThread.getThreadName() + "] is blocked waiting on the resource [" + + blockedThread.getLockInfo() + "] held by the suspended thread [" + blockedThread.getLockOwnerName() + + "] of the disrupted node [" + disruptedNode + "].\n" + + "Please add this occurrence to the unsafeClasses list in [" + LongGCDisruption.class.getName() + "].\n" + + "Stack trace of blocked thread: " + blockedThreadStackTrace + "\n" + + "Stack trace of blocking thread: " + blockingThreadStackTrace); + } + @SuppressWarnings("deprecation") // stops/resumes threads intentionally @SuppressForbidden(reason = "stops/resumes threads intentionally") protected void resumeThreads(Set threads) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruptionTest.java b/test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruptionTest.java index 38190444758..a5cd7c30723 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruptionTest.java +++ b/test/framework/src/main/java/org/elasticsearch/test/disruption/LongGCDisruptionTest.java @@ -18,11 +18,15 @@ */ package org.elasticsearch.test.disruption; +import org.elasticsearch.common.Nullable; import org.elasticsearch.test.ESTestCase; +import java.lang.management.ThreadInfo; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.locks.ReentrantLock; import java.util.regex.Pattern; @@ -148,4 +152,94 @@ public class LongGCDisruptionTest extends ESTestCase { stop.set(true); } } + + public void testBlockDetection() throws Exception { + final String disruptedNodeName = "disrupted_node"; + final String blockedNodeName = "blocked_node"; + CountDownLatch waitForBlockDetectionResult = new CountDownLatch(1); + AtomicReference blockDetectionResult = new AtomicReference<>(); + LongGCDisruption disruption = new LongGCDisruption(random(), disruptedNodeName) { + @Override + protected Pattern[] getUnsafeClasses() { + return new Pattern[0]; + } + + @Override + protected void onBlockDetected(ThreadInfo blockedThread, @Nullable ThreadInfo blockingThread) { + blockDetectionResult.set(blockedThread); + waitForBlockDetectionResult.countDown(); + } + + @Override + protected long getBlockDetectionIntervalInMillis() { + return 10L; + } + }; + if (disruption.isBlockDetectionSupported() == false) { + return; + } + final AtomicBoolean stop = new AtomicBoolean(); + final CountDownLatch underLock = new CountDownLatch(1); + final CountDownLatch pauseUnderLock = new CountDownLatch(1); + final LockedExecutor lockedExecutor = new LockedExecutor(); + final AtomicLong ops = new AtomicLong(); + try { + for (int i = 0; i < 5; i++) { + // at least one locked and one none lock thread + final boolean lockedExec = (i < 4 && randomBoolean()) || i == 0; + Thread thread = new Thread(() -> { + while (stop.get() == false) { + if (lockedExec) { + lockedExecutor.executeLocked(() -> { + try { + underLock.countDown(); + ops.incrementAndGet(); + pauseUnderLock.await(); + } catch (InterruptedException e) { + + } + }); + } else { + ops.incrementAndGet(); + } + } + }); + + thread.setName("[" + disruptedNodeName + "][" + i + "]"); + thread.start(); + } + + for (int i = 0; i < 5; i++) { + // at least one locked and one none lock thread + final boolean 
lockedExec = (i < 4 && randomBoolean()) || i == 0; + Thread thread = new Thread(() -> { + while (stop.get() == false) { + if (lockedExec) { + lockedExecutor.executeLocked(() -> { + ops.incrementAndGet(); + }); + } else { + ops.incrementAndGet(); + } + } + }); + thread.setName("[" + blockedNodeName + "][" + i + "]"); + thread.start(); + } + // make sure some threads of test_node are under lock + underLock.await(); + disruption.startDisrupting(); + waitForBlockDetectionResult.await(30, TimeUnit.SECONDS); + disruption.stopDisrupting(); + + ThreadInfo threadInfo = blockDetectionResult.get(); + assertNotNull(threadInfo); + assertThat(threadInfo.getThreadName(), containsString("[" + blockedNodeName + "]")); + assertThat(threadInfo.getLockOwnerName(), containsString("[" + disruptedNodeName + "]")); + assertThat(threadInfo.getLockInfo().getClassName(), containsString(ReentrantLock.class.getName())); + } finally { + stop.set(true); + pauseUnderLock.countDown(); + } + } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index e05057648cc..b32675572e7 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -37,7 +37,10 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.ESTestCase; import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import javax.net.ssl.SSLContext; import java.io.IOException; import java.io.InputStream; import java.nio.file.Files; @@ -53,8 +56,6 @@ import java.util.List; import java.util.Map; import java.util.Set; -import javax.net.ssl.SSLContext; - import static java.util.Collections.singletonMap; import static java.util.Collections.sort; import static java.util.Collections.unmodifiableList; @@ -62,7 +63,7 @@ import static java.util.Collections.unmodifiableList; /** * Superclass for tests that interact with an external test cluster using Elasticsearch's {@link RestClient}. */ -public class ESRestTestCase extends ESTestCase { +public abstract class ESRestTestCase extends ESTestCase { public static final String TRUSTSTORE_PATH = "truststore.path"; public static final String TRUSTSTORE_PASSWORD = "truststore.password"; @@ -76,66 +77,79 @@ public class ESRestTestCase extends ESTestCase { } } - private final List clusterHosts; + private static List clusterHosts; /** - * A client for the running Elasticsearch cluster. Lazily initialized on first use. + * A client for the running Elasticsearch cluster */ - private final RestClient client; + private static RestClient client; /** * A client for the running Elasticsearch cluster configured to take test administrative actions like remove all indexes after the test - * completes. Lazily initialized on first use. 
+ * completes */ - private final RestClient adminClient; + private static RestClient adminClient; - public ESRestTestCase() { - String cluster = System.getProperty("tests.rest.cluster"); - if (cluster == null) { - throw new RuntimeException("Must specify [tests.rest.cluster] system property with a comma delimited list of [host:port] " - + "to which to send REST requests"); - } - String[] stringUrls = cluster.split(","); - List clusterHosts = new ArrayList<>(stringUrls.length); - for (String stringUrl : stringUrls) { - int portSeparator = stringUrl.lastIndexOf(':'); - if (portSeparator < 0) { - throw new IllegalArgumentException("Illegal cluster url [" + stringUrl + "]"); + @Before + public void initClient() throws IOException { + if (client == null) { + assert adminClient == null; + assert clusterHosts == null; + String cluster = System.getProperty("tests.rest.cluster"); + if (cluster == null) { + throw new RuntimeException("Must specify [tests.rest.cluster] system property with a comma delimited list of [host:port] " + + "to which to send REST requests"); } - String host = stringUrl.substring(0, portSeparator); - int port = Integer.valueOf(stringUrl.substring(portSeparator + 1)); - clusterHosts.add(new HttpHost(host, port, getProtocol())); - } - this.clusterHosts = unmodifiableList(clusterHosts); - try { + String[] stringUrls = cluster.split(","); + List hosts = new ArrayList<>(stringUrls.length); + for (String stringUrl : stringUrls) { + int portSeparator = stringUrl.lastIndexOf(':'); + if (portSeparator < 0) { + throw new IllegalArgumentException("Illegal cluster url [" + stringUrl + "]"); + } + String host = stringUrl.substring(0, portSeparator); + int port = Integer.valueOf(stringUrl.substring(portSeparator + 1)); + hosts.add(new HttpHost(host, port, getProtocol())); + } + clusterHosts = unmodifiableList(hosts); + logger.info("initializing REST clients against {}", clusterHosts); client = buildClient(restClientSettings()); adminClient = buildClient(restAdminSettings()); - } catch (IOException e) { - // Wrap the IOException so children don't have to declare a constructor just to rethrow it. - throw new RuntimeException("Error building clients", e); } + assert client != null; + assert adminClient != null; + assert clusterHosts != null; } - /** * Clean up after the test case. */ @After - public final void after() throws Exception { + public final void cleanUpCluster() throws Exception { wipeCluster(); logIfThereAreRunningTasks(); - closeClients(); + } + + @AfterClass + public static void closeClients() throws IOException { + try { + IOUtils.close(client, adminClient); + } finally { + clusterHosts = null; + client = null; + adminClient = null; + } } /** - * Get a client, building it if it hasn't been built for this test. + * Get the client used for ordinary api calls while writing a test */ - protected final RestClient client() { + protected static RestClient client() { return client; } /** * Get the client used for test administrative actions. Do not use this while writing a test. Only use it for cleaning up after tests. */ - protected final RestClient adminClient() { + protected static RestClient adminClient() { return adminClient; } @@ -230,10 +244,6 @@ public class ESRestTestCase extends ESTestCase { */ } - private void closeClients() throws IOException { - IOUtils.close(client, adminClient); - } - /** * Used to obtain settings for the REST client that is used to send REST requests. 
*/ @@ -262,8 +272,9 @@ public class ESRestTestCase extends ESTestCase { return "http"; } - private RestClient buildClient(Settings settings) throws IOException { - RestClientBuilder builder = RestClient.builder(clusterHosts.toArray(new HttpHost[0])).setMaxRetryTimeoutMillis(30000) + private static RestClient buildClient(Settings settings) throws IOException { + RestClientBuilder builder = RestClient.builder(clusterHosts.toArray(new HttpHost[clusterHosts.size()])) + .setMaxRetryTimeoutMillis(30000) .setRequestConfigCallback(requestConfigBuilder -> requestConfigBuilder.setSocketTimeout(30000)); String keystorePath = settings.get(TRUSTSTORE_PATH); if (keystorePath != null) { @@ -314,5 +325,4 @@ public class ESRestTestCase extends ESTestCase { } return runningTasks; } - } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java index 14affcaf3eb..ff829aafee8 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java @@ -19,7 +19,6 @@ package org.elasticsearch.test.rest.yaml; import com.carrotsearch.randomizedtesting.RandomizedTest; - import org.apache.http.Header; import org.apache.http.HttpEntity; import org.apache.http.HttpHost; @@ -32,13 +31,11 @@ import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestApi; import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestPath; import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestSpec; -import org.junit.BeforeClass; import java.io.IOException; import java.net.URI; @@ -61,94 +58,16 @@ public class ClientYamlTestClient { //query_string params that don't need to be declared in the spec, they are supported by default private static final Set ALWAYS_ACCEPTED_QUERY_STRING_PARAMS = Sets.newHashSet("pretty", "source", "filter_path"); - private static boolean loggedInit = false; - private final ClientYamlSuiteRestSpec restSpec; private final RestClient restClient; private final Version esVersion; - public ClientYamlTestClient(ClientYamlSuiteRestSpec restSpec, RestClient restClient, List hosts) throws IOException { + public ClientYamlTestClient(ClientYamlSuiteRestSpec restSpec, RestClient restClient, List hosts, + Version esVersion) throws IOException { assert hosts.size() > 0; this.restSpec = restSpec; this.restClient = restClient; - Tuple versionTuple = readMasterAndMinNodeVersion(hosts.size()); - this.esVersion = versionTuple.v1(); - Version masterVersion = versionTuple.v2(); - if (false == loggedInit) { - /* This will be logged once per suite which lines up with randomized runner's dumping the output of all failing suites. It'd - * be super noisy to log this once per test. We can't log it in a @BeforeClass method because we need the class variables. */ - logger.info("initializing client, minimum es version: [{}] master version: [{}] hosts: {}", esVersion, masterVersion, hosts); - loggedInit = true; - } - } - - /** - * Reset {@link #loggedInit} so we log the connection setup before this suite. 
- */ - @BeforeClass - public static void clearLoggedInit() { - loggedInit = false; - } - - private Tuple readMasterAndMinNodeVersion(int numHosts) throws IOException { - try { - // we simply go to the _cat/nodes API and parse all versions in the cluster - Response response = restClient.performRequest("GET", "/_cat/nodes", Collections.singletonMap("h", "version,master")); - ClientYamlTestResponse restTestResponse = new ClientYamlTestResponse(response); - String nodesCatResponse = restTestResponse.getBodyAsString(); - String[] split = nodesCatResponse.split("\n"); - Version version = null; - Version masterVersion = null; - for (String perNode : split) { - final String[] versionAndMaster = perNode.split("\\s+"); - assert versionAndMaster.length == 2 : "invalid line: " + perNode + " length: " + versionAndMaster.length; - final Version currentVersion = Version.fromString(versionAndMaster[0]); - final boolean master = versionAndMaster[1].trim().equals("*"); - if (master) { - assert masterVersion == null; - masterVersion = currentVersion; - } - if (version == null) { - version = currentVersion; - } else if (version.onOrAfter(currentVersion)) { - version = currentVersion; - } - } - return new Tuple<>(version, masterVersion); - } catch (ResponseException ex) { - if (ex.getResponse().getStatusLine().getStatusCode() == 403) { - logger.warn("Fallback to simple info '/' request, _cat/nodes is not authorized"); - final Version version = readAndCheckVersion(numHosts); - return new Tuple<>(version, version); - } - throw ex; - } - } - - private Version readAndCheckVersion(int numHosts) throws IOException { - ClientYamlSuiteRestApi restApi = restApi("info"); - assert restApi.getPaths().size() == 1; - assert restApi.getMethods().size() == 1; - Version version = null; - for (int i = 0; i < numHosts; i++) { - //we don't really use the urls here, we rely on the client doing round-robin to touch all the nodes in the cluster - String method = restApi.getMethods().get(0); - String endpoint = restApi.getPaths().get(0); - Response response = restClient.performRequest(method, endpoint); - ClientYamlTestResponse restTestResponse = new ClientYamlTestResponse(response); - - Object latestVersion = restTestResponse.evaluate("version.number"); - if (latestVersion == null) { - throw new RuntimeException("elasticsearch version not found in the response"); - } - final Version currentVersion = Version.fromString(restTestResponse.evaluate("version.number").toString()); - if (version == null) { - version = currentVersion; - } else if (version.onOrAfter(currentVersion)) { - version = currentVersion; - } - } - return version; + this.esVersion = esVersion; } public Version getEsVersion() { diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java index 2f1e42c12cb..5bc380c3c2d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java @@ -18,13 +18,10 @@ */ package org.elasticsearch.test.rest.yaml; -import org.apache.http.HttpHost; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; -import org.elasticsearch.client.RestClient; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestSpec; 
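
The version-sniffing removed from this class is not lost; it resurfaces later in this change as readVersionsFromCatNodes/readVersionsFromInfo on ESClientYamlSuiteTestCase. The parsing itself stays the same: each row of the _cat/nodes?h=version,master response is split on whitespace, the smallest version across nodes wins, and the row flagged with '*' supplies the master's version. A standalone sketch of that parsing, assuming a plain-text response body (the class name is illustrative only):

    import org.elasticsearch.Version;
    import org.elasticsearch.common.collect.Tuple;

    class CatNodesVersionSketch {
        // Sketch only: derive (minimum node version, master node version) from a
        // body such as "5.0.0 -\n6.0.0-alpha1 *\n".
        static Tuple<Version, Version> parse(String catNodesBody) {
            Version minVersion = null;
            Version masterVersion = null;
            for (String row : catNodesBody.split("\n")) {
                String[] columns = row.trim().split("\\s+");
                Version current = Version.fromString(columns[0]);
                if (columns.length > 1 && "*".equals(columns[1].trim())) {
                    masterVersion = current;
                }
                if (minVersion == null || minVersion.onOrAfter(current)) {
                    minVersion = current; // keep the smallest version seen so far
                }
            }
            return new Tuple<>(minVersion, masterVersion);
        }
    }
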
import java.io.IOException; import java.util.HashMap; @@ -42,15 +39,12 @@ public class ClientYamlTestExecutionContext { private static final Logger logger = Loggers.getLogger(ClientYamlTestExecutionContext.class); private final Stash stash = new Stash(); - - private final ClientYamlSuiteRestSpec restSpec; - - private ClientYamlTestClient restTestClient; + private final ClientYamlTestClient clientYamlTestClient; private ClientYamlTestResponse response; - public ClientYamlTestExecutionContext(ClientYamlSuiteRestSpec restSpec) { - this.restSpec = restSpec; + public ClientYamlTestExecutionContext(ClientYamlTestClient clientYamlTestClient) { + this.clientYamlTestClient = clientYamlTestClient; } /** @@ -104,7 +98,7 @@ public class ClientYamlTestExecutionContext { private ClientYamlTestResponse callApiInternal(String apiName, Map params, String body, Map headers) throws IOException { - return restTestClient.callApi(apiName, params, body, headers); + return clientYamlTestClient.callApi(apiName, params, body, headers); } /** @@ -114,13 +108,6 @@ public class ClientYamlTestExecutionContext { return response.evaluate(path, stash); } - /** - * Creates the embedded REST client when needed. Needs to be called before each test. - */ - public void initClient(RestClient client, List hosts) throws IOException { - restTestClient = new ClientYamlTestClient(restSpec, client, hosts); - } - /** * Clears the last obtained response and the stashed fields */ @@ -138,7 +125,7 @@ public class ClientYamlTestExecutionContext { * Returns the current es version as a string */ public Version esVersion() { - return restTestClient.getEsVersion(); + return clientYamlTestClient.getEsVersion(); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java index d44ce71decb..a9b61aae54f 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java @@ -20,24 +20,27 @@ package org.elasticsearch.test.rest.yaml; import com.carrotsearch.randomizedtesting.RandomizedTest; - +import org.apache.http.HttpHost; import org.apache.lucene.util.IOUtils; +import org.elasticsearch.Version; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.client.RestClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException; import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestSuiteParser; import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestApi; import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestSpec; +import org.elasticsearch.test.rest.yaml.section.ClientYamlTestSection; import org.elasticsearch.test.rest.yaml.section.ClientYamlTestSuite; import org.elasticsearch.test.rest.yaml.section.DoSection; import org.elasticsearch.test.rest.yaml.section.ExecutableSection; -import org.elasticsearch.test.rest.yaml.section.SkipSection; -import org.elasticsearch.test.rest.yaml.section.ClientYamlTestSection; import org.junit.AfterClass; import org.junit.Before; -import org.junit.BeforeClass; import 
java.io.IOException; import java.io.InputStream; @@ -51,7 +54,6 @@ import java.nio.file.Path; import java.nio.file.StandardCopyOption; import java.util.ArrayList; import java.util.Collections; -import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.Set; @@ -74,13 +76,13 @@ public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase { /** * Property that allows to control whether spec validation is enabled or not (default true). */ - public static final String REST_TESTS_VALIDATE_SPEC = "tests.rest.validate_spec"; + private static final String REST_TESTS_VALIDATE_SPEC = "tests.rest.validate_spec"; /** * Property that allows to control where the REST spec files need to be loaded from */ public static final String REST_TESTS_SPEC = "tests.rest.spec"; - public static final String REST_LOAD_PACKAGED_TESTS = "tests.rest.load_packaged"; + private static final String REST_LOAD_PACKAGED_TESTS = "tests.rest.load_packaged"; private static final String DEFAULT_TESTS_PATH = "/rest-api-spec/test"; private static final String DEFAULT_SPEC_PATH = "/rest-api-spec/api"; @@ -96,19 +98,84 @@ public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase { */ private static final String PATHS_SEPARATOR = "(? blacklistPathMatchers = new ArrayList<>(); + private static List blacklistPathMatchers; private static ClientYamlTestExecutionContext restTestExecutionContext; private static ClientYamlTestExecutionContext adminExecutionContext; private final ClientYamlTestCandidate testCandidate; - public ESClientYamlSuiteTestCase(ClientYamlTestCandidate testCandidate) { + protected ESClientYamlSuiteTestCase(ClientYamlTestCandidate testCandidate) { this.testCandidate = testCandidate; - String[] blacklist = resolvePathsProperty(REST_TESTS_BLACKLIST, null); - for (String entry : blacklist) { - this.blacklistPathMatchers.add(new BlacklistedPathPatternMatcher(entry)); + } + + @Before + public void initAndResetContext() throws IOException { + if (restTestExecutionContext == null) { + assert adminExecutionContext == null; + assert blacklistPathMatchers == null; + String[] blacklist = resolvePathsProperty(REST_TESTS_BLACKLIST, null); + blacklistPathMatchers = new ArrayList<>(); + for (String entry : blacklist) { + blacklistPathMatchers.add(new BlacklistedPathPatternMatcher(entry)); + } + String[] specPaths = resolvePathsProperty(REST_TESTS_SPEC, DEFAULT_SPEC_PATH); + ClientYamlSuiteRestSpec restSpec = null; + FileSystem fileSystem = getFileSystem(); + // don't make a try-with, getFileSystem returns null + // ... 
and you can't close() the default filesystem + try { + restSpec = ClientYamlSuiteRestSpec.parseFrom(fileSystem, DEFAULT_SPEC_PATH, specPaths); + } finally { + IOUtils.close(fileSystem); + } + validateSpec(restSpec); + List hosts = getClusterHosts(); + RestClient restClient = client(); + Version esVersion; + try { + Tuple versionVersionTuple = readVersionsFromCatNodes(restClient); + esVersion = versionVersionTuple.v1(); + Version masterVersion = versionVersionTuple.v2(); + logger.info("initializing yaml client, minimum es version: [{}] master version: [{}] hosts: {}", + esVersion, masterVersion, hosts); + } catch (ResponseException ex) { + if (ex.getResponse().getStatusLine().getStatusCode() == 403) { + logger.warn("Fallback to simple info '/' request, _cat/nodes is not authorized"); + esVersion = readVersionsFromInfo(restClient, hosts.size()); + logger.info("initializing yaml client, minimum es version: [{}] hosts: {}", esVersion, hosts); + } else { + throw ex; + } + } + ClientYamlTestClient clientYamlTestClient = new ClientYamlTestClient(restSpec, restClient, hosts, esVersion); + restTestExecutionContext = new ClientYamlTestExecutionContext(clientYamlTestClient); + adminExecutionContext = new ClientYamlTestExecutionContext(clientYamlTestClient); + } + assert restTestExecutionContext != null; + assert adminExecutionContext != null; + assert blacklistPathMatchers != null; + + // admin context must be available for @After always, regardless of whether the test was blacklisted + adminExecutionContext.clear(); + + //skip test if it matches one of the blacklist globs + for (BlacklistedPathPatternMatcher blacklistedPathMatcher : blacklistPathMatchers) { + String testPath = testCandidate.getSuitePath() + "/" + testCandidate.getTestSection().getName(); + assumeFalse("[" + testCandidate.getTestPath() + "] skipped, reason: blacklisted", blacklistedPathMatcher + .isSuffixMatch(testPath)); } + restTestExecutionContext.clear(); + + //skip test if the whole suite (yaml file) is disabled + assumeFalse(testCandidate.getSetupSection().getSkipSection().getSkipMessage(testCandidate.getSuitePath()), + testCandidate.getSetupSection().getSkipSection().skip(restTestExecutionContext.esVersion())); + //skip test if the whole suite (yaml file) is disabled + assumeFalse(testCandidate.getTeardownSection().getSkipSection().getSkipMessage(testCandidate.getSuitePath()), + testCandidate.getTeardownSection().getSkipSection().skip(restTestExecutionContext.esVersion())); + //skip test if test section is disabled + assumeFalse(testCandidate.getTestSection().getSkipSection().getSkipMessage(testCandidate.getTestPath()), + testCandidate.getTestSection().getSkipSection().skip(restTestExecutionContext.esVersion())); } @Override @@ -118,7 +185,6 @@ public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase { } public static Iterable createParameters() throws IOException, ClientYamlTestParseException { - //parse tests only if rest test group is enabled, otherwise rest tests might not even be available on file system List restTestCandidates = collectTestCandidates(); List objects = new ArrayList<>(); for (ClientYamlTestCandidate restTestCandidate : restTestCandidates) { @@ -151,12 +217,7 @@ public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase { } //sort the candidates so they will always be in the same order before being shuffled, for repeatability - Collections.sort(testCandidates, new Comparator() { - @Override - public int compare(ClientYamlTestCandidate o1, ClientYamlTestCandidate o2) { - return 
o1.getTestPath().compareTo(o2.getTestPath()); - } - }); + Collections.sort(testCandidates, (o1, o2) -> o1.getTestPath().compareTo(o2.getTestPath())); return testCandidates; } @@ -175,7 +236,7 @@ public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase { * are available from classpath. */ @SuppressForbidden(reason = "proper use of URL, hack around a JDK bug") - static FileSystem getFileSystem() throws IOException { + protected static FileSystem getFileSystem() throws IOException { // REST suite handling is currently complicated, with lots of filtering and so on // For now, to work embedded in a jar, return a ZipFileSystem over the jar contents. URL codeLocation = FileUtils.class.getProtectionDomain().getCodeSource().getLocation(); @@ -189,7 +250,7 @@ public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase { try (InputStream in = codeLocation.openStream()) { Files.copy(in, tmp, StandardCopyOption.REPLACE_EXISTING); } - return FileSystems.newFileSystem(new URI("jar:" + tmp.toUri()), Collections.emptyMap()); + return FileSystems.newFileSystem(new URI("jar:" + tmp.toUri()), Collections.emptyMap()); } catch (URISyntaxException e) { throw new IOException("couldn't open zipfilesystem: ", e); } @@ -198,23 +259,6 @@ public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase { } } - @BeforeClass - public static void initExecutionContext() throws IOException { - String[] specPaths = resolvePathsProperty(REST_TESTS_SPEC, DEFAULT_SPEC_PATH); - ClientYamlSuiteRestSpec restSpec = null; - FileSystem fileSystem = getFileSystem(); - // don't make a try-with, getFileSystem returns null - // ... and you can't close() the default filesystem - try { - restSpec = ClientYamlSuiteRestSpec.parseFrom(fileSystem, DEFAULT_SPEC_PATH, specPaths); - } finally { - IOUtils.close(fileSystem); - } - validateSpec(restSpec); - restTestExecutionContext = new ClientYamlTestExecutionContext(restSpec); - adminExecutionContext = new ClientYamlTestExecutionContext(restSpec); - } - protected ClientYamlTestExecutionContext getAdminExecutionContext() { return adminExecutionContext; } @@ -238,35 +282,55 @@ public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase { @AfterClass public static void clearStatic() { + blacklistPathMatchers = null; restTestExecutionContext = null; adminExecutionContext = null; } - @Before - public void reset() throws IOException { - // admin context must be available for @After always, regardless of whether the test was blacklisted - adminExecutionContext.initClient(adminClient(), getClusterHosts()); - adminExecutionContext.clear(); - - //skip test if it matches one of the blacklist globs - for (BlacklistedPathPatternMatcher blacklistedPathMatcher : blacklistPathMatchers) { - String testPath = testCandidate.getSuitePath() + "/" + testCandidate.getTestSection().getName(); - assumeFalse("[" + testCandidate.getTestPath() + "] skipped, reason: blacklisted", blacklistedPathMatcher - .isSuffixMatch(testPath)); + private static Tuple readVersionsFromCatNodes(RestClient restClient) throws IOException { + // we simply go to the _cat/nodes API and parse all versions in the cluster + Response response = restClient.performRequest("GET", "/_cat/nodes", Collections.singletonMap("h", "version,master")); + ClientYamlTestResponse restTestResponse = new ClientYamlTestResponse(response); + String nodesCatResponse = restTestResponse.getBodyAsString(); + String[] split = nodesCatResponse.split("\n"); + Version version = null; + Version masterVersion = null; + for (String 
perNode : split) { + final String[] versionAndMaster = perNode.split("\\s+"); + assert versionAndMaster.length == 2 : "invalid line: " + perNode + " length: " + versionAndMaster.length; + final Version currentVersion = Version.fromString(versionAndMaster[0]); + final boolean master = versionAndMaster[1].trim().equals("*"); + if (master) { + assert masterVersion == null; + masterVersion = currentVersion; + } + if (version == null) { + version = currentVersion; + } else if (version.onOrAfter(currentVersion)) { + version = currentVersion; + } } - //The client needs non static info to get initialized, therefore it can't be initialized in the before class - restTestExecutionContext.initClient(client(), getClusterHosts()); - restTestExecutionContext.clear(); + return new Tuple<>(version, masterVersion); + } - //skip test if the whole suite (yaml file) is disabled - assumeFalse(testCandidate.getSetupSection().getSkipSection().getSkipMessage(testCandidate.getSuitePath()), - testCandidate.getSetupSection().getSkipSection().skip(restTestExecutionContext.esVersion())); - //skip test if the whole suite (yaml file) is disabled - assumeFalse(testCandidate.getTeardownSection().getSkipSection().getSkipMessage(testCandidate.getSuitePath()), - testCandidate.getTeardownSection().getSkipSection().skip(restTestExecutionContext.esVersion())); - //skip test if test section is disabled - assumeFalse(testCandidate.getTestSection().getSkipSection().getSkipMessage(testCandidate.getTestPath()), - testCandidate.getTestSection().getSkipSection().skip(restTestExecutionContext.esVersion())); + private static Version readVersionsFromInfo(RestClient restClient, int numHosts) throws IOException { + Version version = null; + for (int i = 0; i < numHosts; i++) { + //we don't really use the urls here, we rely on the client doing round-robin to touch all the nodes in the cluster + Response response = restClient.performRequest("GET", "/"); + ClientYamlTestResponse restTestResponse = new ClientYamlTestResponse(response); + Object latestVersion = restTestResponse.evaluate("version.number"); + if (latestVersion == null) { + throw new RuntimeException("elasticsearch version not found in the response"); + } + final Version currentVersion = Version.fromString(latestVersion.toString()); + if (version == null) { + version = currentVersion; + } else if (version.onOrAfter(currentVersion)) { + version = currentVersion; + } + } + return version; } public void test() throws IOException { diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java b/test/framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java index 6b3ed0bbad0..7dd4569dc56 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/CapturingTransport.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.transport.ConnectTransportException; +import org.elasticsearch.transport.ConnectionProfile; import org.elasticsearch.transport.RemoteTransportException; import org.elasticsearch.transport.SendRequestTransportException; import org.elasticsearch.transport.Transport; @@ -213,23 +214,13 @@ public class CapturingTransport implements Transport { return new TransportAddress[0]; } - @Override - public boolean 
addressSupported(Class address) { - return false; - } - @Override public boolean nodeConnected(DiscoveryNode node) { return true; } @Override - public void connectToNode(DiscoveryNode node) throws ConnectTransportException { - - } - - @Override - public void connectToNodeLight(DiscoveryNode node) throws ConnectTransportException { + public void connectToNode(DiscoveryNode node, ConnectionProfile connectionProfile) throws ConnectTransportException { } diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java index ac400065386..2790b548b18 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java @@ -42,6 +42,7 @@ import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.tasks.MockTaskManager; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ConnectTransportException; +import org.elasticsearch.transport.ConnectionProfile; import org.elasticsearch.transport.MockTcpTransport; import org.elasticsearch.transport.RequestHandlerRegistry; import org.elasticsearch.transport.Transport; @@ -175,13 +176,9 @@ public final class MockTransportService extends TransportService { */ public void addFailToSendNoConnectRule(TransportAddress transportAddress) { addDelegate(transportAddress, new DelegateTransport(original) { - @Override - public void connectToNode(DiscoveryNode node) throws ConnectTransportException { - throw new ConnectTransportException(node, "DISCONNECT: simulated"); - } @Override - public void connectToNodeLight(DiscoveryNode node) throws ConnectTransportException { + public void connectToNode(DiscoveryNode node, ConnectionProfile connectionProfile) throws ConnectTransportException { throw new ConnectTransportException(node, "DISCONNECT: simulated"); } @@ -222,14 +219,10 @@ public final class MockTransportService extends TransportService { public void addFailToSendNoConnectRule(TransportAddress transportAddress, final Set blockedActions) { addDelegate(transportAddress, new DelegateTransport(original) { - @Override - public void connectToNode(DiscoveryNode node) throws ConnectTransportException { - original.connectToNode(node); - } @Override - public void connectToNodeLight(DiscoveryNode node) throws ConnectTransportException { - original.connectToNodeLight(node); + public void connectToNode(DiscoveryNode node, ConnectionProfile connectionProfile) throws ConnectTransportException { + original.connectToNode(node, connectionProfile); } @Override @@ -260,13 +253,9 @@ public final class MockTransportService extends TransportService { */ public void addUnresponsiveRule(TransportAddress transportAddress) { addDelegate(transportAddress, new DelegateTransport(original) { - @Override - public void connectToNode(DiscoveryNode node) throws ConnectTransportException { - throw new ConnectTransportException(node, "UNRESPONSIVE: simulated"); - } @Override - public void connectToNodeLight(DiscoveryNode node) throws ConnectTransportException { + public void connectToNode(DiscoveryNode node, ConnectionProfile connectionProfile) throws ConnectTransportException { throw new ConnectTransportException(node, "UNRESPONSIVE: simulated"); } @@ -308,10 +297,10 @@ public final class MockTransportService extends TransportService { } @Override - public void connectToNode(DiscoveryNode node) throws 
ConnectTransportException { + public void connectToNode(DiscoveryNode node, ConnectionProfile connectionProfile) throws ConnectTransportException { TimeValue delay = getDelay(); if (delay.millis() <= 0) { - original.connectToNode(node); + original.connectToNode(node, connectionProfile); return; } @@ -320,30 +309,7 @@ public final class MockTransportService extends TransportService { try { if (delay.millis() < connectingTimeout.millis()) { Thread.sleep(delay.millis()); - original.connectToNode(node); - } else { - Thread.sleep(connectingTimeout.millis()); - throw new ConnectTransportException(node, "UNRESPONSIVE: simulated"); - } - } catch (InterruptedException e) { - throw new ConnectTransportException(node, "UNRESPONSIVE: interrupted while sleeping", e); - } - } - - @Override - public void connectToNodeLight(DiscoveryNode node) throws ConnectTransportException { - TimeValue delay = getDelay(); - if (delay.millis() <= 0) { - original.connectToNodeLight(node); - return; - } - - // TODO: Replace with proper setting - TimeValue connectingTimeout = NetworkService.TcpSettings.TCP_CONNECT_TIMEOUT.getDefault(Settings.EMPTY); - try { - if (delay.millis() < connectingTimeout.millis()) { - Thread.sleep(delay.millis()); - original.connectToNodeLight(node); + original.connectToNode(node, connectionProfile); } else { Thread.sleep(connectingTimeout.millis()); throw new ConnectTransportException(node, "UNRESPONSIVE: simulated"); @@ -461,14 +427,10 @@ public final class MockTransportService extends TransportService { return getTransport(node).nodeConnected(node); } - @Override - public void connectToNode(DiscoveryNode node) throws ConnectTransportException { - getTransport(node).connectToNode(node); - } @Override - public void connectToNodeLight(DiscoveryNode node) throws ConnectTransportException { - getTransport(node).connectToNodeLight(node); + public void connectToNode(DiscoveryNode node, ConnectionProfile connectionProfile) throws ConnectTransportException { + getTransport(node).connectToNode(node, connectionProfile); } @Override @@ -511,24 +473,14 @@ public final class MockTransportService extends TransportService { return transport.addressesFromString(address, perAddressLimit); } - @Override - public boolean addressSupported(Class address) { - return transport.addressSupported(address); - } - @Override public boolean nodeConnected(DiscoveryNode node) { return transport.nodeConnected(node); } @Override - public void connectToNode(DiscoveryNode node) throws ConnectTransportException { - transport.connectToNode(node); - } - - @Override - public void connectToNodeLight(DiscoveryNode node) throws ConnectTransportException { - transport.connectToNodeLight(node); + public void connectToNode(DiscoveryNode node, ConnectionProfile connectionProfile) throws ConnectTransportException { + transport.connectToNode(node, connectionProfile); } @Override diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index ad37a7cacb3..434990afba9 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -21,6 +21,7 @@ package org.elasticsearch.transport; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; +import org.apache.lucene.util.Constants; import 
org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListenerResponseHandler; @@ -44,7 +45,12 @@ import org.junit.After; import org.junit.Before; import java.io.IOException; +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.net.ServerSocket; +import java.sql.Time; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -1310,7 +1316,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { } try { - serviceB.connectToNodeLightAndHandshake(nodeA, 100); + serviceB.connectToNodeAndHandshake(nodeA, 100); fail("exception should be thrown"); } catch (ConnectTransportException e) { // all is well @@ -1368,7 +1374,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { } try { - serviceB.connectToNodeLightAndHandshake(nodeA, 100); + serviceB.connectToNodeAndHandshake(nodeA, 100); fail("exception should be thrown"); } catch (ConnectTransportException e) { // all is well @@ -1721,4 +1727,46 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { serviceA.registerRequestHandler("action1", TestRequest::new, randomFrom(ThreadPool.Names.SAME, ThreadPool.Names.GENERIC), (request, message) -> {throw new AssertionError("boom");}); } + + public void testTimeoutPerConnection() throws IOException { + assumeTrue("Works only on BSD network stacks and apparently windows", + Constants.MAC_OS_X || Constants.FREE_BSD || Constants.WINDOWS); + try (ServerSocket socket = new ServerSocket()) { + // note - this test uses backlog=1, which is implementation specific, i.e. it might not work on some TCP/IP stacks + // on linux (at least newer ones) the listen(addr, backlog=1) should just ignore new connections if the queue is full, which + // means that once we receive an ACK from the client we just drop the packet on the floor (which is what we want) and we run + // into a connection timeout quickly. Other implementations can, for instance, terminate the connection within the 3-way + // handshake, which I haven't tested yet.
+ socket.bind(new InetSocketAddress(InetAddress.getLocalHost(), 0), 1); + socket.setReuseAddress(true); + DiscoveryNode first = new DiscoveryNode("TEST", new TransportAddress(socket.getInetAddress(), + socket.getLocalPort()), emptyMap(), + emptySet(), version0); + DiscoveryNode second = new DiscoveryNode("TEST", new TransportAddress(socket.getInetAddress(), + socket.getLocalPort()), emptyMap(), + emptySet(), version0); + ConnectionProfile.Builder builder = new ConnectionProfile.Builder(); + builder.addConnections(1, + TransportRequestOptions.Type.BULK, + TransportRequestOptions.Type.PING, + TransportRequestOptions.Type.RECOVERY, + TransportRequestOptions.Type.REG, + TransportRequestOptions.Type.STATE); + + // connect with one connection and a large timeout -- should consume the one spot in the backlog queue + serviceA.connectToNode(first, builder.build()); + builder.setConnectTimeout(TimeValue.timeValueMillis(1)); + final ConnectionProfile profile = builder.build(); + // now connect with the 1ms timeout and test that it is applied + long startTime = System.nanoTime(); + ConnectTransportException ex = expectThrows(ConnectTransportException.class, () -> { + serviceA.connectToNode(second, profile); + }); + final long now = System.nanoTime(); + final long timeTaken = TimeValue.nsecToMSec(now - startTime); + assertTrue("test didn't time out quickly enough, time taken: [" + timeTaken + "]", + timeTaken < TimeValue.timeValueSeconds(5).millis()); + assertEquals(ex.getMessage(), "[][" + second.getAddress() + "] connect_timeout[1ms]"); + } + } } diff --git a/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java b/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java index 84d0bed8c04..e9a97e030b2 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.CancellableThreads; import org.elasticsearch.common.util.concurrent.AbstractRunnable; @@ -46,6 +47,7 @@ import java.net.InetSocketAddress; import java.net.ServerSocket; import java.net.Socket; import java.net.SocketException; +import java.net.SocketTimeoutException; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; @@ -157,17 +159,9 @@ public class MockTcpTransport extends TcpTransport } @Override - protected NodeChannels connectToChannelsLight(DiscoveryNode node) throws IOException { - return connectToChannels(node); - } - - @Override - protected NodeChannels connectToChannels(DiscoveryNode node) throws IOException { - final NodeChannels nodeChannels = new NodeChannels(new MockChannel[1], - new MockChannel[1], - new MockChannel[1], - new MockChannel[1], - new MockChannel[1]); + protected NodeChannels connectToChannels(DiscoveryNode node, ConnectionProfile profile) throws IOException { + final MockChannel[] mockChannels = new MockChannel[1]; + final NodeChannels nodeChannels = new NodeChannels(mockChannels, ConnectionProfile.LIGHT_PROFILE); // we always use the light profile here boolean success = false; final Socket socket = new Socket(); try { @@ -186,14 +180,16 @@ public class
MockTcpTransport extends TcpTransport final InetSocketAddress address = node.getAddress().address(); // we just use a single connection configureSocket(socket); - socket.connect(address, (int) TCP_CONNECT_TIMEOUT.get(settings).millis()); + final TimeValue connectTimeout = profile.getConnectTimeout() == null ? defaultConnectionProfile.getConnectTimeout() + : profile.getConnectTimeout(); + try { + socket.connect(address, Math.toIntExact(connectTimeout.millis())); + } catch (SocketTimeoutException ex) { + throw new ConnectTransportException(node, "connect_timeout[" + connectTimeout + "]", ex); + } MockChannel channel = new MockChannel(socket, address, "none", onClose); channel.loopRead(executor); - for (MockChannel[] channels : nodeChannels.getChannelArrays()) { - for (int i = 0; i < channels.length; i++) { - channels[i] = channel; - } - } + mockChannels[0] = channel; success = true; } finally { if (success == false) { diff --git a/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java index 36903c7c608..0d37b0152bb 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java @@ -409,7 +409,6 @@ public class InternalTestClusterTests extends ESTestCase { } public void testTwoNodeCluster() throws Exception { - final boolean autoManageMinMasterNodes = randomBoolean(); NodeConfigurationSource nodeConfigurationSource = new NodeConfigurationSource() { @Override public Settings nodeSettings(int nodeOrdinal) { @@ -428,7 +427,7 @@ public class InternalTestClusterTests extends ESTestCase { boolean enableHttpPipelining = randomBoolean(); String nodePrefix = "test"; Path baseDir = createTempDir(); - InternalTestCluster cluster = new InternalTestCluster(randomLong(), baseDir, false, autoManageMinMasterNodes, 2, 2, + InternalTestCluster cluster = new InternalTestCluster(randomLong(), baseDir, false, true, 2, 2, "test", nodeConfigurationSource, 0, enableHttpPipelining, nodePrefix, Arrays.asList(MockTcpTransportPlugin.class, TestZenDiscovery.TestPlugin.class), Function.identity()); try {
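Editor's note - illustrative sketch, not part of the change set above: the new two-argument connectToNode replaces both connectToNode(DiscoveryNode) and the removed connectToNodeLight variant, so callers now describe their connection needs through a ConnectionProfile. The snippet below shows how a caller might build such a profile, using only types that appear in this diff (ConnectionProfile.Builder, TransportRequestOptions.Type, TimeValue, TransportService.connectToNode(node, profile)); the helper class name and the 500ms timeout are made-up placeholders.

import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.transport.ConnectionProfile;
import org.elasticsearch.transport.TransportRequestOptions;
import org.elasticsearch.transport.TransportService;

// Hypothetical helper, for illustration only.
class ConnectionProfileExample {
    static void connectWithShortTimeout(TransportService service, DiscoveryNode node) {
        ConnectionProfile.Builder builder = new ConnectionProfile.Builder();
        // a single connection shared by all request types, as in testTimeoutPerConnection above
        builder.addConnections(1,
            TransportRequestOptions.Type.BULK,
            TransportRequestOptions.Type.PING,
            TransportRequestOptions.Type.RECOVERY,
            TransportRequestOptions.Type.REG,
            TransportRequestOptions.Type.STATE);
        // per-profile connect timeout (placeholder value); when left unset, MockTcpTransport
        // falls back to defaultConnectionProfile.getConnectTimeout() as shown in the hunk above
        builder.setConnectTimeout(TimeValue.timeValueMillis(500));
        service.connectToNode(node, builder.build());
    }
}

The same builder pattern is what testTimeoutPerConnection uses to drive the connect timeout down to 1ms and assert that it is honored per profile rather than taken from the global TCP setting.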