diff --git a/dev-tools/create-bwc-index.py b/dev-tools/create_bwc_index.py similarity index 90% rename from dev-tools/create-bwc-index.py rename to dev-tools/create_bwc_index.py index 6cdf9b36fa3..cb2ceaf2de3 100644 --- a/dev-tools/create-bwc-index.py +++ b/dev-tools/create_bwc_index.py @@ -19,11 +19,15 @@ import glob import logging import os import random +import shutil import subprocess import sys import tempfile import time +DEFAULT_TRANSPORT_TCP_PORT = 9300 +DEFAULT_HTTP_TCP_PORT = 9200 + if sys.version_info[0] < 3: print('%s must use python 3.x (for the ES python client)' % sys.argv[0]) @@ -126,14 +130,17 @@ def build_version(version_tuple): def build_tuple(version_string): return [int(x) for x in version_string.split('.')] -def start_node(version, release_dir, data_dir, tcp_port, http_port): - logging.info('Starting node from %s on port %s/%s' % (release_dir, tcp_port, http_port)) +def start_node(version, release_dir, data_dir, tcp_port=DEFAULT_TRANSPORT_TCP_PORT, http_port=DEFAULT_HTTP_TCP_PORT, cluster_name=None): + logging.info('Starting node from %s on port %s/%s, data_dir %s' % (release_dir, tcp_port, http_port, data_dir)) + if cluster_name is None: + cluster_name = 'bwc_index_' + version + cmd = [ os.path.join(release_dir, 'bin/elasticsearch'), '-Des.path.data=%s' % data_dir, '-Des.path.logs=logs', - '-Des.cluster.name=bwc_index_' + version, - '-Des.network.host=localhost', + '-Des.cluster.name=%s' % cluster_name, + '-Des.network.host=localhost', '-Des.discovery.zen.ping.multicast.enabled=false', '-Des.transport.tcp.port=%s' % tcp_port, '-Des.http.port=%s' % http_port @@ -142,7 +149,7 @@ def start_node(version, release_dir, data_dir, tcp_port, http_port): cmd.append('-f') # version before 1.0 start in background automatically return subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) -def create_client(http_port, timeout=30): +def create_client(http_port=DEFAULT_HTTP_TCP_PORT, timeout=30): logging.info('Waiting for node to startup') for _ in range(0, timeout): # TODO: ask Honza if there is a better way to do this? 
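Note: renaming create-bwc-index.py to create_bwc_index.py makes the script importable as a module, and the new port/cluster-name defaults let other tools start a node and client without repeating configuration. A minimal sketch of that reuse, assuming it is run from the dev-tools directory and that a release is already unpacked at the illustrative path below:

    import create_bwc_index

    release_dir = 'backwards/elasticsearch-1.4.4'  # hypothetical unpacked release
    data_dir = '/tmp/bwc_demo_data'                # hypothetical scratch directory

    # tcp_port, http_port and cluster_name all fall back to the new defaults.
    node = create_bwc_index.start_node('1.4.4', release_dir, data_dir)
    try:
        client = create_bwc_index.create_client()  # waits for HTTP on the default port 9200
        print(client.info())
    finally:
        create_bwc_index.shutdown_node(node)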
@@ -158,8 +165,6 @@ def create_client(http_port, timeout=30): def generate_index(client, version, index_name): client.indices.delete(index=index_name, ignore=404) - num_shards = random.randint(1, 10) - num_replicas = random.randint(0, 1) logging.info('Create single shard test index') mappings = {} @@ -226,6 +231,11 @@ def generate_index(client, version, index_name): } } } + mappings['auto_boost'] = { + '_all': { + 'auto_boost': True + } + } client.indices.create(index=index_name, body={ 'settings': { @@ -295,7 +305,7 @@ def compress(tmp_dir, output_dir, zipfile, directory): zipfile = os.path.join(abs_output_dir, zipfile) if os.path.exists(zipfile): os.remove(zipfile) - logging.info('Compressing index into %s', zipfile) + logging.info('Compressing index into %s, tmpDir %s', zipfile, tmp_dir) olddir = os.getcwd() os.chdir(tmp_dir) subprocess.check_call('zip -r %s %s' % (zipfile, directory), shell=True) @@ -313,9 +323,9 @@ def parse_config(): help='The directory containing elasticsearch releases') parser.add_argument('--output-dir', '-o', default='src/test/resources/org/elasticsearch/bwcompat', help='The directory to write the zipped index into') - parser.add_argument('--tcp-port', default=9300, type=int, + parser.add_argument('--tcp-port', default=DEFAULT_TRANSPORT_TCP_PORT, type=int, help='The port to use as the minimum port for TCP communication') - parser.add_argument('--http-port', default=9200, type=int, + parser.add_argument('--http-port', default=DEFAULT_HTTP_TCP_PORT, type=int, help='The port to use as the minimum port for HTTP communication') cfg = parser.parse_args() @@ -334,14 +344,17 @@ def create_bwc_index(cfg, version): logging.info('--> Creating bwc index for %s' % version) release_dir = os.path.join(cfg.releases_dir, 'elasticsearch-%s' % version) if not os.path.exists(release_dir): - parser.error('ES version %s does not exist in %s' % (version, cfg.releases_dir)) + raise RuntimeError('ES version %s does not exist in %s' % (version, cfg.releases_dir)) snapshot_supported = not (version.startswith('0.') or version == '1.0.0.Beta1') tmp_dir = tempfile.mkdtemp() + data_dir = os.path.join(tmp_dir, 'data') repo_dir = os.path.join(tmp_dir, 'repo') logging.info('Temp data dir: %s' % data_dir) logging.info('Temp repo dir: %s' % repo_dir) + node = None + try: node = start_node(version, release_dir, data_dir, cfg.tcp_port, cfg.http_port) client = create_client(cfg.http_port) @@ -354,16 +367,26 @@ def create_bwc_index(cfg, version): # this after the snapshot, because it calls flush. Otherwise the index # will already have the deletions applied on upgrade. 
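The new auto_boost mapping above is not incidental: later in this change AllFieldMapper only tolerates the _all auto_boost setting for indices created before 2.0 (where it is parsed and then ignored), so the generated backwards-compatibility index carries it to exercise that path on upgrade. Assembled into a full request, the creation call the script issues looks roughly like the sketch below (client, index name and settings are illustrative; the script builds them from its own state):

    from elasticsearch import Elasticsearch

    client = Elasticsearch([{'host': 'localhost', 'port': 9200}])  # assumes a local test node
    client.indices.create(index='bwc_test_index', body={           # hypothetical index name
        'settings': {
            'number_of_shards': 1,      # the script now always creates a single-shard index
            'number_of_replicas': 0,
        },
        'mappings': {
            'auto_boost': {
                '_all': {'auto_boost': True}
            }
        }
    })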
delete_by_query(client, version, index_name, 'doc') - - finally: - if 'node' in vars(): - logging.info('Shutting down node with pid %d', node.pid) - node.terminate() - time.sleep(1) # some nodes take time to terminate - compress_index(version, tmp_dir, cfg.output_dir) - if snapshot_supported: - compress_repo(version, tmp_dir, cfg.output_dir) + shutdown_node(node) + node = None + + compress_index(version, tmp_dir, cfg.output_dir) + if snapshot_supported: + compress_repo(version, tmp_dir, cfg.output_dir) + finally: + + if node is not None: + # This only happens if we've hit an exception: + shutdown_node(node) + + shutil.rmtree(tmp_dir) + +def shutdown_node(node): + logging.info('Shutting down node with pid %d', node.pid) + node.terminate() + node.wait() + def main(): logging.basicConfig(format='[%(levelname)s] [%(asctime)s] %(message)s', level=logging.INFO, datefmt='%Y-%m-%d %I:%M:%S %p') diff --git a/dev-tools/create_bwc_index_with_some_ancient_segments.py b/dev-tools/create_bwc_index_with_some_ancient_segments.py new file mode 100644 index 00000000000..d1162d4690f --- /dev/null +++ b/dev-tools/create_bwc_index_with_some_ancient_segments.py @@ -0,0 +1,113 @@ +import create_bwc_index +import logging +import os +import random +import shutil +import subprocess +import sys +import tempfile + +def fetch_version(version): + logging.info('fetching ES version %s' % version) + if subprocess.call([sys.executable, os.path.join(os.path.split(sys.argv[0])[0], 'get-bwc-version.py'), version]) != 0: + raise RuntimeError('failed to download ES version %s' % version) + +def main(): + ''' + Creates a static back compat index (.zip) with mixed 0.20 (Lucene 3.x) and 0.90 (Lucene 4.x) segments. + ''' + + logging.basicConfig(format='[%(levelname)s] [%(asctime)s] %(message)s', level=logging.INFO, + datefmt='%Y-%m-%d %I:%M:%S %p') + logging.getLogger('elasticsearch').setLevel(logging.ERROR) + logging.getLogger('urllib3').setLevel(logging.WARN) + + tmp_dir = tempfile.mkdtemp() + try: + data_dir = os.path.join(tmp_dir, 'data') + logging.info('Temp data dir: %s' % data_dir) + + first_version = '0.20.6' + second_version = '0.90.6' + index_name = 'index-%s-and-%s' % (first_version, second_version) + + # Download old ES releases if necessary: + release_dir = os.path.join('backwards', 'elasticsearch-%s' % first_version) + if not os.path.exists(release_dir): + fetch_version(first_version) + + node = create_bwc_index.start_node(first_version, release_dir, data_dir, cluster_name=index_name) + client = create_bwc_index.create_client() + + # Creates the index & indexes docs w/ first_version: + create_bwc_index.generate_index(client, first_version, index_name) + + # Make sure we write segments: + flush_result = client.indices.flush(index=index_name) + if not flush_result['ok']: + raise RuntimeError('flush failed: %s' % str(flush_result)) + + segs = client.indices.segments(index=index_name) + shards = segs['indices'][index_name]['shards'] + if len(shards) != 1: + raise RuntimeError('index should have 1 shard but got %s' % len(shards)) + + first_version_segs = shards['0'][0]['segments'].keys() + + create_bwc_index.shutdown_node(node) + print('%s server output:\n%s' % (first_version, node.stdout.read().decode('utf-8'))) + node = None + + release_dir = os.path.join('backwards', 'elasticsearch-%s' % second_version) + if not os.path.exists(release_dir): + fetch_version(second_version) + + # Now also index docs with second_version: + node = create_bwc_index.start_node(second_version, release_dir, data_dir, cluster_name=index_name) + 
client = create_bwc_index.create_client() + + # If we index too many docs, the random refresh/flush causes the ancient segments to be merged away: + num_docs = 10 + create_bwc_index.index_documents(client, index_name, 'doc', num_docs) + + # Make sure we get a segment: + flush_result = client.indices.flush(index=index_name) + if not flush_result['ok']: + raise RuntimeError('flush failed: %s' % str(flush_result)) + + # Make sure we see mixed segments (it's possible Lucene could have "accidentally" merged away the first_version segments): + segs = client.indices.segments(index=index_name) + shards = segs['indices'][index_name]['shards'] + if len(shards) != 1: + raise RuntimeError('index should have 1 shard but got %s' % len(shards)) + + second_version_segs = shards['0'][0]['segments'].keys() + #print("first: %s" % first_version_segs) + #print("second: %s" % second_version_segs) + + for segment_name in first_version_segs: + if segment_name in second_version_segs: + # Good: an ancient version seg "survived": + break + else: + raise RuntimeError('index has no first_version segs left') + + for segment_name in second_version_segs: + if segment_name not in first_version_segs: + # Good: a second_version segment was written + break + else: + raise RuntimeError('index has no second_version segs left') + + create_bwc_index.shutdown_node(node) + print('%s server output:\n%s' % (second_version, node.stdout.read().decode('utf-8'))) + node = None + create_bwc_index.compress_index('%s-and-%s' % (first_version, second_version), tmp_dir, 'src/test/resources/org/elasticsearch/rest/action/admin/indices/upgrade') + finally: + if node is not None: + create_bwc_index.shutdown_node(node) + shutil.rmtree(tmp_dir) + +if __name__ == '__main__': + main() + diff --git a/docs/reference/indices/upgrade.asciidoc b/docs/reference/indices/upgrade.asciidoc index 5b4ebb9ec79..295a407f979 100644 --- a/docs/reference/indices/upgrade.asciidoc +++ b/docs/reference/indices/upgrade.asciidoc @@ -21,12 +21,30 @@ This call will block until the upgrade is complete. If the http connection is lost, the request will continue in the background, and any new requests will block until the previous upgrade is complete. +[float] +[[upgrade-parameters]] +==== Request Parameters + +The `upgrade` API accepts the following request parameters: + +[horizontal] +`only_ancient_segments`:: If true, only very old segments (from a +previous Lucene major release) will be upgraded. While this will do +the minimal work to ensure the next major release of Elasticsearch can +read the segments, it's dangerous because it can leave other very old +segments in sub-optimal formats. Defaults to `false`. + [float] === Check upgrade status Use a `GET` request to monitor how much of an index is upgraded. This -can also be used prior to starting an upgrade to identify which indices -you want to upgrade at the same time. +can also be used prior to starting an upgrade to identify which +indices you want to upgrade at the same time. + +The `ancient` byte values that are returned indicate total bytes of +segments whose version is extremely old (Lucene major version is +different from the current version), showing how much upgrading is +necessary when you run with `only_ancient_segments=true`. 
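The parameter documented above, together with the status check, maps onto plain HTTP calls; a hedged Python sketch using the requests package (host, port and index name are placeholders — the curl example in the next hunk shows the same status request):

    import requests

    base = 'http://localhost:9200/twitter/_upgrade'  # hypothetical index

    # How much of the index (including 'ancient' bytes) still needs upgrading.
    status = requests.get(base, params={'pretty': 'true', 'human': 'true'}).json()
    print(status)

    # Upgrade only segments written by an older Lucene major version.
    requests.post(base, params={'only_ancient_segments': 'true'}).raise_for_status()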
[source,sh] -------------------------------------------------- @@ -41,6 +59,8 @@ curl 'http://localhost:9200/twitter/_upgrade?pretty&human' "size_in_bytes": "21000000000", "size_to_upgrade": "10gb", "size_to_upgrade_in_bytes": "10000000000" + "size_to_upgrade_ancient": "1gb", + "size_to_upgrade_ancient_in_bytes": "1000000000" } } -------------------------------------------------- diff --git a/pom.xml b/pom.xml index 50aeb5bec18..ebe48cb4d74 100644 --- a/pom.xml +++ b/pom.xml @@ -80,7 +80,7 @@ com.carrotsearch.randomizedtesting randomizedtesting-runner - 2.1.11 + 2.1.13 test @@ -501,7 +501,7 @@ com.carrotsearch.randomizedtesting junit4-maven-plugin - 2.1.2 + 2.1.13 tests diff --git a/rest-api-spec/api/indices.upgrade.json b/rest-api-spec/api/indices.upgrade.json index ce8cfdfbe3c..0e5e4ffd244 100644 --- a/rest-api-spec/api/indices.upgrade.json +++ b/rest-api-spec/api/indices.upgrade.json @@ -27,8 +27,12 @@ "description" : "Whether specified concrete indices should be ignored when unavailable (missing or closed)" }, "wait_for_completion": { + "type" : "boolean", + "description" : "Specify whether the request should block until the all segments are upgraded (default: false)" + }, + "only_ancient_segments": { "type" : "boolean", - "description" : "Specify whether the request should block until the all segments are upgraded (default: false)" + "description" : "If true, only ancient (an older Lucene major release) segments will be upgraded" } } }, diff --git a/src/main/java/org/elasticsearch/action/admin/indices/optimize/OptimizeRequest.java b/src/main/java/org/elasticsearch/action/admin/indices/optimize/OptimizeRequest.java index 9994627e0fc..d5b822f58cb 100644 --- a/src/main/java/org/elasticsearch/action/admin/indices/optimize/OptimizeRequest.java +++ b/src/main/java/org/elasticsearch/action/admin/indices/optimize/OptimizeRequest.java @@ -44,12 +44,14 @@ public class OptimizeRequest extends BroadcastOperationRequest public static final boolean ONLY_EXPUNGE_DELETES = false; public static final boolean FLUSH = true; public static final boolean UPGRADE = false; + public static final boolean UPGRADE_ONLY_ANCIENT_SEGMENTS = false; } private int maxNumSegments = Defaults.MAX_NUM_SEGMENTS; private boolean onlyExpungeDeletes = Defaults.ONLY_EXPUNGE_DELETES; private boolean flush = Defaults.FLUSH; private boolean upgrade = Defaults.UPGRADE; + private boolean upgradeOnlyAncientSegments = Defaults.UPGRADE_ONLY_ANCIENT_SEGMENTS; /** * Constructs an optimization request over one or more indices. @@ -136,6 +138,7 @@ public class OptimizeRequest extends BroadcastOperationRequest onlyExpungeDeletes = in.readBoolean(); flush = in.readBoolean(); upgrade = in.readBoolean(); + upgradeOnlyAncientSegments = in.readBoolean(); } @Override @@ -145,6 +148,23 @@ public class OptimizeRequest extends BroadcastOperationRequest out.writeBoolean(onlyExpungeDeletes); out.writeBoolean(flush); out.writeBoolean(upgrade); + out.writeBoolean(upgradeOnlyAncientSegments); + } + + /** + * Should the merge upgrade only the ancient (older major version of Lucene) segments? + * Defaults to false. 
+ */ + public boolean upgradeOnlyAncientSegments() { + return upgradeOnlyAncientSegments; + } + + /** + * See {@link #upgradeOnlyAncientSegments()} + */ + public OptimizeRequest upgradeOnlyAncientSegments(boolean upgradeOnlyAncientSegments) { + this.upgradeOnlyAncientSegments = upgradeOnlyAncientSegments; + return this; } @Override @@ -154,6 +174,7 @@ public class OptimizeRequest extends BroadcastOperationRequest ", onlyExpungeDeletes=" + onlyExpungeDeletes + ", flush=" + flush + ", upgrade=" + upgrade + + ", upgradeOnlyAncientSegments=" + upgradeOnlyAncientSegments + '}'; } } diff --git a/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java b/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java index 70e2b5039b6..92e4048e962 100644 --- a/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java +++ b/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java @@ -19,11 +19,14 @@ package org.elasticsearch.common.lucene.all; +import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.PostingsEnum; import org.apache.lucene.index.Term; import org.apache.lucene.index.Terms; import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.payloads.AveragePayloadFunction; import org.apache.lucene.search.payloads.PayloadTermQuery; import org.apache.lucene.search.similarities.Similarity; @@ -119,4 +122,22 @@ public final class AllTermQuery extends PayloadTermQuery { return true; } + @Override + public Query rewrite(IndexReader reader) throws IOException { + boolean hasPayloads = false; + for (LeafReaderContext context : reader.leaves()) { + final Terms terms = context.reader().terms(term.field()); + if (terms.hasPayloads()) { + hasPayloads = true; + break; + } + } + if (hasPayloads == false) { + TermQuery rewritten = new TermQuery(term); + rewritten.setBoost(getBoost()); + return rewritten; + } + return this; + } + } diff --git a/src/main/java/org/elasticsearch/index/engine/Engine.java b/src/main/java/org/elasticsearch/index/engine/Engine.java index 83ebfb72a7c..392a663d293 100644 --- a/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -430,13 +430,13 @@ public abstract class Engine implements Closeable { * Optimizes to 1 segment */ public void forceMerge(boolean flush) { - forceMerge(flush, 1, false, false); + forceMerge(flush, 1, false, false, false); } /** * Triggers a forced merge on this engine */ - public abstract void forceMerge(boolean flush, int maxNumSegments, boolean onlyExpungeDeletes, boolean upgrade) throws EngineException; + public abstract void forceMerge(boolean flush, int maxNumSegments, boolean onlyExpungeDeletes, boolean upgrade, boolean upgradeOnlyAncientSegments) throws EngineException; /** * Snapshots the index and returns a handle to it. Will always try and "commit" the diff --git a/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index ab0088aa4f6..b4db6b93f17 100644 --- a/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -615,7 +615,7 @@ public class InternalEngine extends Engine { } } /* - * Unfortunately the lock order is important here. 
We have to acquire the readlock fist otherwise + * Unfortunately the lock order is important here. We have to acquire the readlock first otherwise * if we are flushing at the end of the recovery while holding the write lock we can deadlock if: * Thread 1: flushes via API and gets the flush lock but blocks on the readlock since Thread 2 has the writeLock * Thread 2: flushes at the end of the recovery holding the writeLock and blocks on the flushLock owned by Thread 1 @@ -742,7 +742,8 @@ public class InternalEngine extends Engine { } @Override - public void forceMerge(final boolean flush, int maxNumSegments, boolean onlyExpungeDeletes, final boolean upgrade) throws EngineException { + public void forceMerge(final boolean flush, int maxNumSegments, boolean onlyExpungeDeletes, + final boolean upgrade, final boolean upgradeOnlyAncientSegments) throws EngineException { /* * We do NOT acquire the readlock here since we are waiting on the merges to finish * that's fine since the IW.rollback should stop all the threads and trigger an IOException @@ -760,8 +761,8 @@ public class InternalEngine extends Engine { try { ensureOpen(); if (upgrade) { - logger.info("starting segment upgrade"); - mp.setUpgradeInProgress(true); + logger.info("starting segment upgrade upgradeOnlyAncientSegments={}", upgradeOnlyAncientSegments); + mp.setUpgradeInProgress(true, upgradeOnlyAncientSegments); } store.incRef(); // increment the ref just to ensure nobody closes the store while we optimize try { @@ -789,7 +790,7 @@ public class InternalEngine extends Engine { throw ex; } finally { try { - mp.setUpgradeInProgress(false); // reset it just to make sure we reset it in a case of an error + mp.setUpgradeInProgress(false, false); // reset it just to make sure we reset it in a case of an error } finally { optimizeLock.unlock(); } diff --git a/src/main/java/org/elasticsearch/index/engine/ShadowEngine.java b/src/main/java/org/elasticsearch/index/engine/ShadowEngine.java index c49758398bc..31c5a23c578 100644 --- a/src/main/java/org/elasticsearch/index/engine/ShadowEngine.java +++ b/src/main/java/org/elasticsearch/index/engine/ShadowEngine.java @@ -147,7 +147,7 @@ public class ShadowEngine extends Engine { } @Override - public void forceMerge(boolean flush, int maxNumSegments, boolean onlyExpungeDeletes, boolean upgrade) throws EngineException { + public void forceMerge(boolean flush, int maxNumSegments, boolean onlyExpungeDeletes, boolean upgrade, boolean upgradeOnlyAncientSegments) throws EngineException { // no-op logger.trace("skipping FORCE-MERGE on shadow engine"); } diff --git a/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index f1693fec303..112e68e49bd 100644 --- a/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; import com.google.common.collect.Sets; + import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; @@ -33,6 +34,7 @@ import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.ElasticsearchIllegalArgumentException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Preconditions; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; 
import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.compress.CompressedString; @@ -70,6 +72,7 @@ import org.elasticsearch.script.ScriptService.ScriptType; import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; @@ -438,10 +441,11 @@ public class DocumentMapper implements ToXContent { ParseContext.InternalParseContext context = cache.get(); if (source.type() != null && !source.type().equals(this.type)) { - throw new MapperParsingException("Type mismatch, provide type [" + source.type() + "] but mapper is of type [" + this.type + "]", context.mappingsModified()); + throw new MapperParsingException("Type mismatch, provide type [" + source.type() + "] but mapper is of type [" + this.type + "]"); } source.type(this.type); + boolean mappingsModified = false; XContentParser parser = source.parser(); try { if (parser == null) { @@ -456,7 +460,7 @@ public class DocumentMapper implements ToXContent { int countDownTokens = 0; XContentParser.Token token = parser.nextToken(); if (token != XContentParser.Token.START_OBJECT) { - throw new MapperParsingException("Malformed content, must start with an object", context.mappingsModified()); + throw new MapperParsingException("Malformed content, must start with an object"); } boolean emptyDoc = false; token = parser.nextToken(); @@ -464,7 +468,7 @@ public class DocumentMapper implements ToXContent { // empty doc, we can handle it... emptyDoc = true; } else if (token != XContentParser.Token.FIELD_NAME) { - throw new MapperParsingException("Malformed content, after first object, either the type field or the actual properties should exist", context.mappingsModified()); + throw new MapperParsingException("Malformed content, after first object, either the type field or the actual properties should exist"); } for (RootMapper rootMapper : rootMappersOrdered) { @@ -472,7 +476,31 @@ public class DocumentMapper implements ToXContent { } if (!emptyDoc) { - rootObjectMapper.parse(context); + Mapper update = rootObjectMapper.parse(context); + for (RootObjectMapper mapper : context.updates()) { + if (update == null) { + update = mapper; + } else { + MapperUtils.merge(update, mapper); + } + } + if (update != null) { + // TODO: validate the mapping update on the master node + // lock to avoid concurrency issues with mapping updates coming from the API + synchronized(this) { + // simulate on the first time to check if the mapping update is applicable + MergeContext mergeContext = newMmergeContext(new MergeFlags().simulate(true)); + rootObjectMapper.merge(update, mergeContext); + if (mergeContext.hasConflicts()) { + throw new MapperParsingException("Could not apply generated dynamic mappings: " + Arrays.toString(mergeContext.buildConflicts())); + } else { + // then apply it for real + mappingsModified = true; + mergeContext = newMmergeContext(new MergeFlags().simulate(false)); + rootObjectMapper.merge(update, mergeContext); + } + } + } } for (int i = 0; i < countDownTokens; i++) { @@ -490,10 +518,10 @@ public class DocumentMapper implements ToXContent { // Throw a more meaningful message if the document is empty. 
if (source.source() != null && source.source().length() == 0) { - throw new MapperParsingException("failed to parse, document is empty", context.mappingsModified()); + throw new MapperParsingException("failed to parse, document is empty"); } - throw new MapperParsingException("failed to parse", e, context.mappingsModified()); + throw new MapperParsingException("failed to parse", e); } finally { // only close the parser when its not provided externally if (source.parser() == null && parser != null) { @@ -521,7 +549,7 @@ public class DocumentMapper implements ToXContent { } ParsedDocument doc = new ParsedDocument(context.uid(), context.version(), context.id(), context.type(), source.routing(), source.timestamp(), source.ttl(), context.docs(), - context.source(), context.mappingsModified()).parent(source.parent()); + context.source(), mappingsModified).parent(source.parent()); // reset the context to free up memory context.reset(null, null, null, null); return doc; @@ -637,8 +665,41 @@ public class DocumentMapper implements ToXContent { rootObjectMapper.traverse(listener); } + private MergeContext newMmergeContext(MergeFlags mergeFlags) { + return new MergeContext(mergeFlags) { + + List conflicts = new ArrayList<>(); + + @Override + public void addFieldMappers(List> fieldMappers) { + DocumentMapper.this.addFieldMappers(fieldMappers); + } + + @Override + public void addObjectMappers(Collection objectMappers) { + DocumentMapper.this.addObjectMappers(objectMappers); + } + + @Override + public void addConflict(String mergeFailure) { + conflicts.add(mergeFailure); + } + + @Override + public boolean hasConflicts() { + return conflicts.isEmpty() == false; + } + + @Override + public String[] buildConflicts() { + return conflicts.toArray(Strings.EMPTY_ARRAY); + } + + }; + } + public synchronized MergeResult merge(DocumentMapper mergeWith, MergeFlags mergeFlags) { - MergeContext mergeContext = new MergeContext(this, mergeFlags); + final MergeContext mergeContext = newMmergeContext(mergeFlags); assert rootMappers.size() == mergeWith.rootMappers.size(); rootObjectMapper.merge(mergeWith.rootObjectMapper, mergeContext); diff --git a/src/main/java/org/elasticsearch/index/mapper/Mapper.java b/src/main/java/org/elasticsearch/index/mapper/Mapper.java index 151940b6c30..ae2f6acbf2d 100644 --- a/src/main/java/org/elasticsearch/index/mapper/Mapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/Mapper.java @@ -125,7 +125,12 @@ public interface Mapper extends ToXContent { String name(); - void parse(ParseContext context) throws IOException; + /** + * Parse using the provided {@link ParseContext} and return a mapping + * update if dynamic mappings modified the mappings, or {@code null} if + * mappings were not modified. 
+ */ + Mapper parse(ParseContext context) throws IOException; void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException; diff --git a/src/main/java/org/elasticsearch/index/mapper/MapperParsingException.java b/src/main/java/org/elasticsearch/index/mapper/MapperParsingException.java index 9cac8a26ef9..25a7e1f9233 100644 --- a/src/main/java/org/elasticsearch/index/mapper/MapperParsingException.java +++ b/src/main/java/org/elasticsearch/index/mapper/MapperParsingException.java @@ -28,28 +28,10 @@ public class MapperParsingException extends MapperException { public MapperParsingException(String message) { super(message); - mappingsModified = false; - } - - public boolean isMappingsModified() { - return mappingsModified; - } - - private boolean mappingsModified = false; - - public MapperParsingException(String message, boolean mappingsModified) { - super(message); - this.mappingsModified = mappingsModified; - } - - public MapperParsingException(String message, Throwable cause, boolean mappingsModified) { - super(message, cause); - this.mappingsModified = mappingsModified; } public MapperParsingException(String message, Throwable cause) { super(message, cause); - this.mappingsModified = false; } @Override diff --git a/src/main/java/org/elasticsearch/index/mapper/MapperUtils.java b/src/main/java/org/elasticsearch/index/mapper/MapperUtils.java new file mode 100644 index 00000000000..be4915b8392 --- /dev/null +++ b/src/main/java/org/elasticsearch/index/mapper/MapperUtils.java @@ -0,0 +1,82 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.mapper; + +import org.elasticsearch.ElasticsearchIllegalStateException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.index.mapper.object.ObjectMapper; +import org.elasticsearch.index.mapper.object.RootObjectMapper; + +import java.io.IOException; +import java.util.Collection; +import java.util.List; + +public enum MapperUtils { + ; + + /** + * Parse the given {@code context} with the given {@code mapper} and apply + * the potential mapping update in-place. This method is useful when + * composing mapping updates. + */ + public static M parseAndMergeUpdate(M mapper, ParseContext context) throws IOException { + final Mapper update = mapper.parse(context); + if (update != null) { + merge(mapper, update); + } + return mapper; + } + + /** + * Merge {@code mergeWith} into {@code mergeTo}. Note: this method only + * merges mappings, not lookup structures. Conflicts are returned as exceptions. 
+ */ + public static void merge(Mapper mergeInto, Mapper mergeWith) { + MergeContext ctx = new MergeContext(new DocumentMapper.MergeFlags().simulate(false)) { + + @Override + public boolean hasConflicts() { + return false; + } + + @Override + public String[] buildConflicts() { + return Strings.EMPTY_ARRAY; + } + + @Override + public void addObjectMappers(Collection objectMappers) { + // no-op + } + + @Override + public void addFieldMappers(List> fieldMappers) { + // no-op + } + + @Override + public void addConflict(String mergeFailure) { + throw new ElasticsearchIllegalStateException("Merging dynamic updates triggered a conflict: " + mergeFailure); + } + }; + mergeInto.merge(mergeWith, ctx); + } + +} diff --git a/src/main/java/org/elasticsearch/index/mapper/MergeContext.java b/src/main/java/org/elasticsearch/index/mapper/MergeContext.java index 4c250c242f1..f8ddb837517 100644 --- a/src/main/java/org/elasticsearch/index/mapper/MergeContext.java +++ b/src/main/java/org/elasticsearch/index/mapper/MergeContext.java @@ -19,41 +19,33 @@ package org.elasticsearch.index.mapper; -import com.google.common.collect.Lists; +import org.elasticsearch.index.mapper.object.ObjectMapper; +import java.util.Collection; import java.util.List; /** * */ -public class MergeContext { +public abstract class MergeContext { - private final DocumentMapper documentMapper; private final DocumentMapper.MergeFlags mergeFlags; - private final List mergeConflicts = Lists.newArrayList(); - public MergeContext(DocumentMapper documentMapper, DocumentMapper.MergeFlags mergeFlags) { - this.documentMapper = documentMapper; + public MergeContext(DocumentMapper.MergeFlags mergeFlags) { this.mergeFlags = mergeFlags; } - public DocumentMapper docMapper() { - return documentMapper; - } + public abstract void addFieldMappers(List> fieldMappers); + + public abstract void addObjectMappers(Collection objectMappers); public DocumentMapper.MergeFlags mergeFlags() { return mergeFlags; } - public void addConflict(String mergeFailure) { - mergeConflicts.add(mergeFailure); - } + public abstract void addConflict(String mergeFailure); - public boolean hasConflicts() { - return !mergeConflicts.isEmpty(); - } + public abstract boolean hasConflicts(); - public String[] buildConflicts() { - return mergeConflicts.toArray(new String[mergeConflicts.size()]); - } + public abstract String[] buildConflicts(); } diff --git a/src/main/java/org/elasticsearch/index/mapper/ParseContext.java b/src/main/java/org/elasticsearch/index/mapper/ParseContext.java index 9f50a7df045..7cf3d97938b 100644 --- a/src/main/java/org/elasticsearch/index/mapper/ParseContext.java +++ b/src/main/java/org/elasticsearch/index/mapper/ParseContext.java @@ -22,7 +22,7 @@ package org.elasticsearch.index.mapper; import com.carrotsearch.hppc.ObjectObjectMap; import com.carrotsearch.hppc.ObjectObjectOpenHashMap; import com.google.common.collect.Lists; -import org.apache.lucene.analysis.Analyzer; + import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; @@ -38,7 +38,11 @@ import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.mapper.DocumentMapper.ParseListener; import org.elasticsearch.index.mapper.object.RootObjectMapper; -import java.util.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; /** * @@ -194,31 +198,6 @@ public abstract class ParseContext { return in.docMapperParser(); } - @Override - 
public boolean mappingsModified() { - return in.mappingsModified(); - } - - @Override - public void setMappingsModified() { - in.setMappingsModified(); - } - - @Override - public void setWithinNewMapper() { - in.setWithinNewMapper(); - } - - @Override - public void clearWithinNewMapper() { - in.clearWithinNewMapper(); - } - - @Override - public boolean isWithinNewMapper() { - return in.isWithinNewMapper(); - } - @Override public boolean isWithinCopyTo() { return in.isWithinCopyTo(); @@ -379,6 +358,15 @@ public abstract class ParseContext { return in.stringBuilder(); } + @Override + public void addRootObjectUpdate(RootObjectMapper update) { + in.addRootObjectUpdate(update); + } + + @Override + public List updates() { + return in.updates(); + } } public static class InternalParseContext extends ParseContext { @@ -414,12 +402,13 @@ public abstract class ParseContext { private Map ignoredValues = new HashMap<>(); private boolean mappingsModified = false; - private boolean withinNewMapper = false; private AllEntries allEntries = new AllEntries(); private float docBoost = 1.0f; + private final List rootMapperDynamicUpdates = new ArrayList<>(); + public InternalParseContext(String index, @Nullable Settings indexSettings, DocumentMapperParser docMapperParser, DocumentMapper docMapper, ContentPath path) { this.index = index; this.indexSettings = indexSettings; @@ -444,11 +433,11 @@ public abstract class ParseContext { this.source = source == null ? null : sourceToParse.source(); this.path.reset(); this.mappingsModified = false; - this.withinNewMapper = false; this.listener = listener == null ? DocumentMapper.ParseListener.EMPTY : listener; this.allEntries = new AllEntries(); this.ignoredValues.clear(); this.docBoost = 1.0f; + this.rootMapperDynamicUpdates.clear(); } @Override @@ -461,31 +450,6 @@ public abstract class ParseContext { return this.docMapperParser; } - @Override - public boolean mappingsModified() { - return this.mappingsModified; - } - - @Override - public void setMappingsModified() { - this.mappingsModified = true; - } - - @Override - public void setWithinNewMapper() { - this.withinNewMapper = true; - } - - @Override - public void clearWithinNewMapper() { - this.withinNewMapper = false; - } - - @Override - public boolean isWithinNewMapper() { - return withinNewMapper; - } - @Override public String index() { return this.index; @@ -638,22 +602,22 @@ public abstract class ParseContext { stringBuilder.setLength(0); return this.stringBuilder; } + + @Override + public void addRootObjectUpdate(RootObjectMapper mapper) { + rootMapperDynamicUpdates.add(mapper); + } + + @Override + public List updates() { + return rootMapperDynamicUpdates; + } } public abstract boolean flyweight(); public abstract DocumentMapperParser docMapperParser(); - public abstract boolean mappingsModified(); - - public abstract void setMappingsModified(); - - public abstract void setWithinNewMapper(); - - public abstract void clearWithinNewMapper(); - - public abstract boolean isWithinNewMapper(); - /** * Return a new context that will be within a copy-to operation. */ @@ -854,4 +818,15 @@ public abstract class ParseContext { */ public abstract StringBuilder stringBuilder(); + /** + * Add a dynamic update to the root object mapper. + * TODO: can we nuke it, it is only needed for copy_to + */ + public abstract void addRootObjectUpdate(RootObjectMapper update); + + /** + * Get dynamic updates to the root object mapper. 
+ * TODO: can we nuke it, it is only needed for copy_to + */ + public abstract List updates(); } diff --git a/src/main/java/org/elasticsearch/index/mapper/StrictDynamicMappingException.java b/src/main/java/org/elasticsearch/index/mapper/StrictDynamicMappingException.java index 098d5abf086..f675396369b 100644 --- a/src/main/java/org/elasticsearch/index/mapper/StrictDynamicMappingException.java +++ b/src/main/java/org/elasticsearch/index/mapper/StrictDynamicMappingException.java @@ -24,8 +24,8 @@ import org.elasticsearch.rest.RestStatus; */ public class StrictDynamicMappingException extends MapperParsingException { - public StrictDynamicMappingException(String path, String fieldName, boolean mappingsModified) { - super("mapping set to strict, dynamic introduction of [" + fieldName + "] within [" + path + "] is not allowed", mappingsModified); + public StrictDynamicMappingException(String path, String fieldName) { + super("mapping set to strict, dynamic introduction of [" + fieldName + "] within [" + path + "] is not allowed"); } @Override diff --git a/src/main/java/org/elasticsearch/index/mapper/core/AbstractFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/AbstractFieldMapper.java index b68be28e2f1..2e7328c2907 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/AbstractFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/AbstractFieldMapper.java @@ -44,6 +44,7 @@ import org.apache.lucene.search.TermRangeFilter; import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchIllegalArgumentException; +import org.elasticsearch.ElasticsearchIllegalStateException; import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.ImmutableOpenMap; @@ -70,6 +71,7 @@ import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParseContext.Document; import org.elasticsearch.index.mapper.internal.AllFieldMapper; import org.elasticsearch.index.mapper.object.ObjectMapper; +import org.elasticsearch.index.mapper.object.RootObjectMapper; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.search.FieldDataTermsFilter; import org.elasticsearch.index.similarity.SimilarityLookupService; @@ -81,7 +83,6 @@ import java.util.Arrays; import java.util.Comparator; import java.util.List; import java.util.Locale; -import java.util.Map; import java.util.TreeMap; /** @@ -434,7 +435,7 @@ public abstract class AbstractFieldMapper implements FieldMapper { } @Override - public void parse(ParseContext context) throws IOException { + public Mapper parse(ParseContext context) throws IOException { final List fields = new ArrayList<>(2); try { parseCreateField(context, fields); @@ -447,12 +448,13 @@ public abstract class AbstractFieldMapper implements FieldMapper { } } } catch (Exception e) { - throw new MapperParsingException("failed to parse [" + names.fullName() + "]", e, context.mappingsModified()); + throw new MapperParsingException("failed to parse [" + names.fullName() + "]", e); } multiFields.parse(this, context); if (copyTo != null) { copyTo.parse(context); } + return null; } /** @@ -968,7 +970,7 @@ public abstract class AbstractFieldMapper implements FieldMapper { // first add all field mappers if (newFieldMappers != null) { - mergeContext.docMapper().addFieldMappers(newFieldMappers); + mergeContext.addFieldMappers(newFieldMappers); } // now publish mappers if (newMappersBuilder != null) { @@ 
-1089,54 +1091,41 @@ public abstract class AbstractFieldMapper implements FieldMapper { // The path of the dest field might be completely different from the current one so we need to reset it context = context.overridePath(new ContentPath(0)); + ObjectMapper mapper = context.root(); + String objectPath = ""; + String fieldPath = field; int posDot = field.lastIndexOf('.'); if (posDot > 0) { - // Compound name - String objectPath = field.substring(0, posDot); - String fieldPath = field.substring(posDot + 1); - ObjectMapper mapper = context.docMapper().objectMappers().get(objectPath); - if (mapper == null) { - //TODO: Create an object dynamically? - throw new MapperParsingException("attempt to copy value to non-existing object [" + field + "]", context.mappingsModified()); - } - + objectPath = field.substring(0, posDot); context.path().add(objectPath); - - // We might be in dynamically created field already, so need to clean withinNewMapper flag - // and then restore it, so we wouldn't miss new mappers created from copy_to fields - boolean origWithinNewMapper = context.isWithinNewMapper(); - context.clearWithinNewMapper(); - - try { - mapper.parseDynamicValue(context, fieldPath, context.parser().currentToken()); - } finally { - if (origWithinNewMapper) { - context.setWithinNewMapper(); - } else { - context.clearWithinNewMapper(); - } - } - - } else { - // We might be in dynamically created field already, so need to clean withinNewMapper flag - // and then restore it, so we wouldn't miss new mappers created from copy_to fields - boolean origWithinNewMapper = context.isWithinNewMapper(); - context.clearWithinNewMapper(); - try { - context.docMapper().root().parseDynamicValue(context, field, context.parser().currentToken()); - } finally { - if (origWithinNewMapper) { - context.setWithinNewMapper(); - } else { - context.clearWithinNewMapper(); - } - } - + mapper = context.docMapper().objectMappers().get(objectPath); + fieldPath = field.substring(posDot + 1); } + if (mapper == null) { + //TODO: Create an object dynamically? 
+ throw new MapperParsingException("attempt to copy value to non-existing object [" + field + "]"); + } + ObjectMapper update = mapper.parseDynamicValue(context, fieldPath, context.parser().currentToken()); + assert update != null; // we are parsing a dynamic value so we necessarily created a new mapping + + // propagate the update to the root + while (objectPath.length() > 0) { + String parentPath = ""; + ObjectMapper parent = context.root(); + posDot = objectPath.lastIndexOf('.'); + if (posDot > 0) { + parentPath = objectPath.substring(0, posDot); + parent = context.docMapper().objectMappers().get(parentPath); + } + if (parent == null) { + throw new ElasticsearchIllegalStateException("[" + objectPath + "] has no parent for path [" + parentPath + "]"); + } + update = parent.mappingUpdate(update); + objectPath = parentPath; + } + context.addRootObjectUpdate((RootObjectMapper) update); } } - - } /** diff --git a/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java index 03792a11bd3..7e037bd533c 100644 --- a/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/core/CompletionFieldMapper.java @@ -266,7 +266,7 @@ public class CompletionFieldMapper extends AbstractFieldMapper { } @Override - public void parse(ParseContext context) throws IOException { + public Mapper parse(ParseContext context) throws IOException { XContentParser parser = context.parser(); XContentParser.Token token = parser.currentToken(); @@ -382,6 +382,7 @@ public class CompletionFieldMapper extends AbstractFieldMapper { context.doc().add(getCompletionField(ctx, input, suggestPayload)); } } + return null; } private void checkWeight(long weight) { diff --git a/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java index 8d2460bffdd..f7a39b2c952 100644 --- a/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java @@ -515,7 +515,7 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implement } @Override - public void parse(ParseContext context) throws IOException { + public Mapper parse(ParseContext context) throws IOException { ContentPath.Type origPathType = context.path().pathType(); context.path().pathType(pathType); context.path().add(name()); @@ -565,6 +565,7 @@ public class GeoPointFieldMapper extends AbstractFieldMapper implement context.path().remove(); context.path().pathType(origPathType); + return null; } private void parseGeohashField(ParseContext context, String geohash) throws IOException { diff --git a/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java index a96a84550f7..896185f39f6 100644 --- a/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/geo/GeoShapeFieldMapper.java @@ -237,19 +237,19 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper { } @Override - public void parse(ParseContext context) throws IOException { + public Mapper parse(ParseContext context) throws IOException { try { Shape shape = context.parseExternalValue(Shape.class); if (shape == null) { ShapeBuilder shapeBuilder = ShapeBuilder.parse(context.parser(), this); if (shapeBuilder == 
null) { - return; + return null; } shape = shapeBuilder.build(); } Field[] fields = defaultStrategy.createIndexableFields(shape); if (fields == null || fields.length == 0) { - return; + return null; } for (Field field : fields) { if (!customBoost()) { @@ -262,6 +262,7 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper { } catch (Exception e) { throw new MapperParsingException("failed to parse [" + names.fullName() + "]", e); } + return null; } @Override diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java index 98e64ccddae..7fd2d5859ea 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/AllFieldMapper.java @@ -25,7 +25,6 @@ import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Term; import org.apache.lucene.search.Query; -import org.apache.lucene.search.TermQuery; import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; @@ -33,9 +32,7 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.all.AllField; import org.elasticsearch.common.lucene.all.AllTermQuery; -import org.elasticsearch.common.settings.ImmutableSettings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.settings.loader.SettingsLoader; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.fielddata.FieldDataType; @@ -97,9 +94,6 @@ public class AllFieldMapper extends AbstractFieldMapper implements Inter private EnabledAttributeMapper enabled = Defaults.ENABLED; - // an internal flag, automatically set if we encounter boosting - boolean autoBoost = false; - public Builder() { super(Defaults.NAME, new FieldType(Defaults.FIELD_TYPE)); builder = this; @@ -120,7 +114,7 @@ public class AllFieldMapper extends AbstractFieldMapper implements Inter } fieldType.setTokenized(true); - return new AllFieldMapper(name, fieldType, indexAnalyzer, searchAnalyzer, enabled, autoBoost, similarity, normsLoading, fieldDataSettings, context.indexSettings()); + return new AllFieldMapper(name, fieldType, indexAnalyzer, searchAnalyzer, enabled, similarity, normsLoading, fieldDataSettings, context.indexSettings()); } } @@ -154,8 +148,8 @@ public class AllFieldMapper extends AbstractFieldMapper implements Inter if (fieldName.equals("enabled")) { builder.enabled(nodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED); iterator.remove(); - } else if (fieldName.equals("auto_boost")) { - builder.autoBoost = nodeBooleanValue(fieldNode); + } else if (fieldName.equals("auto_boost") && parserContext.indexVersionCreated().before(Version.V_2_0_0)) { + // Old 1.x setting which is now ignored iterator.remove(); } } @@ -165,24 +159,17 @@ public class AllFieldMapper extends AbstractFieldMapper implements Inter private EnabledAttributeMapper enabledState; - // The autoBoost flag is automatically set based on indexed docs on the mappings - // if a doc is indexed with a specific boost value and part of _all, it is automatically - // set to true. 
This allows to optimize (automatically, which we like) for the common case - // where fields don't usually have boost associated with them, and we don't need to use the - // special SpanTermQuery to look at payloads - private volatile boolean autoBoost; public AllFieldMapper(Settings indexSettings) { - this(Defaults.NAME, new FieldType(Defaults.FIELD_TYPE), null, null, Defaults.ENABLED, false, null, null, null, indexSettings); + this(Defaults.NAME, new FieldType(Defaults.FIELD_TYPE), null, null, Defaults.ENABLED, null, null, null, indexSettings); } protected AllFieldMapper(String name, FieldType fieldType, NamedAnalyzer indexAnalyzer, NamedAnalyzer searchAnalyzer, - EnabledAttributeMapper enabled, boolean autoBoost, SimilarityProvider similarity, Loading normsLoading, + EnabledAttributeMapper enabled, SimilarityProvider similarity, Loading normsLoading, @Nullable Settings fieldDataSettings, Settings indexSettings) { super(new Names(name, name, name, name), 1.0f, fieldType, false, indexAnalyzer, searchAnalyzer, similarity, normsLoading, fieldDataSettings, indexSettings); this.enabledState = enabled; - this.autoBoost = autoBoost; } @@ -202,13 +189,7 @@ public class AllFieldMapper extends AbstractFieldMapper implements Inter @Override public Query queryStringTermQuery(Term term) { - if (!autoBoost) { - return new TermQuery(term); - } - if (fieldType.indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0) { - return new AllTermQuery(term); - } - return new TermQuery(term); + return new AllTermQuery(term); } @Override @@ -226,8 +207,9 @@ public class AllFieldMapper extends AbstractFieldMapper implements Inter } @Override - public void parse(ParseContext context) throws IOException { + public Mapper parse(ParseContext context) throws IOException { // we parse in post parse + return null; } @Override @@ -242,14 +224,6 @@ public class AllFieldMapper extends AbstractFieldMapper implements Inter } // reset the entries context.allEntries().reset(); - - // if the autoBoost flag is not set, and we indexed a doc with custom boost, make - // sure to update the flag, and notify mappings on change - if (!autoBoost && context.allEntries().customBoost()) { - autoBoost = true; - context.setMappingsModified(); - } - Analyzer analyzer = findAnalyzer(context); fields.add(new AllField(names.indexName(), context.allEntries(), analyzer, fieldType)); } @@ -305,9 +279,6 @@ public class AllFieldMapper extends AbstractFieldMapper implements Inter if (includeDefaults || enabledState != Defaults.ENABLED) { builder.field("enabled", enabledState.enabled); } - if (includeDefaults || autoBoost != false) { - builder.field("auto_boost", autoBoost); - } if (includeDefaults || fieldType.stored() != Defaults.FIELD_TYPE.stored()) { builder.field("store", fieldType.stored()); } diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java index b38c2186d66..33633d3c06d 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/FieldNamesFieldMapper.java @@ -184,8 +184,9 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper implement } @Override - public void parse(ParseContext context) throws IOException { + public Mapper parse(ParseContext context) throws IOException { // we parse in post parse + return null; } @Override diff --git 
a/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java index 816bb4557c7..6556aa20bff 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java @@ -307,16 +307,11 @@ public class IdFieldMapper extends AbstractFieldMapper implements Intern @Override public void postParse(ParseContext context) throws IOException { if (context.id() == null && !context.sourceToParse().flyweight()) { - throw new MapperParsingException("No id found while parsing the content source", context.mappingsModified()); + throw new MapperParsingException("No id found while parsing the content source"); } // it either get built in the preParse phase, or get parsed... } - @Override - public void parse(ParseContext context) throws IOException { - super.parse(context); - } - @Override public boolean includeInObject() { return true; @@ -329,7 +324,7 @@ public class IdFieldMapper extends AbstractFieldMapper implements Intern // we are in the parse Phase String id = parser.text(); if (context.id() != null && !context.id().equals(id)) { - throw new MapperParsingException("Provided id [" + context.id() + "] does not match the content one [" + id + "]", context.mappingsModified()); + throw new MapperParsingException("Provided id [" + context.id() + "] does not match the content one [" + id + "]"); } context.id(id); } // else we are in the pre/post parse phase diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java index 7bbf155276d..a530102e673 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/IndexFieldMapper.java @@ -166,8 +166,8 @@ public class IndexFieldMapper extends AbstractFieldMapper implements Int } @Override - public void parse(ParseContext context) throws IOException { - + public Mapper parse(ParseContext context) throws IOException { + return null; } @Override diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java index 1d883196189..8aee69f8ba3 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/RoutingFieldMapper.java @@ -182,10 +182,11 @@ public class RoutingFieldMapper extends AbstractFieldMapper implements I } @Override - public void parse(ParseContext context) throws IOException { + public Mapper parse(ParseContext context) throws IOException { // no need ot parse here, we either get the routing in the sourceToParse // or we don't have routing, if we get it in sourceToParse, we process it in preParse // which will always be called + return null; } @Override diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/SizeFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/SizeFieldMapper.java index b6f269d75e1..bd954a8b756 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/SizeFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/SizeFieldMapper.java @@ -134,8 +134,9 @@ public class SizeFieldMapper extends IntegerFieldMapper implements RootMapper { } @Override - public void parse(ParseContext context) throws IOException { + public 
Mapper parse(ParseContext context) throws IOException { // nothing to do here, we call the parent in postParse + return null; } @Override diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java index 21544a9e859..3814ba41ee6 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/SourceFieldMapper.java @@ -251,8 +251,9 @@ public class SourceFieldMapper extends AbstractFieldMapper implements In } @Override - public void parse(ParseContext context) throws IOException { + public Mapper parse(ParseContext context) throws IOException { // nothing to do here, we will call it in pre parse + return null; } @Override diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java index 4f2c57e3e94..10a14169755 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/TTLFieldMapper.java @@ -175,7 +175,7 @@ public class TTLFieldMapper extends LongFieldMapper implements InternalMapper, R } @Override - public void parse(ParseContext context) throws IOException, MapperParsingException { + public Mapper parse(ParseContext context) throws IOException, MapperParsingException { if (context.sourceToParse().ttl() < 0) { // no ttl has been provided externally long ttl; if (context.parser().currentToken() == XContentParser.Token.VALUE_STRING) { @@ -188,6 +188,7 @@ public class TTLFieldMapper extends LongFieldMapper implements InternalMapper, R } context.sourceToParse().ttl(ttl); } + return null; } @Override diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java index f8fa3984ef5..18075ff3866 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/TimestampFieldMapper.java @@ -273,8 +273,9 @@ public class TimestampFieldMapper extends DateFieldMapper implements InternalMap } @Override - public void parse(ParseContext context) throws IOException { + public Mapper parse(ParseContext context) throws IOException { // nothing to do here, we call the parent in preParse + return null; } @Override diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java index df72d57b063..c93a1545aec 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/TypeFieldMapper.java @@ -158,8 +158,9 @@ public class TypeFieldMapper extends AbstractFieldMapper implements Inte } @Override - public void parse(ParseContext context) throws IOException { + public Mapper parse(ParseContext context) throws IOException { // we parse in pre parse + return null; } @Override diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java index 49e6242d0df..d84835d9f3d 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/UidFieldMapper.java @@ -167,8 +167,9 @@ public 
class UidFieldMapper extends AbstractFieldMapper implements Internal } @Override - public void parse(ParseContext context) throws IOException { + public Mapper parse(ParseContext context) throws IOException { // nothing to do here, we either do it in post parse, or in pre parse. + return null; } @Override diff --git a/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java b/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java index 9822e4ede05..4ae3eaa415a 100644 --- a/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/internal/VersionFieldMapper.java @@ -113,8 +113,9 @@ public class VersionFieldMapper extends AbstractFieldMapper implements Int } @Override - public void parse(ParseContext context) throws IOException { + public Mapper parse(ParseContext context) throws IOException { // _version added in preparse + return null; } @Override diff --git a/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java b/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java index 653f9f0804a..dfc4e64b342 100644 --- a/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/object/ObjectMapper.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.mapper.object; import com.google.common.collect.Iterables; + import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.Term; @@ -38,6 +39,7 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.mapper.ContentPath; +import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapperParser; import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.FieldMapperListener; @@ -45,6 +47,7 @@ import org.elasticsearch.index.mapper.InternalMapper; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperBuilders; import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.MapperUtils; import org.elasticsearch.index.mapper.MergeContext; import org.elasticsearch.index.mapper.MergeMappingException; import org.elasticsearch.index.mapper.ObjectMapperListener; @@ -84,7 +87,7 @@ import static org.elasticsearch.index.mapper.core.TypeParsers.parsePathType; /** * */ -public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll { +public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll, Cloneable { public static final String CONTENT_TYPE = "object"; public static final String NESTED_CONTENT_TYPE = "nested"; @@ -370,8 +373,6 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll { private volatile CopyOnWriteHashMap mappers; - private final Object mutex = new Object(); - ObjectMapper(String name, String fullPath, boolean enabled, Nested nested, Dynamic dynamic, ContentPath.Type pathType, Map mappers) { this.name = name; this.fullPath = fullPath; @@ -389,6 +390,28 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll { this.nestedTypeFilter = new TermFilter(new Term(TypeFieldMapper.NAME, nestedTypePathAsBytes)); } + @Override + protected ObjectMapper clone() { + ObjectMapper clone; + try { + clone = (ObjectMapper) super.clone(); + } catch 
(CloneNotSupportedException e) { + throw new RuntimeException(); + } + return clone; + } + + /** + * Build a mapping update with the provided sub mapping update. + */ + public ObjectMapper mappingUpdate(Mapper mapper) { + ObjectMapper mappingUpdate = clone(); + // reset the sub mappers + mappingUpdate.mappers = new CopyOnWriteHashMap<>(); + mappingUpdate.putMapper(mapper); + return mappingUpdate; + } + @Override public String name() { return this.name; @@ -440,14 +463,16 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll { return this.nestedTypeFilter; } - public ObjectMapper putMapper(Mapper mapper) { + /** + * Put a new mapper. + * NOTE: this method must be called under the current {@link DocumentMapper} + * lock if concurrent updates are expected. + */ + public void putMapper(Mapper mapper) { if (mapper instanceof AllFieldMapper.IncludeInAll) { ((AllFieldMapper.IncludeInAll) mapper).includeInAllIfNotSet(includeInAll); } - synchronized (mutex) { - mappers = mappers.copyAndPut(mapper.name(), mapper); - } - return this; + mappers = mappers.copyAndPut(mapper.name(), mapper); } @Override @@ -482,10 +507,10 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll { } @Override - public void parse(ParseContext context) throws IOException { + public ObjectMapper parse(ParseContext context) throws IOException { if (!enabled) { context.parser().skipChildren(); - return; + return null; } XContentParser parser = context.parser(); @@ -493,13 +518,13 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll { XContentParser.Token token = parser.currentToken(); if (token == XContentParser.Token.VALUE_NULL) { // the object is null ("obj1" : null), simply bail - return; + return null; } if (token.isValue() && !allowValue()) { // if we are parsing an object but it is just a value, its only allowed on root level parsers with there // is a field name with the same name as the type - throw new MapperParsingException("object mapping for [" + name + "] tried to parse field [" + currentFieldName + "] as object, but found a concrete value", context.mappingsModified()); + throw new MapperParsingException("object mapping for [" + name + "] tried to parse field [" + currentFieldName + "] as object, but found a concrete value"); } if (nested.isNested()) { @@ -533,21 +558,30 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll { token = parser.nextToken(); } + ObjectMapper update = null; while (token != XContentParser.Token.END_OBJECT) { + ObjectMapper newUpdate = null; if (token == XContentParser.Token.START_OBJECT) { - serializeObject(context, currentFieldName); + newUpdate = serializeObject(context, currentFieldName); } else if (token == XContentParser.Token.START_ARRAY) { - serializeArray(context, currentFieldName); + newUpdate = serializeArray(context, currentFieldName); } else if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (token == XContentParser.Token.VALUE_NULL) { serializeNullValue(context, currentFieldName); } else if (token == null) { - throw new MapperParsingException("object mapping for [" + name + "] tried to parse field [" + currentFieldName + "] as object, but got EOF, has a concrete value been provided to it?", context.mappingsModified()); + throw new MapperParsingException("object mapping for [" + name + "] tried to parse field [" + currentFieldName + "] as object, but got EOF, has a concrete value been provided to it?"); } else if (token.isValue()) { - 
serializeValue(context, currentFieldName, token); + newUpdate = serializeValue(context, currentFieldName, token); } token = parser.nextToken(); + if (newUpdate != null) { + if (update == null) { + update = newUpdate; + } else { + MapperUtils.merge(update, newUpdate); + } + } } // restore the enable path flag context.path().pathType(origPathType); @@ -577,6 +611,7 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll { } } } + return update; } private void serializeNullValue(ParseContext context, String lastFieldName) throws IOException { @@ -585,54 +620,51 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll { if (mapper != null) { if (mapper instanceof FieldMapper) { if (!((FieldMapper) mapper).supportsNullValue()) { - throw new MapperParsingException("no object mapping found for null value in [" + lastFieldName + "]", context.mappingsModified()); + throw new MapperParsingException("no object mapping found for null value in [" + lastFieldName + "]"); } } mapper.parse(context); } else if (dynamic == Dynamic.STRICT) { - throw new StrictDynamicMappingException(fullPath, lastFieldName, context.mappingsModified()); + throw new StrictDynamicMappingException(fullPath, lastFieldName); } } - private void serializeObject(final ParseContext context, String currentFieldName) throws IOException { + private ObjectMapper serializeObject(final ParseContext context, String currentFieldName) throws IOException { if (currentFieldName == null) { - throw new MapperParsingException("object mapping [" + name + "] trying to serialize an object with no field associated with it, current value [" + context.parser().textOrNull() + "]", context.mappingsModified()); + throw new MapperParsingException("object mapping [" + name + "] trying to serialize an object with no field associated with it, current value [" + context.parser().textOrNull() + "]"); } context.path().add(currentFieldName); + ObjectMapper update = null; Mapper objectMapper = mappers.get(currentFieldName); if (objectMapper != null) { - objectMapper.parse(context); + final Mapper subUpdate = objectMapper.parse(context); + if (subUpdate != null) { + // propagate mapping update + update = mappingUpdate(subUpdate); + } } else { Dynamic dynamic = this.dynamic; if (dynamic == null) { dynamic = context.root().dynamic(); } if (dynamic == Dynamic.STRICT) { - throw new StrictDynamicMappingException(fullPath, currentFieldName, context.mappingsModified()); + throw new StrictDynamicMappingException(fullPath, currentFieldName); } else if (dynamic == Dynamic.TRUE) { - // we sync here just so we won't add it twice. Its not the end of the world - // to sync here since next operations will get it before - synchronized (mutex) { - objectMapper = mappers.get(currentFieldName); - if (objectMapper == null) { - // remove the current field name from path, since template search and the object builder add it as well... 
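// Editor's note: the hunks above change Mapper#parse to return a mapping update instead of mutating
// shared mapper state, and ObjectMapper#mappingUpdate wraps a child's update into a copy of the parent
// that carries only the changed sub-tree. The following is a minimal, self-contained sketch of that
// pattern; ObjectMapperUpdateSketch is a hypothetical stand-in, not an Elasticsearch type, and the real
// ObjectMapper uses its own CopyOnWriteHashMap rather than java.util.HashMap.
import java.util.HashMap;
import java.util.Map;

class ObjectMapperUpdateSketch implements Cloneable {
    final String name;
    Map<String, ObjectMapperUpdateSketch> children = new HashMap<>();

    ObjectMapperUpdateSketch(String name) { this.name = name; }

    @Override
    protected ObjectMapperUpdateSketch clone() {
        try {
            ObjectMapperUpdateSketch copy = (ObjectMapperUpdateSketch) super.clone();
            copy.children = new HashMap<>(children); // shallow copy; child mappers themselves are shared
            return copy;
        } catch (CloneNotSupportedException e) {
            throw new AssertionError(e);
        }
    }

    // Mirrors ObjectMapper#mappingUpdate: a copy of this mapper whose only child is the changed sub-mapper.
    ObjectMapperUpdateSketch mappingUpdate(ObjectMapperUpdateSketch sub) {
        ObjectMapperUpdateSketch update = clone();
        update.children = new HashMap<>();
        update.children.put(sub.name, sub);
        return update;
    }

    public static void main(String[] args) {
        ObjectMapperUpdateSketch root = new ObjectMapperUpdateSketch("root");
        ObjectMapperUpdateSketch discovered = new ObjectMapperUpdateSketch("new_field");
        ObjectMapperUpdateSketch update = root.mappingUpdate(discovered);
        System.out.println(update.children.keySet()); // [new_field]: only the changed sub-tree travels upwards
    }
}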
- context.path().remove(); - Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "object"); - if (builder == null) { - builder = MapperBuilders.object(currentFieldName).enabled(true).pathType(pathType); - // if this is a non root object, then explicitly set the dynamic behavior if set - if (!(this instanceof RootObjectMapper) && this.dynamic != Defaults.DYNAMIC) { - ((Builder) builder).dynamic(this.dynamic); - } - } - BuilderContext builderContext = new BuilderContext(context.indexSettings(), context.path()); - objectMapper = builder.build(builderContext); - putDynamicMapper(context, currentFieldName, objectMapper); - } else { - objectMapper.parse(context); + // remove the current field name from path, since template search and the object builder add it as well... + context.path().remove(); + Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "object"); + if (builder == null) { + builder = MapperBuilders.object(currentFieldName).enabled(true).pathType(pathType); + // if this is a non root object, then explicitly set the dynamic behavior if set + if (!(this instanceof RootObjectMapper) && this.dynamic != Defaults.DYNAMIC) { + ((Builder) builder).dynamic(this.dynamic); } } + BuilderContext builderContext = new BuilderContext(context.indexSettings(), context.path()); + objectMapper = builder.build(builderContext); + context.path().add(currentFieldName); + update = mappingUpdate(MapperUtils.parseAndMergeUpdate(objectMapper, context)); } else { // not dynamic, read everything up to end object context.parser().skipChildren(); @@ -640,9 +672,10 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll { } context.path().remove(); + return update; } - private void serializeArray(ParseContext context, String lastFieldName) throws IOException { + private ObjectMapper serializeArray(ParseContext context, String lastFieldName) throws IOException { String arrayFieldName = lastFieldName; Mapper mapper = mappers.get(lastFieldName); if (mapper != null) { @@ -650,9 +683,15 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll { // expects an array, if so we pass the context straight to the mapper and if not // we serialize the array components if (mapper instanceof ArrayValueMapperParser) { - mapper.parse(context); + final Mapper subUpdate = mapper.parse(context); + if (subUpdate != null) { + // propagate the mapping update + return mappingUpdate(subUpdate); + } else { + return null; + } } else { - serializeNonDynamicArray(context, lastFieldName, arrayFieldName); + return serializeNonDynamicArray(context, lastFieldName, arrayFieldName); } } else { @@ -661,278 +700,217 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll { dynamic = context.root().dynamic(); } if (dynamic == Dynamic.STRICT) { - throw new StrictDynamicMappingException(fullPath, arrayFieldName, context.mappingsModified()); + throw new StrictDynamicMappingException(fullPath, arrayFieldName); } else if (dynamic == Dynamic.TRUE) { - // we sync here just so we won't add it twice. 
Its not the end of the world - // to sync here since next operations will get it before - synchronized (mutex) { - mapper = mappers.get(arrayFieldName); - if (mapper == null) { - Mapper.Builder builder = context.root().findTemplateBuilder(context, arrayFieldName, "object"); - if (builder == null) { - serializeNonDynamicArray(context, lastFieldName, arrayFieldName); - return; - } - BuilderContext builderContext = new BuilderContext(context.indexSettings(), context.path()); - mapper = builder.build(builderContext); - if (mapper != null && mapper instanceof ArrayValueMapperParser) { - putDynamicMapper(context, arrayFieldName, mapper); - } else { - serializeNonDynamicArray(context, lastFieldName, arrayFieldName); - } - } else { - - serializeNonDynamicArray(context, lastFieldName, arrayFieldName); - } + Mapper.Builder builder = context.root().findTemplateBuilder(context, arrayFieldName, "object"); + if (builder == null) { + return serializeNonDynamicArray(context, lastFieldName, arrayFieldName); + } + BuilderContext builderContext = new BuilderContext(context.indexSettings(), context.path()); + mapper = builder.build(builderContext); + if (mapper != null && mapper instanceof ArrayValueMapperParser) { + context.path().add(arrayFieldName); + mapper = MapperUtils.parseAndMergeUpdate(mapper, context); + return mappingUpdate(mapper); + } else { + return serializeNonDynamicArray(context, lastFieldName, arrayFieldName); } } else { - - serializeNonDynamicArray(context, lastFieldName, arrayFieldName); + return serializeNonDynamicArray(context, lastFieldName, arrayFieldName); } } } - private void putDynamicMapper(ParseContext context, String arrayFieldName, Mapper mapper) throws IOException { - // ...now re add it - context.path().add(arrayFieldName); - context.setMappingsModified(); - - if (context.isWithinNewMapper()) { - // within a new mapper, no need to traverse, - // just parse - mapper.parse(context); - } else { - // create a context of new mapper, so we batch - // aggregate all the changes within - // this object mapper once, and traverse all of - // them to add them in a single go - context.setWithinNewMapper(); - try { - mapper.parse(context); - FieldMapperListener.Aggregator newFields = new FieldMapperListener.Aggregator(); - ObjectMapperListener.Aggregator newObjects = new ObjectMapperListener.Aggregator(); - mapper.traverse(newFields); - mapper.traverse(newObjects); - // callback on adding those fields! 
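// Editor's note: the removed synchronized(mutex) blocks and putDynamicMapper above relied on an
// ObjectMapper-local lock; after this change putMapper documents that writers must hold the
// DocumentMapper lock, while readers only ever see immutable snapshots of the mappers map. A minimal
// sketch of that copy-on-write idea using plain JDK maps (the real class is an Elasticsearch-internal
// CopyOnWriteHashMap, so this is only an analogy, not its API):
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

class CopyOnWriteMappersSketch {
    // Readers only ever observe a fully built, immutable snapshot.
    private volatile Map<String, String> mappers = Collections.emptyMap();

    // Callers are expected to hold an external lock (the DocumentMapper lock in the real code),
    // which is why the per-object mutex could be dropped.
    void putMapper(String name, String mapper) {
        Map<String, String> copy = new HashMap<>(mappers);
        copy.put(name, mapper);
        mappers = Collections.unmodifiableMap(copy);
    }

    String getMapper(String name) {
        return mappers.get(name); // lock-free read
    }

    public static void main(String[] args) {
        CopyOnWriteMappersSketch registry = new CopyOnWriteMappersSketch();
        registry.putMapper("title", "string");
        System.out.println(registry.getMapper("title")); // string
    }
}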
- context.docMapper().addFieldMappers(newFields.mappers); - context.docMapper().addObjectMappers(newObjects.mappers); - } finally { - context.clearWithinNewMapper(); - } - } - - // only put after we traversed and did the - // callbacks, so other parsing won't see it only - // after we - // properly traversed it and adding the mappers - putMapper(mapper); - } - - private void serializeNonDynamicArray(ParseContext context, String lastFieldName, String arrayFieldName) throws IOException { + private ObjectMapper serializeNonDynamicArray(ParseContext context, String lastFieldName, String arrayFieldName) throws IOException { XContentParser parser = context.parser(); XContentParser.Token token; while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { if (token == XContentParser.Token.START_OBJECT) { - serializeObject(context, lastFieldName); + return serializeObject(context, lastFieldName); } else if (token == XContentParser.Token.START_ARRAY) { - serializeArray(context, lastFieldName); + return serializeArray(context, lastFieldName); } else if (token == XContentParser.Token.FIELD_NAME) { lastFieldName = parser.currentName(); } else if (token == XContentParser.Token.VALUE_NULL) { serializeNullValue(context, lastFieldName); } else if (token == null) { - throw new MapperParsingException("object mapping for [" + name + "] with array for [" + arrayFieldName + "] tried to parse as array, but got EOF, is there a mismatch in types for the same field?", context.mappingsModified()); + throw new MapperParsingException("object mapping for [" + name + "] with array for [" + arrayFieldName + "] tried to parse as array, but got EOF, is there a mismatch in types for the same field?"); } else { - serializeValue(context, lastFieldName, token); + return serializeValue(context, lastFieldName, token); } } + return null; } - private void serializeValue(final ParseContext context, String currentFieldName, XContentParser.Token token) throws IOException { + private ObjectMapper serializeValue(final ParseContext context, String currentFieldName, XContentParser.Token token) throws IOException { if (currentFieldName == null) { - throw new MapperParsingException("object mapping [" + name + "] trying to serialize a value with no field associated with it, current value [" + context.parser().textOrNull() + "]", context.mappingsModified()); + throw new MapperParsingException("object mapping [" + name + "] trying to serialize a value with no field associated with it, current value [" + context.parser().textOrNull() + "]"); } Mapper mapper = mappers.get(currentFieldName); if (mapper != null) { - mapper.parse(context); + Mapper subUpdate = mapper.parse(context); + if (subUpdate == null) { + return null; + } + return mappingUpdate(subUpdate); } else { - parseDynamicValue(context, currentFieldName, token); + return parseDynamicValue(context, currentFieldName, token); } } - public void parseDynamicValue(final ParseContext context, String currentFieldName, XContentParser.Token token) throws IOException { + public ObjectMapper parseDynamicValue(final ParseContext context, String currentFieldName, XContentParser.Token token) throws IOException { Dynamic dynamic = this.dynamic; if (dynamic == null) { dynamic = context.root().dynamic(); } if (dynamic == Dynamic.STRICT) { - throw new StrictDynamicMappingException(fullPath, currentFieldName, context.mappingsModified()); + throw new StrictDynamicMappingException(fullPath, currentFieldName); } if (dynamic == Dynamic.FALSE) { - return; + return null; } - // we sync here since 
we don't want to add this field twice to the document mapper - // its not the end of the world, since we add it to the mappers once we create it - // so next time we won't even get here for this field - synchronized (mutex) { - Mapper mapper = mappers.get(currentFieldName); - if (mapper == null) { - BuilderContext builderContext = new BuilderContext(context.indexSettings(), context.path()); - if (token == XContentParser.Token.VALUE_STRING) { - boolean resolved = false; + Mapper mapper = null; + BuilderContext builderContext = new BuilderContext(context.indexSettings(), context.path()); + if (token == XContentParser.Token.VALUE_STRING) { + boolean resolved = false; - // do a quick test to see if its fits a dynamic template, if so, use it. - // we need to do it here so we can handle things like attachment templates, where calling - // text (to see if its a date) causes the binary value to be cleared - if (!resolved) { - Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "string", null); - if (builder != null) { - mapper = builder.build(builderContext); - resolved = true; - } - } + // do a quick test to see if its fits a dynamic template, if so, use it. + // we need to do it here so we can handle things like attachment templates, where calling + // text (to see if its a date) causes the binary value to be cleared + if (!resolved) { + Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "string", null); + if (builder != null) { + mapper = builder.build(builderContext); + resolved = true; + } + } - if (!resolved && context.root().dateDetection()) { - String text = context.parser().text(); - // a safe check since "1" gets parsed as well - if (Strings.countOccurrencesOf(text, ":") > 1 || Strings.countOccurrencesOf(text, "-") > 1 || Strings.countOccurrencesOf(text, "/") > 1) { - for (FormatDateTimeFormatter dateTimeFormatter : context.root().dynamicDateTimeFormatters()) { - try { - dateTimeFormatter.parser().parseMillis(text); - Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "date"); - if (builder == null) { - builder = dateField(currentFieldName).dateTimeFormatter(dateTimeFormatter); - } - mapper = builder.build(builderContext); - resolved = true; - break; - } catch (Exception e) { - // failure to parse this, continue - } - } - } - } - if (!resolved && context.root().numericDetection()) { - String text = context.parser().text(); + if (!resolved && context.root().dateDetection()) { + String text = context.parser().text(); + // a safe check since "1" gets parsed as well + if (Strings.countOccurrencesOf(text, ":") > 1 || Strings.countOccurrencesOf(text, "-") > 1 || Strings.countOccurrencesOf(text, "/") > 1) { + for (FormatDateTimeFormatter dateTimeFormatter : context.root().dynamicDateTimeFormatters()) { try { - Long.parseLong(text); - Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "long"); + dateTimeFormatter.parser().parseMillis(text); + Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "date"); if (builder == null) { - builder = longField(currentFieldName); + builder = dateField(currentFieldName).dateTimeFormatter(dateTimeFormatter); } mapper = builder.build(builderContext); resolved = true; + break; } catch (Exception e) { - // not a long number - } - if (!resolved) { - try { - Double.parseDouble(text); - Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double"); - if 
(builder == null) { - builder = doubleField(currentFieldName); - } - mapper = builder.build(builderContext); - resolved = true; - } catch (Exception e) { - // not a long number - } + // failure to parse this, continue } } - if (!resolved) { - Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "string"); - if (builder == null) { - builder = stringField(currentFieldName); - } - mapper = builder.build(builderContext); + } + } + if (!resolved && context.root().numericDetection()) { + String text = context.parser().text(); + try { + Long.parseLong(text); + Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "long"); + if (builder == null) { + builder = longField(currentFieldName); } - } else if (token == XContentParser.Token.VALUE_NUMBER) { - XContentParser.NumberType numberType = context.parser().numberType(); - if (numberType == XContentParser.NumberType.INT) { - if (context.parser().estimatedNumberType()) { - Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "long"); - if (builder == null) { - builder = longField(currentFieldName); - } - mapper = builder.build(builderContext); - } else { - Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "integer"); - if (builder == null) { - builder = integerField(currentFieldName); - } - mapper = builder.build(builderContext); - } - } else if (numberType == XContentParser.NumberType.LONG) { - Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "long"); - if (builder == null) { - builder = longField(currentFieldName); - } - mapper = builder.build(builderContext); - } else if (numberType == XContentParser.NumberType.FLOAT) { - if (context.parser().estimatedNumberType()) { - Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double"); - if (builder == null) { - builder = doubleField(currentFieldName); - } - mapper = builder.build(builderContext); - } else { - Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "float"); - if (builder == null) { - builder = floatField(currentFieldName); - } - mapper = builder.build(builderContext); - } - } else if (numberType == XContentParser.NumberType.DOUBLE) { + mapper = builder.build(builderContext); + resolved = true; + } catch (Exception e) { + // not a long number + } + if (!resolved) { + try { + Double.parseDouble(text); Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double"); if (builder == null) { builder = doubleField(currentFieldName); } mapper = builder.build(builderContext); + resolved = true; + } catch (Exception e) { + // not a long number } - } else if (token == XContentParser.Token.VALUE_BOOLEAN) { - Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "boolean"); + } + } + if (!resolved) { + Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "string"); + if (builder == null) { + builder = stringField(currentFieldName); + } + mapper = builder.build(builderContext); + } + } else if (token == XContentParser.Token.VALUE_NUMBER) { + XContentParser.NumberType numberType = context.parser().numberType(); + if (numberType == XContentParser.NumberType.INT) { + if (context.parser().estimatedNumberType()) { + Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "long"); if (builder == null) { - builder = 
booleanField(currentFieldName); - } - mapper = builder.build(builderContext); - } else if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) { - Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "binary"); - if (builder == null) { - builder = binaryField(currentFieldName); + builder = longField(currentFieldName); } mapper = builder.build(builderContext); } else { - Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, null); - if (builder != null) { - mapper = builder.build(builderContext); - } else { - // TODO how do we identify dynamically that its a binary value? - throw new ElasticsearchIllegalStateException("Can't handle serializing a dynamic type with content token [" + token + "] and field name [" + currentFieldName + "]"); + Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "integer"); + if (builder == null) { + builder = integerField(currentFieldName); } + mapper = builder.build(builderContext); } - - if (context.isWithinNewMapper()) { - mapper.parse(context); + } else if (numberType == XContentParser.NumberType.LONG) { + Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "long"); + if (builder == null) { + builder = longField(currentFieldName); + } + mapper = builder.build(builderContext); + } else if (numberType == XContentParser.NumberType.FLOAT) { + if (context.parser().estimatedNumberType()) { + Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double"); + if (builder == null) { + builder = doubleField(currentFieldName); + } + mapper = builder.build(builderContext); } else { - context.setWithinNewMapper(); - try { - mapper.parse(context); - FieldMapperListener.Aggregator newFields = new FieldMapperListener.Aggregator(); - mapper.traverse(newFields); - context.docMapper().addFieldMappers(newFields.mappers); - } finally { - context.clearWithinNewMapper(); + Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "float"); + if (builder == null) { + builder = floatField(currentFieldName); } + mapper = builder.build(builderContext); } - - // only put after we traversed and did the callbacks, so other parsing won't see it only after we - // properly traversed it and adding the mappers - putMapper(mapper); - context.setMappingsModified(); + } else if (numberType == XContentParser.NumberType.DOUBLE) { + Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double"); + if (builder == null) { + builder = doubleField(currentFieldName); + } + mapper = builder.build(builderContext); + } + } else if (token == XContentParser.Token.VALUE_BOOLEAN) { + Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "boolean"); + if (builder == null) { + builder = booleanField(currentFieldName); + } + mapper = builder.build(builderContext); + } else if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) { + Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "binary"); + if (builder == null) { + builder = binaryField(currentFieldName); + } + mapper = builder.build(builderContext); + } else { + Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, null); + if (builder != null) { + mapper = builder.build(builderContext); } else { - mapper.parse(context); + // TODO how do we identify dynamically that its a binary value? 
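// Editor's note: parseDynamicValue above resolves an unmapped string value by trying, in order, a
// matching dynamic template, date detection, numeric detection, and finally a plain string mapping.
// The standalone sketch below illustrates that resolution order only; detectType and looksLikeDate are
// hypothetical helpers, the dynamic-template step is omitted, and the real code builds Mapper.Builder
// instances rather than returning type names.
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;

class DynamicTypeSketch {
    static String detectType(String text, boolean dateDetection, boolean numericDetection) {
        if (dateDetection && looksLikeDate(text)) {
            return "date";
        }
        if (numericDetection) {
            try { Long.parseLong(text); return "long"; } catch (NumberFormatException ignored) {}
            try { Double.parseDouble(text); return "double"; } catch (NumberFormatException ignored) {}
        }
        return "string";
    }

    static boolean looksLikeDate(String text) {
        // the real mapper also requires more than one ':', '-' or '/' before trying its date formatters
        try {
            LocalDateTime.parse(text, DateTimeFormatter.ISO_LOCAL_DATE_TIME);
            return true;
        } catch (Exception e) {
            return false;
        }
    }

    public static void main(String[] args) {
        System.out.println(detectType("2015-04-20T10:00:00", true, true)); // date
        System.out.println(detectType("42", true, true));                  // long
        System.out.println(detectType("42.5", true, true));                // double
        System.out.println(detectType("forty two", true, true));           // string
    }
}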
+ throw new ElasticsearchIllegalStateException("Can't handle serializing a dynamic type with content token [" + token + "] and field name [" + currentFieldName + "]"); } } + + mapper = MapperUtils.parseAndMergeUpdate(mapper, context); + + ObjectMapper update = null; + if (mapper != null) { + update = mappingUpdate(mapper); + } + return update; } @Override @@ -966,33 +944,30 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll { List mappersToPut = new ArrayList<>(); FieldMapperListener.Aggregator newFieldMappers = new FieldMapperListener.Aggregator(); ObjectMapperListener.Aggregator newObjectMappers = new ObjectMapperListener.Aggregator(); - synchronized (mutex) { - for (Mapper mapper : mergeWithObject.mappers.values()) { - Mapper mergeWithMapper = mapper; - Mapper mergeIntoMapper = mappers.get(mergeWithMapper.name()); - if (mergeIntoMapper == null) { - // no mapping, simply add it if not simulating - if (!mergeContext.mergeFlags().simulate()) { - mappersToPut.add(mergeWithMapper); - mergeWithMapper.traverse(newFieldMappers); - mergeWithMapper.traverse(newObjectMappers); - } - } else { - mergeIntoMapper.merge(mergeWithMapper, mergeContext); + for (Mapper mapper : mergeWithObject.mappers.values()) { + Mapper mergeWithMapper = mapper; + Mapper mergeIntoMapper = mappers.get(mergeWithMapper.name()); + if (mergeIntoMapper == null) { + // no mapping, simply add it if not simulating + if (!mergeContext.mergeFlags().simulate()) { + mappersToPut.add(mergeWithMapper); + mergeWithMapper.traverse(newFieldMappers); + mergeWithMapper.traverse(newObjectMappers); } - } - if (!newFieldMappers.mappers.isEmpty()) { - mergeContext.docMapper().addFieldMappers(newFieldMappers.mappers); - } - if (!newObjectMappers.mappers.isEmpty()) { - mergeContext.docMapper().addObjectMappers(newObjectMappers.mappers); - } - // and the mappers only after the administration have been done, so it will not be visible to parser (which first try to read with no lock) - for (Mapper mapper : mappersToPut) { - putMapper(mapper); + } else { + mergeIntoMapper.merge(mergeWithMapper, mergeContext); } } - + if (!newFieldMappers.mappers.isEmpty()) { + mergeContext.addFieldMappers(newFieldMappers.mappers); + } + if (!newObjectMappers.mappers.isEmpty()) { + mergeContext.addObjectMappers(newObjectMappers.mappers); + } + // add the mappers only after the administration have been done, so it will not be visible to parser (which first try to read with no lock) + for (Mapper mapper : mappersToPut) { + putMapper(mapper); + } } protected void doMerge(ObjectMapper mergeWith, MergeContext mergeContext) { diff --git a/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java b/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java index fb188ff0f8f..56d2b96429c 100644 --- a/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java +++ b/src/main/java/org/elasticsearch/index/mapper/object/RootObjectMapper.java @@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper.object; import com.google.common.collect.Lists; import com.google.common.collect.Sets; + import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.joda.FormatDateTimeFormatter; @@ -206,6 +207,14 @@ public class RootObjectMapper extends ObjectMapper { this.numericDetection = numericDetection; } + @Override + public ObjectMapper mappingUpdate(Mapper mapper) { + RootObjectMapper update = (RootObjectMapper) super.mappingUpdate(mapper); + // dynamic templates are 
irrelevant for dynamic mappings updates + update.dynamicTemplates = new DynamicTemplate[0]; + return update; + } + public boolean dateDetection() { return this.dateDetection; } @@ -231,7 +240,7 @@ public class RootObjectMapper extends ObjectMapper { String mappingType = dynamicTemplate.mappingType(dynamicType); Mapper.TypeParser typeParser = parserContext.typeParser(mappingType); if (typeParser == null) { - throw new MapperParsingException("failed to find type parsed [" + mappingType + "] for [" + name + "]", context.mappingsModified()); + throw new MapperParsingException("failed to find type parsed [" + mappingType + "] for [" + name + "]"); } return typeParser.parse(name, dynamicTemplate.mappingForName(name, dynamicType), parserContext); } diff --git a/src/main/java/org/elasticsearch/index/merge/policy/ElasticsearchMergePolicy.java b/src/main/java/org/elasticsearch/index/merge/policy/ElasticsearchMergePolicy.java index d53a809163d..dcd58e40f5a 100644 --- a/src/main/java/org/elasticsearch/index/merge/policy/ElasticsearchMergePolicy.java +++ b/src/main/java/org/elasticsearch/index/merge/policy/ElasticsearchMergePolicy.java @@ -48,7 +48,13 @@ public final class ElasticsearchMergePolicy extends MergePolicy { private static ESLogger logger = Loggers.getLogger(ElasticsearchMergePolicy.class); private final MergePolicy delegate; + + // True if the next merge request should do segment upgrades: private volatile boolean upgradeInProgress; + + // True if the next merge request should only upgrade ancient (an older Lucene major version than current) segments; + private volatile boolean upgradeOnlyAncientSegments; + private static final int MAX_CONCURRENT_UPGRADE_MERGES = 5; /** @param delegate the merge policy to wrap */ @@ -113,6 +119,26 @@ public final class ElasticsearchMergePolicy extends MergePolicy { return upgradedMergeSpecification(delegate.findMerges(mergeTrigger, segmentInfos, writer)); } + private boolean shouldUpgrade(SegmentCommitInfo info) { + org.apache.lucene.util.Version old = info.info.getVersion(); + org.apache.lucene.util.Version cur = Version.CURRENT.luceneVersion; + + // Something seriously wrong if this trips: + assert old.major <= cur.major; + + if (cur.major > old.major) { + // Always upgrade segment if Lucene's major version is too old + return true; + } + if (upgradeOnlyAncientSegments == false && cur.minor > old.minor) { + // If it's only a minor version difference, and we are not upgrading only ancient segments, + // also upgrade: + return true; + } + // Version matches, or segment is not ancient and we are only upgrading ancient segments: + return false; + } + @Override public MergeSpecification findForcedMerges(SegmentInfos segmentInfos, int maxSegmentCount, Map segmentsToMerge, IndexWriter writer) @@ -121,27 +147,35 @@ public final class ElasticsearchMergePolicy extends MergePolicy { if (upgradeInProgress) { MergeSpecification spec = new IndexUpgraderMergeSpecification(); for (SegmentCommitInfo info : segmentInfos) { - org.apache.lucene.util.Version old = info.info.getVersion(); - org.apache.lucene.util.Version cur = Version.CURRENT.luceneVersion; - if (cur.major > old.major || - cur.major == old.major && cur.minor > old.minor) { + + if (shouldUpgrade(info)) { + // TODO: Use IndexUpgradeMergePolicy instead. We should be comparing codecs, // for now we just assume every minor upgrade has a new format. 
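// Editor's note: shouldUpgrade above marks a segment written by an older Lucene major version as
// always needing an upgrade, and a segment that only differs in minor version as needing one unless
// only-ancient-segments mode was requested. A standalone sketch of that decision using plain ints
// instead of org.apache.lucene.util.Version (UpgradeDecisionSketch is hypothetical):
class UpgradeDecisionSketch {
    static boolean shouldUpgrade(int segMajor, int segMinor, int curMajor, int curMinor,
                                 boolean upgradeOnlyAncientSegments) {
        if (curMajor > segMajor) {
            return true;  // ancient segment: always upgrade
        }
        if (!upgradeOnlyAncientSegments && curMinor > segMinor) {
            return true;  // minor-version difference, and we are not restricted to ancient segments
        }
        return false;     // up to date, or non-ancient while only ancient segments were requested
    }

    public static void main(String[] args) {
        System.out.println(shouldUpgrade(4, 10, 5, 0, true));  // true: older major version
        System.out.println(shouldUpgrade(5, 0, 5, 1, true));   // false: only ancient segments requested
        System.out.println(shouldUpgrade(5, 0, 5, 1, false));  // true: minor-version difference
    }
}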
logger.debug("Adding segment " + info.info.name + " to be upgraded"); spec.add(new OneMerge(Lists.newArrayList(info))); } + + // TODO: we could check IndexWriter.getMergingSegments and avoid adding merges that IW will just reject? + if (spec.merges.size() == MAX_CONCURRENT_UPGRADE_MERGES) { // hit our max upgrades, so return the spec. we will get a cascaded call to continue. logger.debug("Returning " + spec.merges.size() + " merges for upgrade"); return spec; } } + // We must have less than our max upgrade merges, so the next return will be our last in upgrading mode. - upgradeInProgress = false; if (spec.merges.isEmpty() == false) { - logger.debug("Return " + spec.merges.size() + " merges for end of upgrade"); + logger.debug("Returning " + spec.merges.size() + " merges for end of upgrade"); return spec; } + + // Only set this once there are 0 segments needing upgrading, because when we return a + // spec, IndexWriter may (silently!) reject that merge if some of the segments we asked + // to be merged were already being (naturally) merged: + upgradeInProgress = false; + // fall through, so when we don't have any segments to upgrade, the delegate policy // has a chance to decide what to do (e.g. collapse the segments to satisfy maxSegmentCount) } @@ -166,8 +200,9 @@ public final class ElasticsearchMergePolicy extends MergePolicy { * {@link IndexWriter#forceMerge} that is handled by this {@link MergePolicy}, as well as * cascading calls made by {@link IndexWriter}. */ - public void setUpgradeInProgress(boolean upgrade) { + public void setUpgradeInProgress(boolean upgrade, boolean onlyAncientSegments) { this.upgradeInProgress = upgrade; + this.upgradeOnlyAncientSegments = onlyAncientSegments; } @Override diff --git a/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/src/main/java/org/elasticsearch/index/shard/IndexShard.java index c05236d87af..1ce97d60d48 100644 --- a/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -457,7 +457,7 @@ public class IndexShard extends AbstractIndexShardComponent { ParsedDocument doc = docMapper.v1().parse(source).setMappingsModified(docMapper); return new Engine.Create(docMapper.v1(), docMapper.v1().uidMapper().term(doc.uid().stringValue()), doc, version, versionType, origin, startTime, canHaveDuplicates, autoGeneratedId); } catch (Throwable t) { - if (docMapper.v2() || (t instanceof MapperParsingException && ((MapperParsingException)t).isMappingsModified())) { + if (docMapper.v2()) { throw new WriteFailureException(t, docMapper.v1().type()); } else { throw t; @@ -493,7 +493,7 @@ public class IndexShard extends AbstractIndexShardComponent { ParsedDocument doc = docMapper.v1().parse(source).setMappingsModified(docMapper); return new Engine.Index(docMapper.v1(), docMapper.v1().uidMapper().term(doc.uid().stringValue()), doc, version, versionType, origin, startTime, canHaveDuplicates); } catch (Throwable t) { - if (docMapper.v2() || (t instanceof MapperParsingException && ((MapperParsingException) t).isMappingsModified())) { + if (docMapper.v2()) { throw new WriteFailureException(t, docMapper.v1().type()); } else { throw t; @@ -702,7 +702,8 @@ public class IndexShard extends AbstractIndexShardComponent { if (logger.isTraceEnabled()) { logger.trace("optimize with {}", optimize); } - engine().forceMerge(optimize.flush(), optimize.maxNumSegments(), optimize.onlyExpungeDeletes(), optimize.upgrade()); + engine().forceMerge(optimize.flush(), optimize.maxNumSegments(), 
optimize.onlyExpungeDeletes(), + optimize.upgrade(), optimize.upgradeOnlyAncientSegments()); } public SnapshotIndexCommit snapshotIndex() throws EngineException { diff --git a/src/main/java/org/elasticsearch/rest/action/admin/indices/upgrade/RestUpgradeAction.java b/src/main/java/org/elasticsearch/rest/action/admin/indices/upgrade/RestUpgradeAction.java index cc9a6281c51..23509582a66 100644 --- a/src/main/java/org/elasticsearch/rest/action/admin/indices/upgrade/RestUpgradeAction.java +++ b/src/main/java/org/elasticsearch/rest/action/admin/indices/upgrade/RestUpgradeAction.java @@ -38,6 +38,7 @@ import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.support.RestBuilderListener; +import java.io.IOException; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; @@ -73,12 +74,10 @@ public class RestUpgradeAction extends BaseRestHandler { public RestResponse buildResponse(IndicesSegmentResponse response, XContentBuilder builder) throws Exception { builder.startObject(); - // TODO: getIndices().values() is what IndecesSegmentsResponse uses, but this will produce different orders with jdk8? + // TODO: getIndices().values() is what IndicesSegmentsResponse uses, but this will produce different orders with jdk8? for (IndexSegments indexSegments : response.getIndices().values()) { - Tuple summary = calculateUpgradeStatus(indexSegments); builder.startObject(indexSegments.getIndex()); - builder.byteSizeField(SIZE_IN_BYTES, SIZE, summary.v1()); - builder.byteSizeField(SIZE_TO_UPGRADE_IN_BYTES, SIZE_TO_UPGRADE, summary.v2()); + buildUpgradeStatus(indexSegments, builder); builder.endObject(); } @@ -92,6 +91,7 @@ public class RestUpgradeAction extends BaseRestHandler { OptimizeRequest optimizeReq = new OptimizeRequest(Strings.splitStringByCommaToArray(request.param("index"))); optimizeReq.flush(true); optimizeReq.upgrade(true); + optimizeReq.upgradeOnlyAncientSegments(request.paramAsBoolean("only_ancient_segments", false)); optimizeReq.maxNumSegments(Integer.MAX_VALUE); // we just want to upgrade the segments, not actually optimize to a single segment client.admin().indices().optimize(optimizeReq, new RestBuilderListener(channel) { @Override @@ -104,15 +104,18 @@ public class RestUpgradeAction extends BaseRestHandler { }); } - Tuple calculateUpgradeStatus(IndexSegments indexSegments) { + void buildUpgradeStatus(IndexSegments indexSegments, XContentBuilder builder) throws IOException { long total_bytes = 0; long to_upgrade_bytes = 0; + long to_upgrade_bytes_ancient = 0; for (IndexShardSegments shard : indexSegments) { for (ShardSegments segs : shard.getShards()) { for (Segment seg : segs.getSegments()) { total_bytes += seg.sizeInBytes; - if (seg.version.major != Version.CURRENT.luceneVersion.major || - seg.version.minor != Version.CURRENT.luceneVersion.minor) { + if (seg.version.major != Version.CURRENT.luceneVersion.major) { + to_upgrade_bytes_ancient += seg.sizeInBytes; + to_upgrade_bytes += seg.sizeInBytes; + } else if (seg.version.minor != Version.CURRENT.luceneVersion.minor) { // TODO: this comparison is bogus! 
it would cause us to upgrade even with the same format // instead, we should check if the codec has changed to_upgrade_bytes += seg.sizeInBytes; @@ -120,11 +123,16 @@ public class RestUpgradeAction extends BaseRestHandler { } } } - return new Tuple<>(total_bytes, to_upgrade_bytes); + + builder.byteSizeField(SIZE_IN_BYTES, SIZE, total_bytes); + builder.byteSizeField(SIZE_TO_UPGRADE_IN_BYTES, SIZE_TO_UPGRADE, to_upgrade_bytes); + builder.byteSizeField(SIZE_TO_UPGRADE_ANCIENT_IN_BYTES, SIZE_TO_UPGRADE_ANCIENT, to_upgrade_bytes_ancient); } static final XContentBuilderString SIZE = new XContentBuilderString("size"); static final XContentBuilderString SIZE_IN_BYTES = new XContentBuilderString("size_in_bytes"); static final XContentBuilderString SIZE_TO_UPGRADE = new XContentBuilderString("size_to_upgrade"); + static final XContentBuilderString SIZE_TO_UPGRADE_ANCIENT = new XContentBuilderString("size_to_upgrade_ancient"); static final XContentBuilderString SIZE_TO_UPGRADE_IN_BYTES = new XContentBuilderString("size_to_upgrade_in_bytes"); + static final XContentBuilderString SIZE_TO_UPGRADE_ANCIENT_IN_BYTES = new XContentBuilderString("size_to_upgrade_ancient_in_bytes"); } diff --git a/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java b/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java index 1afd76b46e0..426030aef2a 100644 --- a/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java +++ b/src/main/java/org/elasticsearch/transport/netty/MessageChannelHandler.java @@ -113,9 +113,11 @@ public class MessageChannelHandler extends SimpleChannelUpstreamHandler { String action = handleRequest(ctx.getChannel(), wrappedStream, requestId, version); if (buffer.readerIndex() != expectedIndexReader) { if (buffer.readerIndex() < expectedIndexReader) { - logger.warn("Message not fully read (request) for [{}] and action [{}], resetting", requestId, action); + logger.warn("Message not fully read (request) for requestId [{}], action [{}], readerIndex [{}] vs expected [{}]; resetting", + requestId, action, buffer.readerIndex(), expectedIndexReader); } else { - logger.warn("Message read past expected size (request) for [{}] and action [{}], resetting", requestId, action); + logger.warn("Message read past expected size (request) for requestId=[{}], action [{}], readerIndex [{}] vs expected [{}]; resetting", + requestId, action, buffer.readerIndex(), expectedIndexReader); } buffer.readerIndex(expectedIndexReader); } diff --git a/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java b/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java index 21e33d65f0d..e5999c69baf 100644 --- a/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java +++ b/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.common.unit; +import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.common.xcontent.XContent; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; @@ -173,6 +174,7 @@ public class FuzzinessTests extends ElasticsearchTestCase { } @Test + @LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/10638") public void testSimilarityToDistance() { assertThat(Fuzziness.fromSimilarity(0.5f).asDistance("ab"), equalTo(1)); assertThat(Fuzziness.fromSimilarity(0.66f).asDistance("abcefg"), equalTo(2)); diff --git a/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java 
b/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 69b5fd8b317..f25f6023bdd 100644 --- a/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -1036,13 +1036,13 @@ public class InternalEngineTests extends ESTestCase { try (Engine.Searcher test = engine.acquireSearcher("test")) { assertEquals(numDocs, test.reader().numDocs()); } - engine.forceMerge(true, 1, false, false); + engine.forceMerge(true, 1, false, false, false); assertEquals(engine.segments(true).size(), 1); ParsedDocument doc = testParsedDocument(Integer.toString(0), Integer.toString(0), "test", null, -1, -1, testDocument(), B_1, false); Engine.Index index = new Engine.Index(null, newUid(Integer.toString(0)), doc); engine.delete(new Engine.Delete(index.type(), index.id(), index.uid())); - engine.forceMerge(true, 10, true, false); //expunge deletes + engine.forceMerge(true, 10, true, false, false); //expunge deletes assertEquals(engine.segments(true).size(), 1); try (Engine.Searcher test = engine.acquireSearcher("test")) { @@ -1053,7 +1053,7 @@ public class InternalEngineTests extends ESTestCase { doc = testParsedDocument(Integer.toString(1), Integer.toString(1), "test", null, -1, -1, testDocument(), B_1, false); index = new Engine.Index(null, newUid(Integer.toString(1)), doc); engine.delete(new Engine.Delete(index.type(), index.id(), index.uid())); - engine.forceMerge(true, 10, false, false); //expunge deletes + engine.forceMerge(true, 10, false, false, false); //expunge deletes assertEquals(engine.segments(true).size(), 1); try (Engine.Searcher test = engine.acquireSearcher("test")) { @@ -1091,7 +1091,7 @@ public class InternalEngineTests extends ESTestCase { engine.refresh("test"); indexed.countDown(); try { - engine.forceMerge(randomBoolean(), 1, false, randomBoolean()); + engine.forceMerge(randomBoolean(), 1, false, randomBoolean(), randomBoolean()); } catch (ForceMergeFailedEngineException ex) { // ok return; @@ -1107,7 +1107,7 @@ public class InternalEngineTests extends ESTestCase { startGun.countDown(); int someIters = randomIntBetween(1, 10); for (int i = 0; i < someIters; i++) { - engine.forceMerge(randomBoolean(), 1, false, randomBoolean()); + engine.forceMerge(randomBoolean(), 1, false, randomBoolean(), randomBoolean()); } indexed.await(); IOUtils.close(engine, translog); @@ -1713,7 +1713,7 @@ public class InternalEngineTests extends ESTestCase { } // Force merge so we know all merges are done before we start deleting: - engine.forceMerge(true, 1, false, false); + engine.forceMerge(true, 1, false, false, false); Searcher s = engine.acquireSearcher("test"); final long version1 = ((DirectoryReader) s.reader()).getVersion(); diff --git a/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java b/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java index e234c5141e4..01d7846740f 100644 --- a/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/all/SimpleAllMapperTests.java @@ -37,23 +37,38 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.mapper.*; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.engine.Engine.Searcher; +import 
org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.DocumentMapperParser; +import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.ParseContext.Document; import org.elasticsearch.index.mapper.internal.IndexFieldMapper; import org.elasticsearch.index.mapper.internal.SizeFieldMapper; import org.elasticsearch.index.mapper.internal.SourceFieldMapper; -import org.elasticsearch.index.mapper.internal.TypeFieldMapper; import org.elasticsearch.test.ElasticsearchSingleNodeTest; import org.hamcrest.Matchers; import org.junit.Test; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import static org.elasticsearch.common.io.Streams.copyToBytesFromClasspath; import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.hamcrest.Matchers.*; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItem; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; /** * @@ -82,7 +97,8 @@ public class SimpleAllMapperTests extends ElasticsearchSingleNodeTest { @Test public void testAllMappersNoBoost() throws Exception { String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/noboost-mapping.json"); - DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping); + IndexService index = createIndex("test"); + DocumentMapper docMapper = index.mapperService().documentMapperParser().parse(mapping); byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json"); Document doc = docMapper.parse(new BytesArray(json)).rootDoc(); AllField field = (AllField) doc.getField("_all"); @@ -93,7 +109,6 @@ public class SimpleAllMapperTests extends ElasticsearchSingleNodeTest { assertThat(allEntries.fields().contains("simple1"), equalTo(true)); FieldMapper mapper = docMapper.mappers().smartNameFieldMapper("_all"); assertThat(field.fieldType().omitNorms(), equalTo(false)); - assertThat(mapper.queryStringTermQuery(new Term("_all", "foobar")), Matchers.instanceOf(TermQuery.class)); } @Test @@ -110,7 +125,7 @@ public class SimpleAllMapperTests extends ElasticsearchSingleNodeTest { assertThat(allEntries.fields().contains("simple1"), equalTo(true)); FieldMapper mapper = docMapper.mappers().smartNameFieldMapper("_all"); assertThat(field.fieldType().omitNorms(), equalTo(false)); - assertThat(mapper.queryStringTermQuery(new Term("_all", "foobar")), Matchers.instanceOf(TermQuery.class)); + assertThat(mapper.queryStringTermQuery(new Term("_all", "foobar")), Matchers.instanceOf(AllTermQuery.class)); } @@ -223,7 +238,6 @@ public class SimpleAllMapperTests extends ElasticsearchSingleNodeTest { boolean omitNorms = false; boolean stored = false; boolean enabled = true; - boolean autoBoost = false; boolean tv_stored = false; boolean tv_payloads = false; boolean tv_offsets = false; @@ -249,9 +263,6 @@ public class SimpleAllMapperTests extends ElasticsearchSingleNodeTest { if (randomBoolean()) { 
booleanOptionList.add(new Tuple<>("enabled", enabled = randomBoolean())); } - if (randomBoolean()) { - booleanOptionList.add(new Tuple<>("auto_boost", autoBoost = randomBoolean())); - } if (randomBoolean()) { booleanOptionList.add(new Tuple<>("store_term_vector_offsets", tv_offsets = randomBoolean())); } @@ -312,14 +323,6 @@ public class SimpleAllMapperTests extends ElasticsearchSingleNodeTest { } else { assertThat(field, nullValue()); } - - Term term = new Term("foo", "bar"); - Query query = builtDocMapper.allFieldMapper().queryStringTermQuery(term); - if (autoBoost) { - assertThat(query, equalTo((Query)new AllTermQuery(term))); - } else { - assertThat(query, equalTo((Query)new TermQuery(term))); - } if (similarity == null || similarity.equals("TF/IDF")) { assertThat(builtDocMapper.allFieldMapper().similarity(), nullValue()); } else { @@ -458,4 +461,19 @@ public class SimpleAllMapperTests extends ElasticsearchSingleNodeTest { assertThat(e.getDetailedMessage(), containsString("[_all] is always tokenized and cannot have doc values")); } } + + public void testAutoBoost() throws Exception { + for (boolean boost : new boolean[] {false, true}) { + String index = "test_" + boost; + IndexService indexService = createIndex(index, client().admin().indices().prepareCreate(index).addMapping("type", "foo", "type=string" + (boost ? ",boost=2" : ""))); + client().prepareIndex(index, "type").setSource("foo", "bar").get(); + client().admin().indices().prepareRefresh(index).get(); + Query query = indexService.mapperService().documentMapper("type").allFieldMapper().termQuery("bar", null); + try (Searcher searcher = indexService.shard(0).acquireSearcher("tests")) { + query = searcher.searcher().rewrite(query); + final Class expected = boost ? AllTermQuery.class : TermQuery.class; + assertThat(query, Matchers.instanceOf(expected)); + } + } + } } diff --git a/src/test/java/org/elasticsearch/index/mapper/dynamic/DynamicMappingTests.java b/src/test/java/org/elasticsearch/index/mapper/dynamic/DynamicMappingTests.java index e5189c4027b..199c30d029a 100644 --- a/src/test/java/org/elasticsearch/index/mapper/dynamic/DynamicMappingTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/dynamic/DynamicMappingTests.java @@ -19,15 +19,31 @@ package org.elasticsearch.index.mapper.dynamic; import com.google.common.base.Predicate; +import com.google.common.collect.ImmutableMap; + +import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.ImmutableSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.IndexService; -import org.elasticsearch.index.mapper.*; +import org.elasticsearch.index.mapper.ContentPath; +import org.elasticsearch.index.mapper.DocumentMapper; +import org.elasticsearch.index.mapper.DocumentMapperParser; +import org.elasticsearch.index.mapper.FieldMappers; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.ParseContext; +import org.elasticsearch.index.mapper.ParsedDocument; 
+import org.elasticsearch.index.mapper.SourceToParse; +import org.elasticsearch.index.mapper.StrictDynamicMappingException; import org.elasticsearch.test.ElasticsearchSingleNodeTest; -import org.junit.Test; import java.io.IOException; import java.util.LinkedHashMap; @@ -39,7 +55,6 @@ import static org.hamcrest.Matchers.nullValue; public class DynamicMappingTests extends ElasticsearchSingleNodeTest { - @Test public void testDynamicTrue() throws IOException { String mapping = jsonBuilder().startObject().startObject("type") .field("dynamic", "true") @@ -60,7 +75,6 @@ public class DynamicMappingTests extends ElasticsearchSingleNodeTest { assertThat(doc.rootDoc().get("field2"), equalTo("value2")); } - @Test public void testDynamicFalse() throws IOException { String mapping = jsonBuilder().startObject().startObject("type") .field("dynamic", "false") @@ -82,7 +96,6 @@ public class DynamicMappingTests extends ElasticsearchSingleNodeTest { } - @Test public void testDynamicStrict() throws IOException { String mapping = jsonBuilder().startObject().startObject("type") .field("dynamic", "strict") @@ -116,7 +129,6 @@ public class DynamicMappingTests extends ElasticsearchSingleNodeTest { } } - @Test public void testDynamicFalseWithInnerObjectButDynamicSetOnRoot() throws IOException { String mapping = jsonBuilder().startObject().startObject("type") .field("dynamic", "false") @@ -140,7 +152,6 @@ public class DynamicMappingTests extends ElasticsearchSingleNodeTest { assertThat(doc.rootDoc().get("obj1.field2"), nullValue()); } - @Test public void testDynamicStrictWithInnerObjectButDynamicSetOnRoot() throws IOException { String mapping = jsonBuilder().startObject().startObject("type") .field("dynamic", "strict") @@ -173,7 +184,6 @@ public class DynamicMappingTests extends ElasticsearchSingleNodeTest { assertTrue(mappers != null && mappers.isEmpty() == false); } - @Test public void testIndexingFailureDoesStillCreateType() throws IOException, InterruptedException { XContentBuilder mapping = jsonBuilder().startObject().startObject("_default_") .field("dynamic", "strict") @@ -202,7 +212,6 @@ public class DynamicMappingTests extends ElasticsearchSingleNodeTest { } - @Test public void testTypeCreatedProperly() throws IOException, InterruptedException { XContentBuilder mapping = jsonBuilder().startObject().startObject("_default_") .field("dynamic", "strict") @@ -243,7 +252,6 @@ public class DynamicMappingTests extends ElasticsearchSingleNodeTest { assertNotNull(getMappingsResponse.getMappings().get("test").get("type")); } - @Test public void testFieldsCreatedWithPartialParsing() throws IOException, InterruptedException { XContentBuilder mapping = jsonBuilder().startObject().startObject("doc") .startObject("properties") @@ -304,4 +312,178 @@ public class DynamicMappingTests extends ElasticsearchSingleNodeTest { } })); } + + private String serialize(ToXContent mapper) throws Exception { + XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); + mapper.toXContent(builder, new ToXContent.MapParams(ImmutableMap.of())); + return builder.endObject().string(); + } + + private Mapper parse(DocumentMapper mapper, DocumentMapperParser parser, XContentBuilder builder) throws Exception { + Settings settings = ImmutableSettings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); + ParseContext.InternalParseContext ctx = new ParseContext.InternalParseContext("test", settings, parser, mapper, new ContentPath(0)); + SourceToParse source = SourceToParse.source(builder.bytes()); + 
ctx.reset(XContentHelper.createParser(source.source()), new ParseContext.Document(), source, null); + assertEquals(XContentParser.Token.START_OBJECT, ctx.parser().nextToken()); + ctx.parser().nextToken(); + return mapper.root().parse(ctx); + } + + public void testDynamicMappingsNotNeeded() throws Exception { + IndexService indexService = createIndex("test"); + DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("foo").field("type", "string").endObject().endObject() + .endObject().string(); + + DocumentMapper mapper = parser.parse(mapping); + Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().field("foo", "bar").endObject()); + // foo is already defined in the mappings + assertNull(update); + } + + public void testField() throws Exception { + IndexService indexService = createIndex("test"); + DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").endObject().endObject() + .endObject().string(); + + DocumentMapper mapper = parser.parse(mapping); + assertEquals(mapping, serialize(mapper)); + + Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().field("foo", "bar").endObject()); + assertNotNull(update); + // original mapping not modified + assertEquals(mapping, serialize(mapper)); + // but we have an update + assertEquals("{\"type\":{\"properties\":{\"foo\":{\"type\":\"string\"}}}}", serialize(update)); + } + + public void testIncremental() throws Exception { + IndexService indexService = createIndex("test"); + DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); + // Make sure that mapping updates are incremental, this is important for performance otherwise + // every new field introduction runs in linear time with the total number of fields + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").startObject("foo").field("type", "string").endObject().endObject() + .endObject().string(); + + DocumentMapper mapper = parser.parse(mapping); + assertEquals(mapping, serialize(mapper)); + + Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().field("foo", "bar").field("bar", "baz").endObject()); + assertNotNull(update); + // original mapping not modified + assertEquals(mapping, serialize(mapper)); + // but we have an update + assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + // foo is NOT in the update + .startObject("bar").field("type", "string").endObject() + .endObject().endObject().string(), serialize(update)); + } + + public void testIntroduceTwoFields() throws Exception { + IndexService indexService = createIndex("test"); + DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").endObject().endObject() + .endObject().string(); + + DocumentMapper mapper = parser.parse(mapping); + assertEquals(mapping, serialize(mapper)); + + Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().field("foo", "bar").field("bar", "baz").endObject()); + assertNotNull(update); + // original mapping not modified + assertEquals(mapping, 
serialize(mapper)); + // but we have an update + assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + .startObject("bar").field("type", "string").endObject() + .startObject("foo").field("type", "string").endObject() + .endObject().endObject().string(), serialize(update)); + } + + public void testObject() throws Exception { + IndexService indexService = createIndex("test"); + DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").endObject().endObject() + .endObject().string(); + + DocumentMapper mapper = parser.parse(mapping); + assertEquals(mapping, serialize(mapper)); + + Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().startObject("foo").startObject("bar").field("baz", "foo").endObject().endObject().endObject()); + assertNotNull(update); + // original mapping not modified + assertEquals(mapping, serialize(mapper)); + // but we have an update + assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + .startObject("foo").startObject("properties").startObject("bar").startObject("properties").startObject("baz").field("type", "string").endObject().endObject().endObject().endObject().endObject() + .endObject().endObject().endObject().string(), serialize(update)); + } + + public void testArray() throws Exception { + IndexService indexService = createIndex("test"); + DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").endObject().endObject() + .endObject().string(); + + DocumentMapper mapper = parser.parse(mapping); + assertEquals(mapping, serialize(mapper)); + + Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().startArray("foo").value("bar").value("baz").endArray().endObject()); + assertNotNull(update); + // original mapping not modified + assertEquals(mapping, serialize(mapper)); + // but we have an update + assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + .startObject("foo").field("type", "string").endObject() + .endObject().endObject().endObject().string(), serialize(update)); + } + + public void testInnerDynamicMapping() throws Exception { + IndexService indexService = createIndex("test"); + DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties") + .startObject("foo").field("type", "object").endObject() + .endObject().endObject().endObject().string(); + + DocumentMapper mapper = parser.parse(mapping); + assertEquals(mapping, serialize(mapper)); + + Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().startObject("foo").startObject("bar").field("baz", "foo").endObject().endObject().endObject()); + assertNotNull(update); + // original mapping not modified + assertEquals(mapping, serialize(mapper)); + // but we have an update + assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + .startObject("foo").startObject("properties").startObject("bar").startObject("properties").startObject("baz").field("type", "string").endObject().endObject().endObject().endObject().endObject() + 
.endObject().endObject().endObject().string(), serialize(update)); + } + + public void testComplexArray() throws Exception { + IndexService indexService = createIndex("test"); + DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") + .startObject("properties").endObject().endObject() + .endObject().string(); + + DocumentMapper mapper = parser.parse(mapping); + assertEquals(mapping, serialize(mapper)); + + Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().startArray("foo") + .startObject().field("bar", "baz").endObject() + .startObject().field("baz", 3).endObject() + .endArray().endObject()); + assertEquals(mapping, serialize(mapper)); + assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties") + .startObject("foo").startObject("properties") + .startObject("bar").field("type", "string").endObject() + .startObject("baz").field("type", "long").endObject() + .endObject().endObject() + .endObject().endObject().endObject().string(), serialize(update)); + } } diff --git a/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java b/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java index 26653c423a5..8df9f1e7d2a 100755 --- a/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java +++ b/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalMapper.java @@ -185,7 +185,7 @@ public class ExternalMapper extends AbstractFieldMapper { } @Override - public void parse(ParseContext context) throws IOException { + public Mapper parse(ParseContext context) throws IOException { byte[] bytes = "Hello world".getBytes(Charset.defaultCharset()); binMapper.parse(context.createExternalValueContext(bytes)); @@ -210,6 +210,7 @@ public class ExternalMapper extends AbstractFieldMapper { if (copyTo != null) { copyTo.parse(context); } + return null; } @Override diff --git a/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalRootMapper.java b/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalRootMapper.java index cb9596917c9..4ec787accb8 100644 --- a/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalRootMapper.java +++ b/src/test/java/org/elasticsearch/index/mapper/externalvalues/ExternalRootMapper.java @@ -39,7 +39,8 @@ public class ExternalRootMapper implements RootMapper { } @Override - public void parse(ParseContext context) throws IOException { + public Mapper parse(ParseContext context) throws IOException { + return null; } @Override diff --git a/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingOnClusterTests.java b/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingOnClusterTests.java index 3c690d97616..496cb58a692 100644 --- a/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingOnClusterTests.java +++ b/src/test/java/org/elasticsearch/index/mapper/update/UpdateMappingOnClusterTests.java @@ -68,7 +68,7 @@ public class UpdateMappingOnClusterTests extends ElasticsearchIntegrationTest { "[_all] has different store_term_vector_payloads values", "[_all] has different analyzer", "[_all] has different similarity"}; - // auto_boost and fielddata and search_analyzer should not report conflict + // fielddata and search_analyzer should not report conflict testConflict(mapping, mappingUpdate, errorMessage); } diff --git 
a/src/test/java/org/elasticsearch/index/mapper/update/all_mapping_create_index.json b/src/test/java/org/elasticsearch/index/mapper/update/all_mapping_create_index.json index bda6b7bf684..2b9c42d50b2 100644 --- a/src/test/java/org/elasticsearch/index/mapper/update/all_mapping_create_index.json +++ b/src/test/java/org/elasticsearch/index/mapper/update/all_mapping_create_index.json @@ -2,7 +2,6 @@ "mappings": { "type": { "_all": { - "auto_boost": true, "store": true, "store_term_vectors": true, "store_term_vector_offsets": true, @@ -29,4 +28,4 @@ } } } -} \ No newline at end of file +} diff --git a/src/test/java/org/elasticsearch/index/mapper/update/all_mapping_update_with_conflicts.json b/src/test/java/org/elasticsearch/index/mapper/update/all_mapping_update_with_conflicts.json index 893804006d4..252aafefb08 100644 --- a/src/test/java/org/elasticsearch/index/mapper/update/all_mapping_update_with_conflicts.json +++ b/src/test/java/org/elasticsearch/index/mapper/update/all_mapping_update_with_conflicts.json @@ -1,7 +1,6 @@ { "type": { "_all": { - "auto_boost": false, "store": false, "enabled": false, "store_term_vectors": false, @@ -17,4 +16,4 @@ } } } -} \ No newline at end of file +} diff --git a/src/test/java/org/elasticsearch/rest/action/admin/indices/upgrade/UpgradeTest.java b/src/test/java/org/elasticsearch/rest/action/admin/indices/upgrade/UpgradeTest.java index 3cd5b3981f2..264d1385874 100644 --- a/src/test/java/org/elasticsearch/rest/action/admin/indices/upgrade/UpgradeTest.java +++ b/src/test/java/org/elasticsearch/rest/action/admin/indices/upgrade/UpgradeTest.java @@ -137,32 +137,34 @@ public class UpgradeTest extends ElasticsearchBackwardsCompatIntegrationTest { logger.info("--> Nodes upgrade complete"); logSegmentsState(); - final HttpRequestBuilder httpClient = httpClient(); - - assertNotUpgraded(httpClient, null); + assertNotUpgraded(httpClient(), null); final String indexToUpgrade = "test" + randomInt(numIndexes - 1); + + // This test fires up another node running an older version of ES, but because wire protocol changes across major ES versions, it + // means we can never generate ancient segments in this test (unless Lucene major version bumps but ES major version does not): + assertFalse(hasAncientSegments(httpClient(), indexToUpgrade)); logger.info("--> Running upgrade on index " + indexToUpgrade); - runUpgrade(httpClient, indexToUpgrade); + runUpgrade(httpClient(), indexToUpgrade); awaitBusy(new Predicate() { @Override public boolean apply(Object o) { try { - return isUpgraded(httpClient, indexToUpgrade); + return isUpgraded(httpClient(), indexToUpgrade); } catch (Exception e) { throw ExceptionsHelper.convertToRuntime(e); } } }); logger.info("--> Single index upgrade complete"); - + logger.info("--> Running upgrade on the rest of the indexes"); - runUpgrade(httpClient, null); + runUpgrade(httpClient(), null); logSegmentsState(); logger.info("--> Full upgrade complete"); - assertUpgraded(httpClient, null); + assertUpgraded(httpClient(), null); } - + static String upgradePath(String index) { String path = "/_upgrade"; if (index != null) { @@ -182,6 +184,39 @@ public class UpgradeTest extends ElasticsearchBackwardsCompatIntegrationTest { } } + public static void assertNoAncientSegments(HttpRequestBuilder httpClient, String index) throws Exception { + for (UpgradeStatus status : getUpgradeStatus(httpClient, upgradePath(index))) { + assertTrue("index " + status.indexName + " should not be zero sized", status.totalBytes != 0); + // TODO: it would be better for this to be 
strictly greater, but sometimes an extra flush + // mysteriously happens after the second round of docs are indexed + assertTrue("index " + status.indexName + " should not have any ancient segments", + status.toUpgradeBytesAncient == 0); + assertTrue("index " + status.indexName + " should have recovered some segments from transaction log", + status.totalBytes >= status.toUpgradeBytes); + assertTrue("index " + status.indexName + " should need upgrading", status.toUpgradeBytes != 0); + } + } + + /** Returns true if there are any ancient segments. */ + public static boolean hasAncientSegments(HttpRequestBuilder httpClient, String index) throws Exception { + for (UpgradeStatus status : getUpgradeStatus(httpClient, upgradePath(index))) { + if (status.toUpgradeBytesAncient != 0) { + return true; + } + } + return false; + } + + /** Returns true if there are any old but not ancient segments. */ + public static boolean hasOldButNotAncientSegments(HttpRequestBuilder httpClient, String index) throws Exception { + for (UpgradeStatus status : getUpgradeStatus(httpClient, upgradePath(index))) { + if (status.toUpgradeBytes > status.toUpgradeBytesAncient) { + return true; + } + } + return false; + } + public static void assertUpgraded(HttpRequestBuilder httpClient, String index) throws Exception { for (UpgradeStatus status : getUpgradeStatus(httpClient, upgradePath(index))) { assertTrue("index " + status.indexName + " should not be zero sized", status.totalBytes != 0); @@ -209,7 +244,7 @@ public class UpgradeTest extends ElasticsearchBackwardsCompatIntegrationTest { } } } - + static boolean isUpgraded(HttpRequestBuilder httpClient, String index) throws Exception { ESLogger logger = Loggers.getLogger(UpgradeTest.class); int toUpgrade = 0; @@ -224,11 +259,14 @@ public class UpgradeTest extends ElasticsearchBackwardsCompatIntegrationTest { public final String indexName; public final int totalBytes; public final int toUpgradeBytes; + public final int toUpgradeBytesAncient; - public UpgradeStatus(String indexName, int totalBytes, int toUpgradeBytes) { + public UpgradeStatus(String indexName, int totalBytes, int toUpgradeBytes, int toUpgradeBytesAncient) { this.indexName = indexName; this.totalBytes = totalBytes; this.toUpgradeBytes = toUpgradeBytes; + this.toUpgradeBytesAncient = toUpgradeBytesAncient; + assert toUpgradeBytesAncient <= toUpgradeBytes; } } @@ -256,7 +294,9 @@ public class UpgradeTest extends ElasticsearchBackwardsCompatIntegrationTest { assertTrue("missing key size_to_upgrade_in_bytes for index " + index, status.containsKey("size_to_upgrade_in_bytes")); Object toUpgradeBytes = status.get("size_to_upgrade_in_bytes"); assertTrue("size_to_upgrade_in_bytes for index " + index + " is not an integer", toUpgradeBytes instanceof Integer); - ret.add(new UpgradeStatus(index, (Integer)totalBytes, (Integer)toUpgradeBytes)); + Object toUpgradeBytesAncient = status.get("size_to_upgrade_ancient_in_bytes"); + assertTrue("size_to_upgrade_ancient_in_bytes for index " + index + " is not an integer", toUpgradeBytesAncient instanceof Integer); + ret.add(new UpgradeStatus(index, (Integer) totalBytes, (Integer) toUpgradeBytes, (Integer) toUpgradeBytesAncient)); } return ret; } diff --git a/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreTests.java b/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreTests.java index a120c01bc80..c7687478aeb 100644 --- a/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreTests.java +++ 
b/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreTests.java @@ -81,7 +81,6 @@ import static org.hamcrest.Matchers.*; public class SharedClusterSnapshotRestoreTests extends AbstractSnapshotTests { @Test - @TestLogging("action.count:TRACE") public void basicWorkFlowTest() throws Exception { Client client = client(); @@ -149,6 +148,7 @@ public class SharedClusterSnapshotRestoreTests extends AbstractSnapshotTests { RestoreSnapshotResponse restoreSnapshotResponse = client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap").setWaitForCompletion(true).execute().actionGet(); assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0)); + ensureGreen(); for (int i=0; i<5; i++) { assertHitCount(client.prepareCount("test-idx-1").get(), 100L); assertHitCount(client.prepareCount("test-idx-2").get(), 100L); @@ -162,6 +162,7 @@ public class SharedClusterSnapshotRestoreTests extends AbstractSnapshotTests { restoreSnapshotResponse = client.admin().cluster().prepareRestoreSnapshot("test-repo", "test-snap").setWaitForCompletion(true).setIndices("test-idx-*", "-test-idx-2").execute().actionGet(); assertThat(restoreSnapshotResponse.getRestoreInfo().totalShards(), greaterThan(0)); + ensureGreen(); for (int i=0; i<5; i++) { assertHitCount(client.prepareCount("test-idx-1").get(), 100L); } diff --git a/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java b/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java index 0a8a3cc92ad..2793db1b043 100644 --- a/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java +++ b/src/test/java/org/elasticsearch/test/ElasticsearchIntegrationTest.java @@ -374,11 +374,6 @@ public abstract class ElasticsearchIntegrationTest extends ElasticsearchTestCase .field("enabled", randomBoolean()) .endObject(); } - if (randomBoolean()) { - mappings.startObject(AllFieldMapper.NAME) - .field("auto_boost", true) - .endObject(); - } if (randomBoolean()) { mappings.startObject(SourceFieldMapper.NAME) .field("compress", randomBoolean()) @@ -973,7 +968,7 @@ public abstract class ElasticsearchIntegrationTest extends ElasticsearchTestCase .health(Requests.clusterHealthRequest(indices).timeout(timeout).waitForGreenStatus().waitForEvents(Priority.LANGUID).waitForRelocatingShards(0)).actionGet(); if (actionGet.isTimedOut()) { logger.info("ensureGreen timed out, cluster state:\n{}\n{}", client().admin().cluster().prepareState().get().getState().prettyPrint(), client().admin().cluster().preparePendingClusterTasks().get().prettyPrint()); - assertThat("timed out waiting for green state", actionGet.isTimedOut(), equalTo(false)); + fail("timed out waiting for green state"); } assertThat(actionGet.getStatus(), equalTo(ClusterHealthStatus.GREEN)); logger.debug("indices {} are green", indices.length == 0 ? "[_all]" : indices); diff --git a/src/test/resources/org/elasticsearch/bwcompat/index-1.5.0.zip b/src/test/resources/org/elasticsearch/bwcompat/index-1.5.0.zip index 598b7cc79b5..4cb9dadb0f8 100644 Binary files a/src/test/resources/org/elasticsearch/bwcompat/index-1.5.0.zip and b/src/test/resources/org/elasticsearch/bwcompat/index-1.5.0.zip differ diff --git a/src/test/resources/org/elasticsearch/bwcompat/repo-1.5.0.zip b/src/test/resources/org/elasticsearch/bwcompat/repo-1.5.0.zip index f8688d90386..a07309a598d 100644 Binary files a/src/test/resources/org/elasticsearch/bwcompat/repo-1.5.0.zip and b/src/test/resources/org/elasticsearch/bwcompat/repo-1.5.0.zip differ