Merge branch 'feature/rank-eval' into feature/rank-eval-roundtrip-testing

Isabel Drost-Fromm 2016-08-25 12:14:34 +02:00
commit 1d69a943be
206 changed files with 1342 additions and 1101 deletions

@@ -72,10 +72,9 @@ class ClusterFormationTasks {
throw new GradleException("bwcVersion must not be null if numBwcNodes is > 0")
}
// this is our current version distribution configuration we use for all kinds of REST tests etc.
project.configurations {
elasticsearchDistro
}
configureDistributionDependency(project, config.distribution, project.configurations.elasticsearchDistro, VersionProperties.elasticsearch)
String distroConfigName = "${task.name}_elasticsearchDistro"
Configuration distro = project.configurations.create(distroConfigName)
configureDistributionDependency(project, config.distribution, distro, VersionProperties.elasticsearch)
if (config.bwcVersion != null && config.numBwcNodes > 0) {
// if we have a cluster that has a BWC cluster we also need to configure a dependency on the BWC version
// this version uses the same distribution etc. and only differs in the version we depend on.
@@ -91,10 +90,9 @@ class ClusterFormationTasks {
// we start N nodes and out of these N nodes there might be M bwc nodes.
// for each of those nodes we might have a different configuration
String elasticsearchVersion = VersionProperties.elasticsearch
Configuration configuration = project.configurations.elasticsearchDistro
if (i < config.numBwcNodes) {
elasticsearchVersion = config.bwcVersion
configuration = project.configurations.elasticsearchBwcDistro
distro = project.configurations.elasticsearchBwcDistro
}
NodeInfo node = new NodeInfo(config, i, project, task, elasticsearchVersion, sharedDir)
if (i == 0) {
@@ -105,7 +103,7 @@ class ClusterFormationTasks {
config.seedNodePortsFile = node.transportPortsFile;
}
nodes.add(node)
startTasks.add(configureNode(project, task, cleanup, node, configuration))
startTasks.add(configureNode(project, task, cleanup, node, distro))
}
Task wait = configureWaitTask("${task.name}#wait", project, nodes, startTasks)

@@ -1,5 +1,5 @@
elasticsearch = 5.0.0-alpha6
lucene = 6.1.0
lucene = 6.2.0
# optional dependencies
spatial4j = 0.6

@@ -19,11 +19,11 @@
package org.apache.lucene.analysis.miscellaneous;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.analysis.util.CharArraySet;
import java.io.IOException;
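
The import swap above repeats across the analysis files in this commit: with the upgrade to Lucene 6.2, CharArraySet moved from org.apache.lucene.analysis.util to org.apache.lucene.analysis. A minimal sketch against the new package (the class name and word list below are illustrative, not from the commit):

import java.util.Arrays;
import org.apache.lucene.analysis.CharArraySet; // new home as of Lucene 6.2

public class CharArraySetExample {
    public static void main(String[] args) {
        // Case-insensitive set, the shape used for stop-word and stem-exclusion lists.
        CharArraySet exclusions = new CharArraySet(Arrays.asList("foo", "bar"), true);
        System.out.println(exclusions.contains("FOO")); // true, since ignoreCase = true
    }
}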

@@ -84,7 +84,7 @@ public class Version {
public static final int V_5_0_0_alpha5_ID = 5000005;
public static final Version V_5_0_0_alpha5 = new Version(V_5_0_0_alpha5_ID, org.apache.lucene.util.Version.LUCENE_6_1_0);
public static final int V_5_0_0_alpha6_ID = 5000006;
public static final Version V_5_0_0_alpha6 = new Version(V_5_0_0_alpha6_ID, org.apache.lucene.util.Version.LUCENE_6_1_0);
public static final Version V_5_0_0_alpha6 = new Version(V_5_0_0_alpha6_ID, org.apache.lucene.util.Version.LUCENE_6_2_0);
public static final Version CURRENT = V_5_0_0_alpha6;
static {
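
The numeric IDs above follow Elasticsearch's packed version scheme (an inference from the constants; the scheme is not spelled out in this hunk): two decimal digits each for build, revision, and minor, below the major version, which is why alpha6 becomes 5000006. A small decoding sketch with illustrative names:

public class VersionIdDecode {
    public static void main(String[] args) {
        int id = 5_000_006;            // V_5_0_0_alpha6_ID
        int major = id / 1_000_000;    // 5
        int minor = id / 10_000 % 100; // 0
        int revision = id / 100 % 100; // 0
        int build = id % 100;          // 6, i.e. alpha6
        System.out.println(major + "." + minor + "." + revision + " build " + build);
    }
}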

@@ -249,14 +249,6 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
return this;
}
/**
* Sets no fields to be loaded, resulting in only the id and type being returned per hit.
*/
public SearchRequestBuilder setNoStoredFields() {
sourceBuilder().noStoredFields();
return this;
}
/**
* Indicates whether the response should contain the stored _source for every hit
*/
@@ -302,7 +294,6 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
/**
* Adds a stored field to load and return (note, it must be stored) as part of the search request.
* If none are specified, the source of the document will be returned.
*/
public SearchRequestBuilder addStoredField(String field) {
sourceBuilder().storedField(field);
@@ -380,9 +371,8 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
}
/**
* Sets the stored fields to load and return as part of the search request. If none
* are specified, the source of the document will be returned.
*
* Adds stored fields to load and return (note, they must be stored) as part of the search request.
* To disable the stored fields entirely (source and metadata fields) use {@code storedField("_none_")}.
* @deprecated Use {@link SearchRequestBuilder#storedFields(String...)} instead.
*/
@Deprecated
@@ -392,8 +382,8 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
}
/**
* Sets the fields to load and return as part of the search request. If none
* are specified, the source of the document will be returned.
* Adds stored fields to load and return (note, they must be stored) as part of the search request.
* To disable the stored fields entirely (source and metadata fields) use {@code storedField("_none_")}.
*/
public SearchRequestBuilder storedFields(String... fields) {
sourceBuilder().storedFields(Arrays.asList(fields));
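
Put together, the reworded javadoc means callers move from the removed setNoStoredFields() and the deprecated fields(...) to the storedFields variants. A hedged usage sketch (index and field names are hypothetical):

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;

public final class StoredFieldsUsage {
    // Load only the stored "title" field for each hit.
    static SearchResponse titlesOnly(Client client) {
        return client.prepareSearch("books").storedFields("title").get();
    }

    // "_none_" disables stored fields entirely, including _source and metadata.
    static SearchResponse noStoredFields(Client client) {
        return client.prepareSearch("books").storedFields("_none_").get();
    }
}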

@@ -88,7 +88,7 @@ import java.util.Objects;
public class Lucene {
public static final String LATEST_DOC_VALUES_FORMAT = "Lucene54";
public static final String LATEST_POSTINGS_FORMAT = "Lucene50";
public static final String LATEST_CODEC = "Lucene60";
public static final String LATEST_CODEC = "Lucene62";
static {
Deprecated annotation = PostingsFormat.forName(LATEST_POSTINGS_FORMAT).getClass().getAnnotation(Deprecated.class);
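
Since codec names resolve through Lucene's SPI, a one-liner is enough to verify that the bumped constant is loadable; a sketch using standard Lucene API (the class name is illustrative):

import org.apache.lucene.codecs.Codec;

public class LatestCodecCheck {
    public static void main(String[] args) {
        // Throws IllegalArgumentException if lucene-core 6.2 is not on the classpath.
        Codec codec = Codec.forName("Lucene62");
        System.out.println(codec.getName()); // Lucene62
    }
}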

@@ -20,8 +20,9 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.LegacyNumericTokenStream;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.ar.ArabicAnalyzer;
import org.apache.lucene.analysis.bg.BulgarianAnalyzer;
import org.apache.lucene.analysis.br.BrazilianAnalyzer;
@@ -55,7 +56,6 @@ import org.apache.lucene.analysis.sv.SwedishAnalyzer;
import org.apache.lucene.analysis.th.ThaiAnalyzer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tr.TurkishAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.apache.lucene.util.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.FileSystemUtils;

@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.ar.ArabicAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -43,4 +43,4 @@ public class ArabicAnalyzerProvider extends AbstractIndexAnalyzerProvider<Arabic
public ArabicAnalyzer get() {
return this.arabicAnalyzer;
}
}
}

@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.hy.ArmenianAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -43,4 +43,4 @@ public class ArmenianAnalyzerProvider extends AbstractIndexAnalyzerProvider<Arme
public ArmenianAnalyzer get() {
return this.analyzer;
}
}
}

@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.eu.BasqueAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -43,4 +43,4 @@ public class BasqueAnalyzerProvider extends AbstractIndexAnalyzerProvider<Basque
public BasqueAnalyzer get() {
return this.analyzer;
}
}
}

@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.br.BrazilianAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -43,4 +43,4 @@ public class BrazilianAnalyzerProvider extends AbstractIndexAnalyzerProvider<Bra
public BrazilianAnalyzer get() {
return this.analyzer;
}
}
}

@@ -22,7 +22,7 @@ package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.br.BrazilianStemFilter;
import org.apache.lucene.analysis.miscellaneous.SetKeywordMarkerFilter;
import org.apache.lucene.analysis.util.CharArraySet;
import org.apache.lucene.analysis.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -42,4 +42,4 @@ public class BrazilianStemTokenFilterFactory extends AbstractTokenFilterFactory
public TokenStream create(TokenStream tokenStream) {
return new BrazilianStemFilter(new SetKeywordMarkerFilter(tokenStream, exclusions));
}
}
}

@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.bg.BulgarianAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -43,4 +43,4 @@ public class BulgarianAnalyzerProvider extends AbstractIndexAnalyzerProvider<Bul
public BulgarianAnalyzer get() {
return this.analyzer;
}
}
}

@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.ca.CatalanAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -43,4 +43,4 @@ public class CatalanAnalyzerProvider extends AbstractIndexAnalyzerProvider<Catal
public CatalanAnalyzer get() {
return this.analyzer;
}
}
}

@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.cjk.CJKAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -44,4 +44,4 @@ public class CjkAnalyzerProvider extends AbstractIndexAnalyzerProvider<CJKAnalyz
public CJKAnalyzer get() {
return this.analyzer;
}
}
}

@@ -19,10 +19,10 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.commongrams.CommonGramsFilter;
import org.apache.lucene.analysis.commongrams.CommonGramsQueryFilter;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;

@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.cz.CzechAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -43,4 +43,4 @@ public class CzechAnalyzerProvider extends AbstractIndexAnalyzerProvider<CzechAn
public CzechAnalyzer get() {
return this.analyzer;
}
}
}

@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.da.DanishAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -43,4 +43,4 @@ public class DanishAnalyzerProvider extends AbstractIndexAnalyzerProvider<Danish
public DanishAnalyzer get() {
return this.analyzer;
}
}
}

@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.nl.DutchAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -43,4 +43,4 @@ public class DutchAnalyzerProvider extends AbstractIndexAnalyzerProvider<DutchAn
public DutchAnalyzer get() {
return this.analyzer;
}
}
}

@@ -19,10 +19,10 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.miscellaneous.SetKeywordMarkerFilter;
import org.apache.lucene.analysis.snowball.SnowballFilter;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -45,4 +45,4 @@ public class DutchStemTokenFilterFactory extends AbstractTokenFilterFactory {
tokenStream = new SetKeywordMarkerFilter(tokenStream, exclusions);
return new SnowballFilter(tokenStream, new DutchStemmer());
}
}
}

@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.util.CharArraySet;
import org.apache.lucene.analysis.util.ElisionFilter;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
@@ -47,4 +47,4 @@ public class ElisionTokenFilterFactory extends AbstractTokenFilterFactory implem
public Object getMultiTermComponent() {
return this;
}
}
}

@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.en.EnglishAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -43,4 +43,4 @@ public class EnglishAnalyzerProvider extends AbstractIndexAnalyzerProvider<Engli
public EnglishAnalyzer get() {
return this.analyzer;
}
}
}

@@ -20,14 +20,14 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.LowerCaseFilter;
import org.apache.lucene.analysis.StopFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.LowerCaseFilter;
import org.apache.lucene.analysis.core.StopFilter;
import org.apache.lucene.analysis.miscellaneous.ASCIIFoldingFilter;
import org.apache.lucene.analysis.miscellaneous.FingerprintFilter;
import org.apache.lucene.analysis.standard.StandardTokenizer;
import org.apache.lucene.analysis.util.CharArraySet;
/** OpenRefine Fingerprinting, which uses a Standard tokenizer and lowercase + stop + fingerprint + asciifolding filters */
public final class FingerprintAnalyzer extends Analyzer {

@@ -20,7 +20,7 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.apache.lucene.analysis.CharArraySet;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;

@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.fi.FinnishAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -43,4 +43,4 @@ public class FinnishAnalyzerProvider extends AbstractIndexAnalyzerProvider<Finni
public FinnishAnalyzer get() {
return this.analyzer;
}
}
}

@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.fr.FrenchAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -43,4 +43,4 @@ public class FrenchAnalyzerProvider extends AbstractIndexAnalyzerProvider<French
public FrenchAnalyzer get() {
return this.analyzer;
}
}
}

@@ -19,10 +19,10 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.miscellaneous.SetKeywordMarkerFilter;
import org.apache.lucene.analysis.snowball.SnowballFilter;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -45,4 +45,4 @@ public class FrenchStemTokenFilterFactory extends AbstractTokenFilterFactory {
tokenStream = new SetKeywordMarkerFilter(tokenStream, exclusions);
return new SnowballFilter(tokenStream, new FrenchStemmer());
}
}
}

@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.gl.GalicianAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -43,4 +43,4 @@ public class GalicianAnalyzerProvider extends AbstractIndexAnalyzerProvider<Gali
public GalicianAnalyzer get() {
return this.analyzer;
}
}
}

@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.de.GermanAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -43,4 +43,4 @@ public class GermanAnalyzerProvider extends AbstractIndexAnalyzerProvider<German
public GermanAnalyzer get() {
return this.analyzer;
}
}
}

@@ -19,10 +19,10 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.de.GermanStemFilter;
import org.apache.lucene.analysis.miscellaneous.SetKeywordMarkerFilter;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -43,4 +43,4 @@ public class GermanStemTokenFilterFactory extends AbstractTokenFilterFactory {
public TokenStream create(TokenStream tokenStream) {
return new GermanStemFilter(new SetKeywordMarkerFilter(tokenStream, exclusions));
}
}
}

@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.hi.HindiAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -43,4 +43,4 @@ public class HindiAnalyzerProvider extends AbstractIndexAnalyzerProvider<HindiAn
public HindiAnalyzer get() {
return this.analyzer;
}
}
}

@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.hu.HungarianAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -43,4 +43,4 @@ public class HungarianAnalyzerProvider extends AbstractIndexAnalyzerProvider<Hun
public HungarianAnalyzer get() {
return this.analyzer;
}
}
}

@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.id.IndonesianAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -43,4 +43,4 @@ public class IndonesianAnalyzerProvider extends AbstractIndexAnalyzerProvider<In
public IndonesianAnalyzer get() {
return this.analyzer;
}
}
}

@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.ga.IrishAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -43,4 +43,4 @@ public class IrishAnalyzerProvider extends AbstractIndexAnalyzerProvider<IrishAn
public IrishAnalyzer get() {
return this.analyzer;
}
}
}

@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.it.ItalianAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -43,4 +43,4 @@ public class ItalianAnalyzerProvider extends AbstractIndexAnalyzerProvider<Itali
public ItalianAnalyzer get() {
return this.analyzer;
}
}
}

@@ -19,9 +19,9 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.miscellaneous.KeepWordFilter;
import org.apache.lucene.analysis.util.CharArraySet;
import org.apache.lucene.util.Version;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;

@@ -19,9 +19,9 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.miscellaneous.SetKeywordMarkerFilter;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;

@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.lv.LatvianAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -43,4 +43,4 @@ public class LatvianAnalyzerProvider extends AbstractIndexAnalyzerProvider<Latvi
public LatvianAnalyzer get() {
return this.analyzer;
}
}
}

@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.lt.LithuanianAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -43,4 +43,4 @@ public class LithuanianAnalyzerProvider extends AbstractIndexAnalyzerProvider<Li
public LithuanianAnalyzer get() {
return this.analyzer;
}
}
}

@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.LowerCaseFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.core.LowerCaseFilter;
import org.apache.lucene.analysis.el.GreekLowerCaseFilter;
import org.apache.lucene.analysis.ga.IrishLowerCaseFilter;
import org.apache.lucene.analysis.tr.TurkishLowerCaseFilter;

@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.no.NorwegianAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -43,4 +43,4 @@ public class NorwegianAnalyzerProvider extends AbstractIndexAnalyzerProvider<Nor
public NorwegianAnalyzer get() {
return this.analyzer;
}
}
}

@@ -20,12 +20,12 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.LowerCaseFilter;
import org.apache.lucene.analysis.StopFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.LowerCaseFilter;
import org.apache.lucene.analysis.core.StopFilter;
import org.apache.lucene.analysis.pattern.PatternTokenizer;
import org.apache.lucene.analysis.util.CharArraySet;
import java.util.regex.Pattern;

@@ -20,8 +20,8 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.core.StopAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.Version;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Settings;

@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.pt.PortugueseAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -43,4 +43,4 @@ public class PortugueseAnalyzerProvider extends AbstractIndexAnalyzerProvider<Po
public PortugueseAnalyzer get() {
return this.analyzer;
}
}
}

@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.ro.RomanianAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -43,4 +43,4 @@ public class RomanianAnalyzerProvider extends AbstractIndexAnalyzerProvider<Roma
public RomanianAnalyzer get() {
return this.analyzer;
}
}
}

@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.ru.RussianAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -43,4 +43,4 @@ public class RussianAnalyzerProvider extends AbstractIndexAnalyzerProvider<Russi
public RussianAnalyzer get() {
return this.analyzer;
}
}
}

@@ -27,7 +27,7 @@ import org.elasticsearch.index.IndexSettings;
/**
* Factory for {@link ScandinavianFoldingFilter}
*/
public class ScandinavianFoldingFilterFactory extends AbstractTokenFilterFactory {
public class ScandinavianFoldingFilterFactory extends AbstractTokenFilterFactory implements MultiTermAwareComponent {
public ScandinavianFoldingFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
super(indexSettings, name, settings);
@@ -38,4 +38,8 @@ public class ScandinavianFoldingFilterFactory extends AbstractTokenFilterFactory
return new ScandinavianFoldingFilter(tokenStream);
}
@Override
public Object getMultiTermComponent() {
return this;
}
}
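
Implementing MultiTermAwareComponent marks the filter as safe to apply when analyzing multi-term queries such as prefix and wildcard, which folding is, since it emits exactly one token per input token. A sketch of how a consumer might pick up the variant (the helper class and its name are hypothetical):

import org.elasticsearch.index.analysis.MultiTermAwareComponent;
import org.elasticsearch.index.analysis.TokenFilterFactory;

public final class MultiTermSupport {
    // Returns the factory to use at multi-term query time, or null if the filter is unsafe there.
    static TokenFilterFactory forMultiTerm(TokenFilterFactory factory) {
        if (factory instanceof MultiTermAwareComponent) {
            return (TokenFilterFactory) ((MultiTermAwareComponent) factory).getMultiTermComponent();
        }
        return null;
    }
}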

@@ -27,7 +27,7 @@ import org.elasticsearch.index.IndexSettings;
/**
* Factory for {@link ScandinavianNormalizationFilter}
*/
public class ScandinavianNormalizationFilterFactory extends AbstractTokenFilterFactory {
public class ScandinavianNormalizationFilterFactory extends AbstractTokenFilterFactory implements MultiTermAwareComponent {
public ScandinavianNormalizationFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
super(indexSettings, name, settings);
@@ -38,4 +38,8 @@ public class ScandinavianNormalizationFilterFactory extends AbstractTokenFilterF
return new ScandinavianNormalizationFilter(tokenStream);
}
@Override
public Object getMultiTermComponent() {
return this;
}
}

@@ -20,16 +20,16 @@ package org.elasticsearch.index.analysis;
*/
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.LowerCaseFilter;
import org.apache.lucene.analysis.StopFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.LowerCaseFilter;
import org.apache.lucene.analysis.core.StopFilter;
import org.apache.lucene.analysis.en.EnglishPossessiveFilter;
import org.apache.lucene.analysis.snowball.SnowballFilter;
import org.apache.lucene.analysis.standard.StandardFilter;
import org.apache.lucene.analysis.standard.StandardTokenizer;
import org.apache.lucene.analysis.tr.TurkishLowerCaseFilter;
import org.apache.lucene.analysis.util.CharArraySet;
import org.apache.lucene.util.Version;
/** Filters {@link StandardTokenizer} with {@link StandardFilter}, {@link

@@ -18,11 +18,11 @@
*/
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.core.StopAnalyzer;
import org.apache.lucene.analysis.de.GermanAnalyzer;
import org.apache.lucene.analysis.fr.FrenchAnalyzer;
import org.apache.lucene.analysis.nl.DutchAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;

@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.ckb.SoraniAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -43,4 +43,4 @@ public class SoraniAnalyzerProvider extends AbstractIndexAnalyzerProvider<Sorani
public SoraniAnalyzer get() {
return this.analyzer;
}
}
}

@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.es.SpanishAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -43,4 +43,4 @@ public class SpanishAnalyzerProvider extends AbstractIndexAnalyzerProvider<Spani
public SpanishAnalyzer get() {
return this.analyzer;
}
}
}

@@ -19,9 +19,9 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.core.StopAnalyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.Version;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;

@@ -19,15 +19,15 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.LowerCaseFilter;
import org.apache.lucene.analysis.StopFilter;
import org.apache.lucene.analysis.StopwordAnalyzerBase;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.LowerCaseFilter;
import org.apache.lucene.analysis.core.StopAnalyzer;
import org.apache.lucene.analysis.core.StopFilter;
import org.apache.lucene.analysis.standard.StandardFilter;
import org.apache.lucene.analysis.standard.StandardTokenizer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.apache.lucene.analysis.util.StopwordAnalyzerBase;
public class StandardHtmlStripAnalyzer extends StopwordAnalyzerBase {

@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.core.StopAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.Version;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;

@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.core.StopAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -43,4 +43,4 @@ public class StopAnalyzerProvider extends AbstractIndexAnalyzerProvider<StopAnal
public StopAnalyzer get() {
return this.stopAnalyzer;
}
}
}

@@ -19,10 +19,10 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.StopFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.core.StopAnalyzer;
import org.apache.lucene.analysis.core.StopFilter;
import org.apache.lucene.analysis.util.CharArraySet;
import org.apache.lucene.search.suggest.analyzing.SuggestStopFilter;
import org.apache.lucene.util.Version;
import org.elasticsearch.common.settings.Settings;

@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.sv.SwedishAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -43,4 +43,4 @@ public class SwedishAnalyzerProvider extends AbstractIndexAnalyzerProvider<Swedi
public SwedishAnalyzer get() {
return this.analyzer;
}
}
}

@@ -20,9 +20,9 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.LowerCaseFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.LowerCaseFilter;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
import org.apache.lucene.analysis.synonym.SolrSynonymParser;
import org.apache.lucene.analysis.synonym.SynonymFilter;

@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.tr.TurkishAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -43,4 +43,4 @@ public class TurkishAnalyzerProvider extends AbstractIndexAnalyzerProvider<Turki
public TurkishAnalyzer get() {
return this.analyzer;
}
}
}

@@ -19,10 +19,10 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.miscellaneous.WordDelimiterFilter;
import org.apache.lucene.analysis.miscellaneous.WordDelimiterIterator;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;

@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis.compound;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.compound.CompoundWordTokenFilterBase;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;

@@ -21,8 +21,7 @@ package org.elasticsearch.index.codec;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode;
import org.apache.lucene.codecs.lucene54.Lucene54Codec;
import org.apache.lucene.codecs.lucene60.Lucene60Codec;
import org.apache.lucene.codecs.lucene62.Lucene62Codec;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.logging.ESLogger;
@@ -48,8 +47,8 @@ public class CodecService {
public CodecService(@Nullable MapperService mapperService, ESLogger logger) {
final MapBuilder<String, Codec> codecs = MapBuilder.<String, Codec>newMapBuilder();
if (mapperService == null) {
codecs.put(DEFAULT_CODEC, new Lucene60Codec());
codecs.put(BEST_COMPRESSION_CODEC, new Lucene60Codec(Mode.BEST_COMPRESSION));
codecs.put(DEFAULT_CODEC, new Lucene62Codec());
codecs.put(BEST_COMPRESSION_CODEC, new Lucene62Codec(Mode.BEST_COMPRESSION));
} else {
codecs.put(DEFAULT_CODEC,
new PerFieldMappingPostingFormatCodec(Mode.BEST_SPEED, mapperService, logger));
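
A sketch of what the registration above means for callers, assuming CodecService's public codec(String) lookup (the helper class is illustrative):

import org.apache.lucene.codecs.Codec;
import org.elasticsearch.index.codec.CodecService;

public final class CodecLookup {
    // With no MapperService the service now hands back plain Lucene62 codecs.
    static Codec defaultCodec(CodecService service) {
        return service.codec(CodecService.DEFAULT_CODEC); // "default"
    }
}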

@@ -22,7 +22,7 @@ package org.elasticsearch.index.codec;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat;
import org.apache.lucene.codecs.lucene60.Lucene60Codec;
import org.apache.lucene.codecs.lucene62.Lucene62Codec;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.index.mapper.CompletionFieldMapper;
@@ -39,7 +39,7 @@ import org.elasticsearch.index.mapper.MapperService;
* configured for a specific field the default postings format is used.
*/
// LUCENE UPGRADE: make sure to move to a new codec depending on the lucene version
public class PerFieldMappingPostingFormatCodec extends Lucene60Codec {
public class PerFieldMappingPostingFormatCodec extends Lucene62Codec {
private final ESLogger logger;
private final MapperService mapperService;

@@ -20,7 +20,6 @@
package org.elasticsearch.index.engine;
import org.apache.lucene.util.RamUsageEstimator;
import org.elasticsearch.index.translog.Translog;
/** Holds a deleted version, which just adds a timestamp to {@link VersionValue} so we know when we can expire the deletion. */
@@ -30,8 +29,8 @@ class DeleteVersionValue extends VersionValue {
private final long time;
public DeleteVersionValue(long version, long time, Translog.Location translogLocation) {
super(version, translogLocation);
public DeleteVersionValue(long version, long time) {
super(version);
this.time = time;
}
@@ -47,7 +46,6 @@ class DeleteVersionValue extends VersionValue {
@Override
public long ramBytesUsed() {
Translog.Location translogLocation = translogLocation();
return BASE_RAM_BYTES_USED + (translogLocation != null ? translogLocation.ramBytesUsed() : 0);
return BASE_RAM_BYTES_USED;
}
}

@@ -1000,32 +1000,23 @@ public abstract class Engine implements Closeable {
public static class GetResult implements Releasable {
private final boolean exists;
private final long version;
private final Translog.Source source;
private final Versions.DocIdAndVersion docIdAndVersion;
private final Searcher searcher;
public static final GetResult NOT_EXISTS = new GetResult(false, Versions.NOT_FOUND, null);
public static final GetResult NOT_EXISTS = new GetResult(false, Versions.NOT_FOUND, null, null);
/**
* Build a realtime get result from the translog.
*/
public GetResult(boolean exists, long version, @Nullable Translog.Source source) {
this.source = source;
private GetResult(boolean exists, long version, Versions.DocIdAndVersion docIdAndVersion, Searcher searcher) {
this.exists = exists;
this.version = version;
this.docIdAndVersion = null;
this.searcher = null;
this.docIdAndVersion = docIdAndVersion;
this.searcher = searcher;
}
/**
* Build a non-realtime get result from the searcher.
*/
public GetResult(Searcher searcher, Versions.DocIdAndVersion docIdAndVersion) {
this.exists = true;
this.source = null;
this.version = docIdAndVersion.version;
this.docIdAndVersion = docIdAndVersion;
this.searcher = searcher;
this(true, docIdAndVersion.version, docIdAndVersion, searcher);
}
public boolean exists() {
@@ -1036,11 +1027,6 @@ public abstract class Engine implements Closeable {
return this.version;
}
@Nullable
public Translog.Source source() {
return source;
}
public Searcher searcher() {
return this.searcher;
}
@@ -1055,9 +1041,7 @@ public abstract class Engine implements Closeable {
}
public void release() {
if (searcher != null) {
searcher.close();
}
Releasables.close(searcher);
}
}

@@ -328,10 +328,7 @@ public class InternalEngine extends Engine {
throw new VersionConflictEngineException(shardId, uid.type(), uid.id(),
get.versionType().explainConflictForReads(versionValue.version(), get.version()));
}
Translog.Operation op = translog.read(versionValue.translogLocation());
if (op != null) {
return new GetResult(true, versionValue.version(), op.getSource());
}
refresh("realtime_get");
}
}
@@ -368,11 +365,11 @@ public class InternalEngine extends Engine {
return currentVersion;
}
private static VersionValueSupplier NEW_VERSION_VALUE = (u, t, l) -> new VersionValue(u, l);
private static VersionValueSupplier NEW_VERSION_VALUE = (u, t) -> new VersionValue(u);
@FunctionalInterface
private interface VersionValueSupplier {
VersionValue apply(long updatedVersion, long time, Translog.Location location);
VersionValue apply(long updatedVersion, long time);
}
private <T extends Engine.Operation> void maybeAddToTranslog(
@@ -383,14 +380,9 @@ public class InternalEngine extends Engine {
if (op.origin() != Operation.Origin.LOCAL_TRANSLOG_RECOVERY) {
final Translog.Location translogLocation = translog.add(toTranslogOp.apply(op));
op.setTranslogLocation(translogLocation);
versionMap.putUnderLock(op.uid().bytes(), toVersionValue.apply(updatedVersion, engineConfig.getThreadPool().estimatedTimeInMillis(), op.getTranslogLocation()));
} else {
// we do not replay into the translog, so there is no
// translog location; that is okay because real-time
// gets are not possible during recovery and we will
// flush when the recovery is complete
versionMap.putUnderLock(op.uid().bytes(), toVersionValue.apply(updatedVersion, engineConfig.getThreadPool().estimatedTimeInMillis(), null));
}
versionMap.putUnderLock(op.uid().bytes(), toVersionValue.apply(updatedVersion, engineConfig.getThreadPool().estimatedTimeInMillis()));
}
@Override

@@ -42,7 +42,7 @@ class LiveVersionMap implements ReferenceManager.RefreshListener, Accountable {
// Used while refresh is running, and to hold adds/deletes until refresh finishes. We read from both current and old on lookup:
final Map<BytesRef,VersionValue> old;
public Maps(Map<BytesRef,VersionValue> current, Map<BytesRef,VersionValue> old) {
this.current = current;
this.old = old;
@@ -256,7 +256,7 @@ class LiveVersionMap implements ReferenceManager.RefreshListener, Accountable {
return ramBytesUsedCurrent.get() + ramBytesUsedTombstones.get();
}
/** Returns how much RAM would be freed up by refreshing. This is {@link ramBytesUsed} except does not include tombstones because they
/** Returns how much RAM would be freed up by refreshing. This is {@link #ramBytesUsed} except does not include tombstones because they
* don't clear on refresh. */
long ramBytesUsedForRefresh() {
return ramBytesUsedCurrent.get();

@@ -21,7 +21,6 @@ package org.elasticsearch.index.engine;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.RamUsageEstimator;
import org.elasticsearch.index.translog.Translog;
import java.util.Collection;
import java.util.Collections;
@@ -31,11 +30,9 @@ class VersionValue implements Accountable {
private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(VersionValue.class);
private final long version;
private final Translog.Location translogLocation;
public VersionValue(long version, Translog.Location translogLocation) {
public VersionValue(long version) {
this.version = version;
this.translogLocation = translogLocation;
}
public long time() {
@@ -50,13 +47,10 @@ class VersionValue implements Accountable {
return false;
}
public Translog.Location translogLocation() {
return this.translogLocation;
}
@Override
public long ramBytesUsed() {
return BASE_RAM_BYTES_USED + (translogLocation != null ? translogLocation.ramBytesUsed() : 0);
return BASE_RAM_BYTES_USED;
}
@Override

@@ -85,7 +85,6 @@ public final class ShardGetService extends AbstractIndexShardComponent {
return new GetStats(existsMetric.count(), TimeUnit.NANOSECONDS.toMillis(existsMetric.sum()), missingMetric.count(), TimeUnit.NANOSECONDS.toMillis(missingMetric.sum()), currentMetric.count());
}
public GetResult get(String type, String id, String[] gFields, boolean realtime, long version, VersionType versionType, FetchSourceContext fetchSourceContext, boolean ignoreErrorsOnGeneratedFields) {
currentMetric.inc();
try {
@@ -182,140 +181,12 @@ public final class ShardGetService extends AbstractIndexShardComponent {
try {
// break between having loaded it from translog (so we only have _source), and having a document to load
if (get.docIdAndVersion() != null) {
return innerGetLoadFromStoredFields(type, id, gFields, fetchSourceContext, get, mapperService);
} else {
Translog.Source source = get.source();
Map<String, GetField> fields = null;
SearchLookup searchLookup = null;
// we can only load scripts that can run against the source
Set<String> neededFields = new HashSet<>();
// add meta fields
neededFields.add(RoutingFieldMapper.NAME);
DocumentMapper docMapper = mapperService.documentMapper(type);
if (docMapper.parentFieldMapper().active()) {
neededFields.add(ParentFieldMapper.NAME);
}
if (docMapper.timestampFieldMapper().enabled()) {
neededFields.add(TimestampFieldMapper.NAME);
}
if (docMapper.TTLFieldMapper().enabled()) {
neededFields.add(TTLFieldMapper.NAME);
}
// add requested fields
if (gFields != null) {
neededFields.addAll(Arrays.asList(gFields));
}
for (String field : neededFields) {
if (SourceFieldMapper.NAME.equals(field)) {
// dealt with when normalizing fetchSourceContext.
continue;
}
Object value = null;
if (field.equals(RoutingFieldMapper.NAME)) {
value = source.routing;
} else if (field.equals(ParentFieldMapper.NAME) && docMapper.parentFieldMapper().active()) {
value = source.parent;
} else if (field.equals(TimestampFieldMapper.NAME) && docMapper.timestampFieldMapper().enabled()) {
value = source.timestamp;
} else if (field.equals(TTLFieldMapper.NAME) && docMapper.TTLFieldMapper().enabled()) {
// Call value for search with timestamp + ttl here to display the live remaining ttl value and be consistent with the search result display
if (source.ttl > 0) {
value = docMapper.TTLFieldMapper().valueForSearch(source.timestamp + source.ttl);
}
} else {
if (searchLookup == null) {
searchLookup = new SearchLookup(mapperService, null, new String[]{type});
searchLookup.source().setSource(source.source);
}
FieldMapper fieldMapper = docMapper.mappers().smartNameFieldMapper(field);
if (fieldMapper == null) {
if (docMapper.objectMappers().get(field) != null) {
// Only fail if we know it is an object field, missing paths / fields shouldn't fail.
throw new IllegalArgumentException("field [" + field + "] isn't a leaf field");
}
} else if (shouldGetFromSource(ignoreErrorsOnGeneratedFields, docMapper, fieldMapper)) {
List<Object> values = searchLookup.source().extractRawValues(field);
if (!values.isEmpty()) {
value = values;
}
}
}
if (value != null) {
if (fields == null) {
fields = new HashMap<>(2);
}
if (value instanceof List) {
fields.put(field, new GetField(field, (List) value));
} else {
fields.put(field, new GetField(field, Collections.singletonList(value)));
}
}
}
// deal with source, but only if it's enabled (we always have it from the translog)
BytesReference sourceToBeReturned = null;
SourceFieldMapper sourceFieldMapper = docMapper.sourceMapper();
if (fetchSourceContext.fetchSource() && sourceFieldMapper.enabled()) {
sourceToBeReturned = source.source;
// Cater for source excludes/includes at the cost of performance
// We must first apply the field mapper filtering to make sure we get correct results
// in the case that the fetchSourceContext white lists something that's not included by the field mapper
boolean sourceFieldFiltering = sourceFieldMapper.includes().length > 0 || sourceFieldMapper.excludes().length > 0;
boolean sourceFetchFiltering = fetchSourceContext.includes().length > 0 || fetchSourceContext.excludes().length > 0;
if (sourceFieldFiltering || sourceFetchFiltering) {
// TODO: The source might be parsed and available in the sourceLookup, but that one uses unordered maps so it would differ. Do we care?
Tuple<XContentType, Map<String, Object>> typeMapTuple = XContentHelper.convertToMap(source.source, true);
XContentType sourceContentType = typeMapTuple.v1();
Map<String, Object> sourceAsMap = typeMapTuple.v2();
if (sourceFieldFiltering) {
sourceAsMap = XContentMapValues.filter(sourceAsMap, sourceFieldMapper.includes(), sourceFieldMapper.excludes());
}
if (sourceFetchFiltering) {
sourceAsMap = XContentMapValues.filter(sourceAsMap, fetchSourceContext.includes(), fetchSourceContext.excludes());
}
try {
sourceToBeReturned = XContentFactory.contentBuilder(sourceContentType).map(sourceAsMap).bytes();
} catch (IOException e) {
throw new ElasticsearchException("Failed to get type [" + type + "] and id [" + id + "] with includes/excludes set", e);
}
}
}
return new GetResult(shardId.getIndexName(), type, id, get.version(), get.exists(), sourceToBeReturned, fields);
}
return innerGetLoadFromStoredFields(type, id, gFields, fetchSourceContext, get, mapperService);
} finally {
get.release();
}
}
protected boolean shouldGetFromSource(boolean ignoreErrorsOnGeneratedFields, DocumentMapper docMapper, FieldMapper fieldMapper) {
if (!fieldMapper.isGenerated()) {
//if the field is always there we check if either source mapper is enabled, in which case we get the field
// from source, or, if the field is stored, in which case we have to get it from source here also (we are in the translog phase, doc not indexed yet, we cannot access the stored fields)
return docMapper.sourceMapper().enabled() || fieldMapper.fieldType().stored();
} else {
if (!fieldMapper.fieldType().stored()) {
//if it is not stored, user will not get the generated field back
return false;
} else {
if (ignoreErrorsOnGeneratedFields) {
return false;
} else {
throw new ElasticsearchException("Cannot access field " + fieldMapper.name() + " from transaction log. You can only get this field after refresh() has been called.");
}
}
}
}
private GetResult innerGetLoadFromStoredFields(String type, String id, String[] gFields, FetchSourceContext fetchSourceContext, Engine.GetResult get, MapperService mapperService) {
Map<String, GetField> fields = null;
BytesReference source = null;

@@ -184,7 +184,7 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr
public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
Builder builder;
if (parserContext.indexVersionCreated().before(Version.V_2_2_0)) {
builder = new GeoPointFieldMapperLegacy.Builder(name);
builder = new LegacyGeoPointFieldMapper.Builder(name);
} else {
builder = new GeoPointFieldMapper.Builder(name);
}
@@ -232,8 +232,8 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr
}
}
if (builder instanceof GeoPointFieldMapperLegacy.Builder) {
return GeoPointFieldMapperLegacy.parse((GeoPointFieldMapperLegacy.Builder) builder, node, parserContext);
if (builder instanceof LegacyGeoPointFieldMapper.Builder) {
return LegacyGeoPointFieldMapper.parse((LegacyGeoPointFieldMapper.Builder) builder, node, parserContext);
}
return (GeoPointFieldMapper.Builder) builder;

@@ -25,7 +25,6 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
@@ -50,7 +49,7 @@ import java.util.Map;
* "lon" : 2.1
* }
*/
public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implements ArrayValueMapperParser {
public class LegacyGeoPointFieldMapper extends BaseGeoPointFieldMapper implements ArrayValueMapperParser {
public static final String CONTENT_TYPE = "geo_point";
@@ -74,7 +73,7 @@ public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implement
/**
* Concrete builder for legacy GeoPointField
*/
public static class Builder extends BaseGeoPointFieldMapper.Builder<Builder, GeoPointFieldMapperLegacy> {
public static class Builder extends BaseGeoPointFieldMapper.Builder<Builder, LegacyGeoPointFieldMapper> {
private Boolean coerce;
@@ -99,25 +98,26 @@ public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implement
}
@Override
public GeoPointFieldMapperLegacy build(BuilderContext context, String simpleName, MappedFieldType fieldType,
MappedFieldType defaultFieldType, Settings indexSettings, FieldMapper latMapper,
FieldMapper lonMapper, KeywordFieldMapper geoHashMapper, MultiFields multiFields, Explicit<Boolean> ignoreMalformed,
CopyTo copyTo) {
public LegacyGeoPointFieldMapper build(BuilderContext context, String simpleName, MappedFieldType fieldType,
MappedFieldType defaultFieldType, Settings indexSettings, FieldMapper latMapper, FieldMapper lonMapper,
KeywordFieldMapper geoHashMapper, MultiFields multiFields, Explicit<Boolean> ignoreMalformed,
CopyTo copyTo) {
fieldType.setTokenized(false);
setupFieldType(context);
fieldType.setHasDocValues(false);
defaultFieldType.setHasDocValues(false);
return new GeoPointFieldMapperLegacy(simpleName, fieldType, defaultFieldType, indexSettings, latMapper, lonMapper,
geoHashMapper, multiFields, ignoreMalformed, coerce(context), copyTo);
return new LegacyGeoPointFieldMapper(simpleName, fieldType, defaultFieldType, indexSettings, latMapper,
lonMapper, geoHashMapper, multiFields, ignoreMalformed, coerce(context), copyTo);
}
@Override
public GeoPointFieldMapperLegacy build(BuilderContext context) {
public LegacyGeoPointFieldMapper build(BuilderContext context) {
return super.build(context);
}
}
public static Builder parse(Builder builder, Map<String, Object> node, Mapper.TypeParser.ParserContext parserContext) throws MapperParsingException {
public static Builder parse(Builder builder, Map<String, Object> node, Mapper.TypeParser.ParserContext parserContext)
throws MapperParsingException {
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = entry.getKey();
@@ -155,7 +155,8 @@ public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implement
return instance;
}
/** Get an instance based on the expected precision. Here are examples of the number of required bytes per value depending on the
/** Get an instance based on the expected precision. Here are examples of the number of required bytes per value
* depending on the
* expected precision:<ul>
* <li>1km: 4 bytes</li>
* <li>3m: 6 bytes</li>
@@ -182,13 +183,15 @@ public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implement
this.numBytes = numBytes;
this.numBytesPerCoordinate = numBytes / 2;
this.factor = Math.pow(2, - numBytesPerCoordinate * 8 + 9);
assert (1L << (numBytesPerCoordinate * 8 - 1)) * factor > 180 && (1L << (numBytesPerCoordinate * 8 - 2)) * factor < 180 : numBytesPerCoordinate + " " + factor;
assert (1L << (numBytesPerCoordinate * 8 - 1)) * factor > 180 && (1L << (numBytesPerCoordinate * 8 - 2))
* factor < 180 : numBytesPerCoordinate + " " + factor;
if (numBytes == MAX_NUM_BYTES) {
// no precision loss compared to a double
precision = new DistanceUnit.Distance(0, DistanceUnit.DEFAULT);
} else {
// factor/2 because we use Math.round instead of a cast to convert the double to a long
precision = new DistanceUnit.Distance(
GeoDistance.PLANE.calculate(0, 0, factor / 2, factor / 2, DistanceUnit.DEFAULT), // factor/2 because we use Math.round instead of a cast to convert the double to a long
GeoDistance.PLANE.calculate(0, 0, factor / 2, factor / 2, DistanceUnit.DEFAULT),
DistanceUnit.DEFAULT);
}
}
@ -257,10 +260,9 @@ public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implement
protected Explicit<Boolean> coerce;
public GeoPointFieldMapperLegacy(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings,
FieldMapper latMapper, FieldMapper lonMapper,
KeywordFieldMapper geoHashMapper, MultiFields multiFields, Explicit<Boolean> ignoreMalformed,
Explicit<Boolean> coerce, CopyTo copyTo) {
public LegacyGeoPointFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
Settings indexSettings, FieldMapper latMapper, FieldMapper lonMapper, KeywordFieldMapper geoHashMapper,
MultiFields multiFields, Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce, CopyTo copyTo) {
super(simpleName, fieldType, defaultFieldType, indexSettings, latMapper, lonMapper, geoHashMapper, multiFields,
ignoreMalformed, copyTo);
this.coerce = coerce;
@ -270,7 +272,7 @@ public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implement
protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
super.doMerge(mergeWith, updateAllTypes);
GeoPointFieldMapperLegacy gpfmMergeWith = (GeoPointFieldMapperLegacy) mergeWith;
LegacyGeoPointFieldMapper gpfmMergeWith = (LegacyGeoPointFieldMapper) mergeWith;
if (gpfmMergeWith.coerce.explicit()) {
if (coerce.explicit() && coerce.value() != gpfmMergeWith.coerce.value()) {
throw new IllegalArgumentException("mapper [" + fieldType().name() + "] has different [coerce]");
@ -302,14 +304,16 @@ public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implement
}
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
Field field = new Field(fieldType().name(), Double.toString(point.lat()) + ',' + Double.toString(point.lon()), fieldType());
Field field = new Field(fieldType().name(), Double.toString(point.lat()) + ','
+ Double.toString(point.lon()), fieldType());
context.doc().add(field);
}
super.parse(context, point, geoHash);
if (fieldType().hasDocValues()) {
CustomGeoPointDocValuesField field = (CustomGeoPointDocValuesField) context.doc().getByKey(fieldType().name());
CustomGeoPointDocValuesField field = (CustomGeoPointDocValuesField) context.doc()
.getByKey(fieldType().name());
if (field == null) {
field = new CustomGeoPointDocValuesField(fieldType().name(), point.lat(), point.lon());
context.doc().addWithKey(fieldType().name(), field);
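
A note on the Encoding logic above: with b bytes per coordinate the grid cell is 2^(9 - 8b) degrees wide, and because values are converted with Math.round the worst-case error is half a cell, which is exactly what the precision Distance computed in the constructor captures. A self-contained sketch of that arithmetic (the meters-per-degree constant is an illustrative approximation, not part of this change):

import java.util.Locale;

public class GeoEncodingPrecisionSketch {
    // roughly 111.32 km per degree of latitude at the equator (approximation)
    private static final double METERS_PER_DEGREE = 111_320.0;

    public static void main(String[] args) {
        for (int numBytes = 4; numBytes <= 16; numBytes += 2) {
            int numBytesPerCoordinate = numBytes / 2;
            // cell width in degrees, same formula as the Encoding constructor
            double factor = Math.pow(2, -numBytesPerCoordinate * 8 + 9);
            // Math.round keeps the error at half a cell per coordinate
            double errorMeters = (factor / 2) * METERS_PER_DEGREE;
            System.out.printf(Locale.ROOT, "%2d bytes per value -> ~%.2f m per coordinate%n",
                    numBytes, errorMeters);
        }
    }
}

For numBytes = 4 this yields roughly 435 m per coordinate, a little over 600 m combined across both axes, in line with the "1km: 4 bytes" figure in the javadoc.
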

View File

@ -38,10 +38,10 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.BaseGeoPointFieldMapper;
import org.elasticsearch.index.mapper.GeoPointFieldMapperLegacy;
import org.elasticsearch.index.mapper.LegacyGeoPointFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.search.geo.InMemoryGeoBoundingBoxQuery;
import org.elasticsearch.index.search.geo.IndexedGeoBoundingBoxQuery;
import org.elasticsearch.index.search.geo.LegacyInMemoryGeoBoundingBoxQuery;
import org.elasticsearch.index.search.geo.LegacyIndexedGeoBoundingBoxQuery;
import java.io.IOException;
import java.util.Objects;
@ -371,12 +371,12 @@ public class GeoBoundingBoxQueryBuilder extends AbstractQueryBuilder<GeoBounding
Query query;
switch(type) {
case INDEXED:
GeoPointFieldMapperLegacy.GeoPointFieldType geoFieldType = ((GeoPointFieldMapperLegacy.GeoPointFieldType) fieldType);
query = IndexedGeoBoundingBoxQuery.create(luceneTopLeft, luceneBottomRight, geoFieldType);
LegacyGeoPointFieldMapper.GeoPointFieldType geoFieldType = ((LegacyGeoPointFieldMapper.GeoPointFieldType) fieldType);
query = LegacyIndexedGeoBoundingBoxQuery.create(luceneTopLeft, luceneBottomRight, geoFieldType);
break;
case MEMORY:
IndexGeoPointFieldData indexFieldData = context.getForField(fieldType);
query = new InMemoryGeoBoundingBoxQuery(luceneTopLeft, luceneBottomRight, indexFieldData);
query = new LegacyInMemoryGeoBoundingBoxQuery(luceneTopLeft, luceneBottomRight, indexFieldData);
break;
default:
// Someone extended the type enum w/o adjusting this switch statement.

View File

@ -38,7 +38,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.BaseGeoPointFieldMapper;
import org.elasticsearch.index.mapper.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.GeoPointFieldMapperLegacy;
import org.elasticsearch.index.mapper.LegacyGeoPointFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery;
@ -298,7 +298,7 @@ public class GeoDistanceQueryBuilder extends AbstractQueryBuilder<GeoDistanceQue
double normDistance = geoDistance.normalize(this.distance, DistanceUnit.DEFAULT);
if (indexVersionCreated.before(Version.V_2_2_0)) {
GeoPointFieldMapperLegacy.GeoPointFieldType geoFieldType = ((GeoPointFieldMapperLegacy.GeoPointFieldType) fieldType);
LegacyGeoPointFieldMapper.GeoPointFieldType geoFieldType = ((LegacyGeoPointFieldMapper.GeoPointFieldType) fieldType);
IndexGeoPointFieldData indexFieldData = shardContext.getForField(fieldType);
String bboxOptimization = Strings.isEmpty(optimizeBbox) ? DEFAULT_OPTIMIZE_BBOX : optimizeBbox;
return new GeoDistanceRangeQuery(center, null, normDistance, true, false, geoDistance,

View File

@ -38,7 +38,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.BaseGeoPointFieldMapper;
import org.elasticsearch.index.mapper.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.GeoPointFieldMapperLegacy;
import org.elasticsearch.index.mapper.LegacyGeoPointFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery;
@ -348,7 +348,7 @@ public class GeoDistanceRangeQueryBuilder extends AbstractQueryBuilder<GeoDistan
final Version indexVersionCreated = context.indexVersionCreated();
if (indexVersionCreated.before(Version.V_2_2_0)) {
GeoPointFieldMapperLegacy.GeoPointFieldType geoFieldType = ((GeoPointFieldMapperLegacy.GeoPointFieldType) fieldType);
LegacyGeoPointFieldMapper.GeoPointFieldType geoFieldType = ((LegacyGeoPointFieldMapper.GeoPointFieldType) fieldType);
IndexGeoPointFieldData indexFieldData = context.getForField(fieldType);
String bboxOptimization = Strings.isEmpty(optimizeBbox) ? DEFAULT_OPTIMIZE_BBOX : optimizeBbox;
return new GeoDistanceRangeQuery(point, fromValue, toValue, includeLower, includeUpper, geoDistance, geoFieldType,

View File

@ -34,6 +34,7 @@ import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsFetchSubPhase;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
@ -137,7 +138,7 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
private boolean version;
private boolean trackScores;
private List<String> storedFieldNames;
private StoredFieldsContext storedFieldsContext;
private QueryBuilder query = DEFAULT_INNER_HIT_QUERY;
private List<SortBuilder<?>> sorts;
private List<String> docValueFields;
@ -156,14 +157,14 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
explain = other.explain;
version = other.version;
trackScores = other.trackScores;
if (other.storedFieldNames != null) {
storedFieldNames = new ArrayList<>(other.storedFieldNames);
if (other.storedFieldsContext != null) {
storedFieldsContext = new StoredFieldsContext(other.storedFieldsContext);
}
if (other.docValueFields != null) {
docValueFields = new ArrayList<>(other.docValueFields);
docValueFields = new ArrayList<> (other.docValueFields);
}
if (other.scriptFields != null) {
scriptFields = new HashSet<>(other.scriptFields);
scriptFields = new HashSet<> (other.scriptFields);
}
if (other.fetchSourceContext != null) {
fetchSourceContext = new FetchSourceContext(
@ -210,7 +211,7 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
explain = in.readBoolean();
version = in.readBoolean();
trackScores = in.readBoolean();
storedFieldNames = (List<String>) in.readGenericValue();
storedFieldsContext = in.readOptionalWriteable(StoredFieldsContext::new);
docValueFields = (List<String>) in.readGenericValue();
if (in.readBoolean()) {
int size = in.readVInt();
@ -248,14 +249,14 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
out.writeBoolean(explain);
out.writeBoolean(version);
out.writeBoolean(trackScores);
out.writeGenericValue(storedFieldNames);
out.writeOptionalWriteable(storedFieldsContext);
out.writeGenericValue(docValueFields);
boolean hasScriptFields = scriptFields != null;
out.writeBoolean(hasScriptFields);
if (hasScriptFields) {
out.writeVInt(scriptFields.size());
for (ScriptField scriptField : scriptFields) {
scriptField.writeTo(out);;
scriptField.writeTo(out);
}
}
out.writeOptionalStreamable(fetchSourceContext);
@ -343,39 +344,42 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
/**
* Gets the stored fields to load and return.
*
* @deprecated Use {@link InnerHitBuilder#getStoredFieldNames()} instead.
* @deprecated Use {@link InnerHitBuilder#getStoredFieldsContext()} instead.
*/
@Deprecated
public List<String> getFieldNames() {
return storedFieldNames;
return storedFieldsContext == null ? null : storedFieldsContext.fieldNames();
}
/**
* Sets the stored fields to load and return. If none
* are specified, the source of the document will be returned.
* Sets the stored fields to load and return.
* If none are specified, the source of the document will be returned.
*
* @deprecated Use {@link InnerHitBuilder#setStoredFieldNames(List)} instead.
*/
@Deprecated
public InnerHitBuilder setFieldNames(List<String> fieldNames) {
this.storedFieldNames = fieldNames;
return this;
return setStoredFieldNames(fieldNames);
}
/**
* Gets the stored fields to load and return.
* Gets the stored fields context.
*/
public List<String> getStoredFieldNames() {
return storedFieldNames;
public StoredFieldsContext getStoredFieldsContext() {
return storedFieldsContext;
}
/**
* Sets the stored fields to load and return. If none
* are specified, the source of the document will be returned.
* Sets the stored fields to load and return.
* If none are specified, the source of the document will be returned.
*/
public InnerHitBuilder setStoredFieldNames(List<String> fieldNames) {
this.storedFieldNames = fieldNames;
if (storedFieldsContext == null) {
storedFieldsContext = StoredFieldsContext.fromList(fieldNames);
} else {
storedFieldsContext.addFieldNames(fieldNames);
}
return this;
}
@ -564,14 +568,8 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
innerHitsContext.explain(explain);
innerHitsContext.version(version);
innerHitsContext.trackScores(trackScores);
if (storedFieldNames != null) {
if (storedFieldNames.isEmpty()) {
innerHitsContext.emptyFieldNames();
} else {
for (String fieldName : storedFieldNames) {
innerHitsContext.fieldNames().add(fieldName);
}
}
if (storedFieldsContext != null) {
innerHitsContext.storedFieldsContext(storedFieldsContext);
}
if (docValueFields != null) {
DocValueFieldsContext docValueFieldsContext = innerHitsContext
@ -633,16 +631,8 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
if (fetchSourceContext != null) {
builder.field(SearchSourceBuilder._SOURCE_FIELD.getPreferredName(), fetchSourceContext, params);
}
if (storedFieldNames != null) {
if (storedFieldNames.size() == 1) {
builder.field(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), storedFieldNames.get(0));
} else {
builder.startArray(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName());
for (String fieldName : storedFieldNames) {
builder.value(fieldName);
}
builder.endArray();
}
if (storedFieldsContext != null) {
storedFieldsContext.toXContent(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), builder);
}
if (docValueFields != null) {
builder.startArray(SearchSourceBuilder.DOCVALUE_FIELDS_FIELD.getPreferredName());
@ -693,7 +683,7 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
Objects.equals(explain, that.explain) &&
Objects.equals(version, that.version) &&
Objects.equals(trackScores, that.trackScores) &&
Objects.equals(storedFieldNames, that.storedFieldNames) &&
Objects.equals(storedFieldsContext, that.storedFieldsContext) &&
Objects.equals(docValueFields, that.docValueFields) &&
Objects.equals(scriptFields, that.scriptFields) &&
Objects.equals(fetchSourceContext, that.fetchSourceContext) &&
@ -705,7 +695,7 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
@Override
public int hashCode() {
return Objects.hash(name, nestedPath, parentChildType, from, size, explain, version, trackScores, storedFieldNames,
return Objects.hash(name, nestedPath, parentChildType, from, size, explain, version, trackScores, storedFieldsContext,
docValueFields, scriptFields, fetchSourceContext, sorts, highlightBuilder, query, childInnerHits);
}
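
For reference, a minimal usage sketch of the migrated API, assuming the builder classes from this branch are on the classpath (builder state only, no search is executed):

import java.util.Arrays;

import org.elasticsearch.index.query.InnerHitBuilder;
import org.elasticsearch.search.fetch.StoredFieldsContext;

public class InnerHitStoredFieldsSketch {
    public static void main(String[] args) {
        InnerHitBuilder innerHit = new InnerHitBuilder();
        // stored field names are now collected into a shared StoredFieldsContext
        innerHit.setStoredFieldNames(Arrays.asList("title", "date"));
        StoredFieldsContext ctx = innerHit.getStoredFieldsContext();
        System.out.println(ctx.fetchFields() + " " + ctx.fieldNames()); // true [title, date]
        // the deprecated setFieldNames(...) simply delegates to setStoredFieldNames(...)
    }
}
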

View File

@ -35,7 +35,7 @@ import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.fielddata.MultiGeoPointValues;
import org.elasticsearch.index.mapper.GeoPointFieldMapperLegacy;
import org.elasticsearch.index.mapper.LegacyGeoPointFieldMapper;
import java.io.IOException;
@ -60,7 +60,7 @@ public class GeoDistanceRangeQuery extends Query {
private final IndexGeoPointFieldData indexFieldData;
public GeoDistanceRangeQuery(GeoPoint point, Double lowerVal, Double upperVal, boolean includeLower,
boolean includeUpper, GeoDistance geoDistance, GeoPointFieldMapperLegacy.GeoPointFieldType fieldType,
boolean includeUpper, GeoDistance geoDistance, LegacyGeoPointFieldMapper.GeoPointFieldType fieldType,
IndexGeoPointFieldData indexFieldData, String optimizeBbox) {
this.lat = point.lat();
this.lon = point.lon();
@ -93,7 +93,7 @@ public class GeoDistanceRangeQuery extends Query {
if ("memory".equals(optimizeBbox)) {
boundingBoxFilter = null;
} else if ("indexed".equals(optimizeBbox)) {
boundingBoxFilter = IndexedGeoBoundingBoxQuery.create(distanceBoundingCheck.topLeft(),
boundingBoxFilter = LegacyIndexedGeoBoundingBoxQuery.create(distanceBoundingCheck.topLeft(),
distanceBoundingCheck.bottomRight(), fieldType);
distanceBoundingCheck = GeoDistance.ALWAYS_INSTANCE; // fine, we do the bounding box check using the filter
} else {

View File

@ -34,15 +34,17 @@ import java.util.Objects;
/**
*
* @deprecated This query is no longer used for geo_point indexes created after version 2.1
*/
public class InMemoryGeoBoundingBoxQuery extends Query {
@Deprecated
public class LegacyInMemoryGeoBoundingBoxQuery extends Query {
private final GeoPoint topLeft;
private final GeoPoint bottomRight;
private final IndexGeoPointFieldData indexFieldData;
public InMemoryGeoBoundingBoxQuery(GeoPoint topLeft, GeoPoint bottomRight, IndexGeoPointFieldData indexFieldData) {
public LegacyInMemoryGeoBoundingBoxQuery(GeoPoint topLeft, GeoPoint bottomRight, IndexGeoPointFieldData indexFieldData) {
this.topLeft = topLeft;
this.bottomRight = bottomRight;
this.indexFieldData = indexFieldData;
@ -87,7 +89,7 @@ public class InMemoryGeoBoundingBoxQuery extends Query {
if (sameClassAs(obj) == false) {
return false;
}
InMemoryGeoBoundingBoxQuery other = (InMemoryGeoBoundingBoxQuery) obj;
LegacyInMemoryGeoBoundingBoxQuery other = (LegacyInMemoryGeoBoundingBoxQuery) obj;
return fieldName().equalsIgnoreCase(other.fieldName())
&& topLeft.equals(other.topLeft)
&& bottomRight.equals(other.bottomRight);

View File

@ -24,15 +24,19 @@ import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.index.mapper.GeoPointFieldMapperLegacy;
import org.elasticsearch.index.mapper.LegacyGeoPointFieldMapper;
/**
*
* @deprecated This query is no longer used for geo_point indexes created after version 2.1
*/
public class IndexedGeoBoundingBoxQuery {
@Deprecated
public class LegacyIndexedGeoBoundingBoxQuery {
public static Query create(GeoPoint topLeft, GeoPoint bottomRight, GeoPointFieldMapperLegacy.GeoPointFieldType fieldType) {
public static Query create(GeoPoint topLeft, GeoPoint bottomRight, LegacyGeoPointFieldMapper.GeoPointFieldType fieldType) {
if (!fieldType.isLatLonEnabled()) {
throw new IllegalArgumentException("lat/lon is not enabled (indexed) for field [" + fieldType.name() + "], can't use indexed filter on it");
throw new IllegalArgumentException("lat/lon is not enabled (indexed) for field [" + fieldType.name()
+ "], can't use indexed filter on it");
}
//checks to see if bounding box crosses 180 degrees
if (topLeft.lon() > bottomRight.lon()) {
@ -42,7 +46,8 @@ public class IndexedGeoBoundingBoxQuery {
}
}
private static Query westGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight, GeoPointFieldMapperLegacy.GeoPointFieldType fieldType) {
private static Query westGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight,
LegacyGeoPointFieldMapper.GeoPointFieldType fieldType) {
BooleanQuery.Builder filter = new BooleanQuery.Builder();
filter.setMinimumNumberShouldMatch(1);
filter.add(fieldType.lonFieldType().rangeQuery(null, bottomRight.lon(), true, true), Occur.SHOULD);
@ -51,7 +56,8 @@ public class IndexedGeoBoundingBoxQuery {
return new ConstantScoreQuery(filter.build());
}
private static Query eastGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight, GeoPointFieldMapperLegacy.GeoPointFieldType fieldType) {
private static Query eastGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight,
LegacyGeoPointFieldMapper.GeoPointFieldType fieldType) {
BooleanQuery.Builder filter = new BooleanQuery.Builder();
filter.add(fieldType.lonFieldType().rangeQuery(topLeft.lon(), bottomRight.lon(), true, true), Occur.MUST);
filter.add(fieldType.latFieldType().rangeQuery(bottomRight.lat(), topLeft.lat(), true, true), Occur.MUST);
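
The dateline handling above is the interesting part: when topLeft.lon() > bottomRight.lon() the box crosses 180 degrees, so the longitude check becomes two SHOULD ranges with minimumNumberShouldMatch(1) instead of a single MUST range. A standalone sketch of just that branch (plain Java, no Lucene types):

public class DatelineSplitSketch {
    public static void main(String[] args) {
        double left = 170.0, right = -170.0; // topLeft.lon() > bottomRight.lon()
        if (left > right) {
            // crosses the dateline: lon <= right OR lon >= left (the two SHOULD clauses)
            System.out.println("split: [-180.0, " + right + "] OR [" + left + ", 180.0]");
        } else {
            // ordinary box: one lon range AND one lat range (the two MUST clauses)
            System.out.println("single: [" + left + ", " + right + "]");
        }
    }
}
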

View File

@ -85,7 +85,7 @@ final class LocalShardSnapshot implements Closeable {
}
@Override
public void renameFile(String source, String dest) throws IOException {
public void rename(String source, String dest) throws IOException {
throw new UnsupportedOperationException("this directory is read-only");
}

View File

@ -128,25 +128,14 @@ public final class SimilarityService extends AbstractIndexComponent {
static class PerFieldSimilarity extends PerFieldSimilarityWrapper {
private final Similarity defaultSimilarity;
private final Similarity baseSimilarity;
private final MapperService mapperService;
PerFieldSimilarity(Similarity defaultSimilarity, Similarity baseSimilarity, MapperService mapperService) {
super(baseSimilarity);
this.defaultSimilarity = defaultSimilarity;
this.baseSimilarity = baseSimilarity;
this.mapperService = mapperService;
}
@Override
public float coord(int overlap, int maxOverlap) {
return baseSimilarity.coord(overlap, maxOverlap);
}
@Override
public float queryNorm(float valueForNormalization) {
return baseSimilarity.queryNorm(valueForNormalization);
}
@Override
public Similarity get(String name) {
MappedFieldType fieldType = mapperService.fullName(name);

View File

@ -245,7 +245,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
/**
* Renames all the given files form the key of the map to the
* Renames all the given files from the key of the map to the
* value of the map. All successfully renamed files are removed from the map in-place.
*/
public void renameTempFilesSafe(Map<String, String> tempFileMap) throws IOException {
@ -282,10 +282,11 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
logger.debug("failed to delete file [{}]", ex, origFile);
}
// now, rename the files... and fail if it won't work
this.renameFile(tempFile, origFile);
directory.rename(tempFile, origFile);
final String remove = tempFileMap.remove(tempFile);
assert remove != null;
}
directory.syncMetaData();
} finally {
metadataLock.writeLock().unlock();
}
@ -297,11 +298,6 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref
return statsCache.getOrRefresh();
}
public void renameFile(String from, String to) throws IOException {
ensureOpen();
directory.renameFile(from, to);
}
/**
* Increments the refCount of this Store instance. RefCounts are used to determine when a
* Store can be closed safely, i.e. as soon as there are no more references. Be sure to always call a
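
Store#renameTempFilesSafe now leans on Lucene's Directory directly: rename(...) replaced renameFile(...) in Lucene 6.2, and the new syncMetaData() call persists the directory entry itself. A minimal sketch of that publish pattern against a plain FSDirectory (paths and file names illustrative):

import java.nio.file.Paths;
import java.util.Collections;

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;

public class AtomicRenameSketch {
    public static void main(String[] args) throws Exception {
        try (Directory dir = FSDirectory.open(Paths.get("/tmp/rename-demo"))) {
            // 1. write the payload under a temporary name and fsync the bytes
            try (IndexOutput out = dir.createOutput("recovery.tmp", IOContext.DEFAULT)) {
                out.writeString("payload");
            }
            dir.sync(Collections.singleton("recovery.tmp"));
            // 2. publish under the final name, then persist the rename itself
            dir.rename("recovery.tmp", "recovery.final");
            dir.syncMetaData();
        }
    }
}
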

View File

@ -82,16 +82,9 @@ public class TermVectorsService {
Engine.GetResult get = indexShard.get(new Engine.Get(request.realtime(), uidTerm).version(request.version()).versionType(request.versionType()));
Fields termVectorsByField = null;
boolean docFromTranslog = get.source() != null;
AggregatedDfs dfs = null;
TermVectorsFilter termVectorsFilter = null;
/* fetched from translog is treated as an artificial document */
if (docFromTranslog) {
request.doc(get.source().source, false);
termVectorsResponse.setDocVersion(get.version());
}
/* handle potential wildcards in fields */
if (request.selectedFields() != null) {
handleFieldWildcards(indexShard, request);
@ -103,12 +96,12 @@ public class TermVectorsService {
Versions.DocIdAndVersion docIdAndVersion = get.docIdAndVersion();
/* from an artificial document */
if (request.doc() != null) {
termVectorsByField = generateTermVectorsFromDoc(indexShard, request, !docFromTranslog);
termVectorsByField = generateTermVectorsFromDoc(indexShard, request);
// if no document indexed in shard, take the queried document itself for stats
if (topLevelFields == null) {
topLevelFields = termVectorsByField;
}
termVectorsResponse.setArtificial(!docFromTranslog);
termVectorsResponse.setArtificial(true);
termVectorsResponse.setExists(true);
}
/* or from an existing document */
@ -252,7 +245,7 @@ public class TermVectorsService {
return MultiFields.getFields(index.createSearcher().getIndexReader());
}
private static Fields generateTermVectorsFromDoc(IndexShard indexShard, TermVectorsRequest request, boolean doAllFields) throws IOException {
private static Fields generateTermVectorsFromDoc(IndexShard indexShard, TermVectorsRequest request) throws IOException {
// parse the document, at the moment we do update the mapping, just like percolate
ParsedDocument parsedDocument = parseDocument(indexShard, indexShard.shardId().getIndexName(), request.type(), request.doc());
@ -265,9 +258,6 @@ public class TermVectorsService {
if (!isValidField(fieldType)) {
continue;
}
if (request.selectedFields() == null && !doAllFields && !fieldType.storeTermVectors()) {
continue;
}
if (request.selectedFields() != null && !request.selectedFields().contains(field.name())) {
continue;
}

View File

@ -22,9 +22,7 @@ package org.elasticsearch.index.translog;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TwoPhaseCommit;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.RamUsageEstimator;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.bytes.BytesArray;
@ -55,8 +53,6 @@ import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.nio.file.StandardOpenOption;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ScheduledFuture;
@ -393,7 +389,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
* the current or from the currently committing translog file. If the location is in a file that has already
* been closed or even removed the method will return <code>null</code> instead.
*/
public Translog.Operation read(Location location) {
Translog.Operation read(Location location) { // TODO this is only here for testing - we can remove it?
try (ReleasableLock lock = readLock.acquire()) {
final BaseTranslogReader reader;
final long currentGeneration = current.getGeneration();
@ -655,9 +651,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
}
public static class Location implements Accountable, Comparable<Location> {
private static final long RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(Location.class);
public static class Location implements Comparable<Location> {
public final long generation;
public final long translogLocation;
@ -669,17 +663,6 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
this.size = size;
}
@Override
public long ramBytesUsed() {
return RAM_BYTES_USED;
}
@Override
public Collection<Accountable> getChildResources() {
return Collections.emptyList();
}
@Override
public String toString() {
return "[generation: " + generation + ", location: " + translogLocation + ", size: " + size + "]";
}
@ -1194,7 +1177,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
}
@Override
public void prepareCommit() throws IOException {
public long prepareCommit() throws IOException {
try (ReleasableLock lock = writeLock.acquire()) {
ensureOpen();
if (currentCommittingGeneration != NOT_SET_GENERATION) {
@ -1217,10 +1200,11 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
IOUtils.closeWhileHandlingException(this); // tragic event
throw e;
}
return 0L;
}
@Override
public void commit() throws IOException {
public long commit() throws IOException {
try (ReleasableLock lock = writeLock.acquire()) {
ensureOpen();
if (currentCommittingGeneration == NOT_SET_GENERATION) {
@ -1233,6 +1217,7 @@ public class Translog extends AbstractIndexShardComponent implements IndexShardC
currentCommittingGeneration = NOT_SET_GENERATION;
trimUnreferencedReaders();
}
return 0;
}
void trimUnreferencedReaders() {
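
The signature changes above (void -> long on prepareCommit and commit) track Lucene 6.2, where the TwoPhaseCommit interface started returning sequence numbers; the translog has no meaningful number to report yet, hence the hard-coded 0. A minimal sketch of the updated contract, not the translog itself:

import java.io.IOException;

import org.apache.lucene.index.TwoPhaseCommit;

public class NoopTwoPhaseCommit implements TwoPhaseCommit {
    @Override
    public long prepareCommit() throws IOException {
        // phase 1: flush pending state so that commit() cannot fail on data
        return 0L; // sequence number, when the resource has one
    }

    @Override
    public long commit() throws IOException {
        // phase 2: make the prepared state durable
        return 0L;
    }

    @Override
    public void rollback() throws IOException {
        // discard whatever prepareCommit() staged
    }
}
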

View File

@ -19,6 +19,7 @@
package org.elasticsearch.indices.analysis;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.ar.ArabicAnalyzer;
import org.apache.lucene.analysis.bg.BulgarianAnalyzer;
import org.apache.lucene.analysis.br.BrazilianAnalyzer;
@ -58,7 +59,6 @@ import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.analysis.sv.SwedishAnalyzer;
import org.apache.lucene.analysis.th.ThaiAnalyzer;
import org.apache.lucene.analysis.tr.TurkishAnalyzer;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.Version;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.index.analysis.PatternAnalyzer;

View File

@ -18,6 +18,9 @@
*/
package org.elasticsearch.indices.analysis;
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.LowerCaseFilter;
import org.apache.lucene.analysis.StopFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.ar.ArabicNormalizationFilter;
import org.apache.lucene.analysis.ar.ArabicStemFilter;
@ -27,9 +30,7 @@ import org.apache.lucene.analysis.cjk.CJKWidthFilter;
import org.apache.lucene.analysis.ckb.SoraniNormalizationFilter;
import org.apache.lucene.analysis.commongrams.CommonGramsFilter;
import org.apache.lucene.analysis.core.DecimalDigitFilter;
import org.apache.lucene.analysis.core.LowerCaseFilter;
import org.apache.lucene.analysis.core.StopAnalyzer;
import org.apache.lucene.analysis.core.StopFilter;
import org.apache.lucene.analysis.core.UpperCaseFilter;
import org.apache.lucene.analysis.cz.CzechStemFilter;
import org.apache.lucene.analysis.de.GermanNormalizationFilter;
@ -60,7 +61,6 @@ import org.apache.lucene.analysis.snowball.SnowballFilter;
import org.apache.lucene.analysis.standard.ClassicFilter;
import org.apache.lucene.analysis.standard.StandardFilter;
import org.apache.lucene.analysis.tr.ApostropheFilter;
import org.apache.lucene.analysis.util.CharArraySet;
import org.apache.lucene.analysis.util.ElisionFilter;
import org.elasticsearch.Version;
import org.elasticsearch.index.analysis.DelimitedPayloadTokenFilterFactory;
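
Both import blocks in this file and the previous one track the Lucene 6.2 package moves: CharArraySet, LowerCaseFilter and StopFilter now live directly under org.apache.lucene.analysis. A small sketch against the new location:

import java.util.Arrays;

import org.apache.lucene.analysis.CharArraySet;

public class StopSetSketch {
    public static void main(String[] args) {
        // second argument makes the set case-insensitive
        CharArraySet stopWords = new CharArraySet(Arrays.asList("a", "an", "the"), true);
        System.out.println(stopWords.contains("The")); // true
    }
}
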

View File

@ -33,6 +33,7 @@ import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.RateLimiter;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.Version;
@ -1636,6 +1637,11 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
} else {
stream = new RateLimitingInputStream(partSliceStream, restoreRateLimiter, restoreRateLimitingTimeInNanos::inc);
}
// TODO: why does the target file sometimes already exist? Simon says: I think, this can happen if you fail a shard and
// it's not cleaned up yet, the restore process tries to reuse files
IOUtils.deleteFilesIgnoringExceptions(store.directory(), fileInfo.physicalName());
try (final IndexOutput indexOutput = store.createVerifyingOutput(fileInfo.physicalName(), fileInfo.metadata(), IOContext.DEFAULT)) {
final byte[] buffer = new byte[BUFFER_SIZE];
int length;
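
The delete-before-restore added above is a small but important guard: a previously failed shard restore can leave the target file behind, and createVerifyingOutput would trip over it. Sketched against a plain Lucene Directory (the segment file name is illustrative):

import java.nio.file.Paths;

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.IOUtils;

public class DeleteBeforeRestoreSketch {
    public static void main(String[] args) throws Exception {
        try (Directory dir = FSDirectory.open(Paths.get("/tmp/restore-demo"))) {
            // quietly drop a possible leftover from an earlier failed restore
            IOUtils.deleteFilesIgnoringExceptions(dir, "_0.cfs");
            try (IndexOutput out = dir.createOutput("_0.cfs", IOContext.DEFAULT)) {
                out.writeString("restored bytes"); // stand-in for the snapshot stream copy
            }
        }
    }
}
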

View File

@ -24,7 +24,6 @@ import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject;
@ -33,7 +32,6 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestChannel;
import org.elasticsearch.rest.RestController;
@ -42,13 +40,12 @@ import org.elasticsearch.rest.action.RestActions;
import org.elasticsearch.rest.action.RestStatusToXContentListener;
import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.SearchRequestParsers;
import org.elasticsearch.search.aggregations.AggregatorParsers;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.search.suggest.SuggestBuilder;
import org.elasticsearch.search.suggest.Suggesters;
import org.elasticsearch.search.suggest.term.TermSuggestionBuilder.SuggestMode;
import java.io.IOException;
@ -178,18 +175,11 @@ public class RestSearchAction extends BaseRestHandler {
"if the field is not stored");
}
String sField = request.param("stored_fields");
if (sField != null) {
if (!Strings.hasText(sField)) {
searchSourceBuilder.noStoredFields();
} else {
String[] sFields = Strings.splitStringByCommaToArray(sField);
if (sFields != null) {
for (String field : sFields) {
searchSourceBuilder.storedField(field);
}
}
}
StoredFieldsContext storedFieldsContext =
StoredFieldsContext.fromRestRequest(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), request);
if (storedFieldsContext != null) {
searchSourceBuilder.storedFields(storedFieldsContext);
}
String sDocValueFields = request.param("docvalue_fields");
if (sDocValueFields == null) {
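
The hand-rolled comma splitting above is replaced by StoredFieldsContext.fromRestRequest. A standalone sketch of the parsing rule it stands for, with a hypothetical helper that is not the actual ES method:

import java.util.Arrays;
import java.util.List;

public class StoredFieldsParamSketch {
    // hypothetical stand-in for StoredFieldsContext.fromRestRequest: an absent
    // parameter yields null so the caller leaves the default _source handling alone
    static List<String> parseStoredFields(String param) {
        if (param == null) {
            return null;
        }
        return Arrays.asList(param.split(","));
    }

    public static void main(String[] args) {
        System.out.println(parseStoredFields("user,date")); // [user, date]
        System.out.println(parseStoredFields("_none_"));    // becomes a disabled context
        System.out.println(parseStoredFields(null));        // null -> parameter not present
    }
}
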

View File

@ -67,8 +67,8 @@ import org.elasticsearch.search.fetch.QueryFetchSearchResult;
import org.elasticsearch.search.fetch.ScrollQueryFetchSearchResult;
import org.elasticsearch.search.fetch.ShardFetchRequest;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsFetchSubPhase;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.DocValueField;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsFetchSubPhase;
import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext.ScriptField;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.internal.DefaultSearchContext;
@ -729,9 +729,6 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
throw new SearchContextException(context, "failed to create RescoreSearchContext", e);
}
}
if (source.storedFields() != null) {
context.fieldNames().addAll(source.storedFields());
}
if (source.explain() != null) {
context.explain(source.explain());
}
@ -823,6 +820,18 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
}
context.sliceBuilder(source.slice());
}
if (source.storedFields() != null) {
if (source.storedFields().fetchFields() == false) {
if (context.version()) {
throw new SearchContextException(context, "`stored_fields` cannot be disabled if version is requested");
}
if (context.sourceRequested()) {
throw new SearchContextException(context, "`stored_fields` cannot be disabled if _source is requested");
}
}
context.storedFieldsContext(source.storedFields());
}
}
/**

View File

@ -37,6 +37,7 @@ import org.elasticsearch.search.aggregations.InternalAggregation.Type;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.sort.ScoreSortBuilder;
@ -63,7 +64,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
private boolean trackScores = false;
private List<SortBuilder<?>> sorts = null;
private HighlightBuilder highlightBuilder;
private List<String> fieldNames;
private StoredFieldsContext storedFieldsContext;
private List<String> fieldDataFields;
private Set<ScriptField> scriptFields;
private FetchSourceContext fetchSourceContext;
@ -86,13 +87,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
fieldDataFields.add(in.readString());
}
}
if (in.readBoolean()) {
int size = in.readVInt();
fieldNames = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
fieldNames.add(in.readString());
}
}
storedFieldsContext = in.readOptionalWriteable(StoredFieldsContext::new);
from = in.readVInt();
highlightBuilder = in.readOptionalWriteable(HighlightBuilder::new);
if (in.readBoolean()) {
@ -126,14 +121,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
out.writeString(fieldName);
}
}
boolean hasFieldNames = fieldNames != null;
out.writeBoolean(hasFieldNames);
if (hasFieldNames) {
out.writeVInt(fieldNames.size());
for (String fieldName : fieldNames) {
out.writeString(fieldName);
}
}
out.writeOptionalWriteable(storedFieldsContext);
out.writeVInt(from);
out.writeOptionalWriteable(highlightBuilder);
boolean hasScriptFields = scriptFields != null;
@ -355,47 +343,34 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
}
/**
* Adds a field to load and return (note, it must be stored) as part of
* the search request. If none are specified, the source of the document
* will be return.
* Adds a stored field to load and return (note, it must be stored) as part of the search request.
* To disable the stored fields entirely (source and metadata fields) use {@code storedField("_none_")}.
*/
public TopHitsAggregationBuilder field(String field) {
if (field == null) {
throw new IllegalArgumentException("[field] must not be null: [" + name + "]");
}
if (fieldNames == null) {
fieldNames = new ArrayList<>();
}
fieldNames.add(field);
return this;
public TopHitsAggregationBuilder storedField(String field) {
return storedFields(Collections.singletonList(field));
}
/**
* Sets the fields to load and return as part of the search request. If
* none are specified, the source of the document will be returned.
* Sets the stored fields to load and return as part of the search request.
* To disable the stored fields entirely (source and metadata fields) use {@code storedField("_none_")}.
*/
public TopHitsAggregationBuilder fields(List<String> fields) {
public TopHitsAggregationBuilder storedFields(List<String> fields) {
if (fields == null) {
throw new IllegalArgumentException("[fields] must not be null: [" + name + "]");
}
this.fieldNames = fields;
if (storedFieldsContext == null) {
storedFieldsContext = StoredFieldsContext.fromList(fields);
} else {
storedFieldsContext.addFieldNames(fields);
}
return this;
}
/**
* Sets no fields to be loaded, resulting in only id and type to be
* returned per field.
* Gets the stored fields context
*/
public TopHitsAggregationBuilder noFields() {
this.fieldNames = Collections.emptyList();
return this;
}
/**
* Gets the fields to load and return as part of the search request.
*/
public List<String> fields() {
return fieldNames;
public StoredFieldsContext storedFields() {
return storedFieldsContext;
}
/**
@ -552,8 +527,9 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
@Override
protected TopHitsAggregatorFactory doBuild(AggregationContext context, AggregatorFactory<?> parent, Builder subfactoriesBuilder)
throws IOException {
return new TopHitsAggregatorFactory(name, type, from, size, explain, version, trackScores, sorts, highlightBuilder, fieldNames,
fieldDataFields, scriptFields, fetchSourceContext, context, parent, subfactoriesBuilder, metaData);
return new TopHitsAggregatorFactory(name, type, from, size, explain, version, trackScores, sorts, highlightBuilder,
storedFieldsContext, fieldDataFields, scriptFields, fetchSourceContext, context,
parent, subfactoriesBuilder, metaData);
}
@Override
@ -566,16 +542,8 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
if (fetchSourceContext != null) {
builder.field(SearchSourceBuilder._SOURCE_FIELD.getPreferredName(), fetchSourceContext);
}
if (fieldNames != null) {
if (fieldNames.size() == 1) {
builder.field(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), fieldNames.get(0));
} else {
builder.startArray(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName());
for (String fieldName : fieldNames) {
builder.value(fieldName);
}
builder.endArray();
}
if (storedFieldsContext != null) {
storedFieldsContext.toXContent(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), builder);
}
if (fieldDataFields != null) {
builder.startArray(SearchSourceBuilder.DOCVALUE_FIELDS_FIELD.getPreferredName());
@ -630,9 +598,8 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
} else if (context.getParseFieldMatcher().match(currentFieldName, SearchSourceBuilder._SOURCE_FIELD)) {
factory.fetchSource(FetchSourceContext.parse(context));
} else if (context.getParseFieldMatcher().match(currentFieldName, SearchSourceBuilder.STORED_FIELDS_FIELD)) {
List<String> fieldNames = new ArrayList<>();
fieldNames.add(parser.text());
factory.fields(fieldNames);
factory.storedFieldsContext =
StoredFieldsContext.fromXContent(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), context);
} else if (context.getParseFieldMatcher().match(currentFieldName, SearchSourceBuilder.SORT_FIELD)) {
factory.sort(parser.text());
} else {
@ -696,16 +663,8 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
} else if (token == XContentParser.Token.START_ARRAY) {
if (context.getParseFieldMatcher().match(currentFieldName, SearchSourceBuilder.STORED_FIELDS_FIELD)) {
List<String> fieldNames = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if (token == XContentParser.Token.VALUE_STRING) {
fieldNames.add(parser.text());
} else {
throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.VALUE_STRING
+ "] in [" + currentFieldName + "] but found [" + token + "]", parser.getTokenLocation());
}
}
factory.fields(fieldNames);
factory.storedFieldsContext =
StoredFieldsContext.fromXContent(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), context);
} else if (context.getParseFieldMatcher().match(currentFieldName, SearchSourceBuilder.DOCVALUE_FIELDS_FIELD)) {
List<String> fieldDataFields = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
@ -736,8 +695,8 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
@Override
protected int doHashCode() {
return Objects.hash(explain, fetchSourceContext, fieldDataFields, fieldNames, from, highlightBuilder, scriptFields, size, sorts,
trackScores, version);
return Objects.hash(explain, fetchSourceContext, fieldDataFields, storedFieldsContext, from, highlightBuilder,
scriptFields, size, sorts, trackScores, version);
}
@Override
@ -746,7 +705,7 @@ public class TopHitsAggregationBuilder extends AbstractAggregationBuilder<TopHit
return Objects.equals(explain, other.explain)
&& Objects.equals(fetchSourceContext, other.fetchSourceContext)
&& Objects.equals(fieldDataFields, other.fieldDataFields)
&& Objects.equals(fieldNames, other.fieldNames)
&& Objects.equals(storedFieldsContext, other.storedFieldsContext)
&& Objects.equals(from, other.from)
&& Objects.equals(highlightBuilder, other.highlightBuilder)
&& Objects.equals(scriptFields, other.scriptFields)
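
A hedged builder-level usage sketch of the renamed methods (assuming the aggregation classes from this branch; no search is executed):

import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregationBuilder;

public class TopHitsStoredFieldsSketch {
    public static void main(String[] args) {
        TopHitsAggregationBuilder topHits = AggregationBuilders.topHits("latest")
                .storedField("title")   // repeated calls accumulate in one context
                .storedField("date");
        System.out.println(topHits.storedFields().fieldNames()); // [title, date]
        // storedField("_none_") on a fresh builder disables stored fields entirely;
        // per StoredFieldsContext it cannot be combined with other field names
    }
}
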

View File

@ -19,6 +19,7 @@
package org.elasticsearch.search.aggregations.metrics.tophits;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.aggregations.Aggregator;
@ -29,9 +30,9 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.DocValueField;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsFetchSubPhase;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.DocValueField;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.internal.SubSearchContext;
import org.elasticsearch.search.sort.SortAndFormats;
@ -53,15 +54,16 @@ public class TopHitsAggregatorFactory extends AggregatorFactory<TopHitsAggregato
private final boolean trackScores;
private final List<SortBuilder<?>> sorts;
private final HighlightBuilder highlightBuilder;
private final List<String> fieldNames;
private final StoredFieldsContext storedFieldsContext;
private final List<String> docValueFields;
private final Set<ScriptField> scriptFields;
private final FetchSourceContext fetchSourceContext;
public TopHitsAggregatorFactory(String name, Type type, int from, int size, boolean explain, boolean version, boolean trackScores,
List<SortBuilder<?>> sorts, HighlightBuilder highlightBuilder, List<String> fieldNames, List<String> docValueFields,
Set<ScriptField> scriptFields, FetchSourceContext fetchSourceContext, AggregationContext context, AggregatorFactory<?> parent,
AggregatorFactories.Builder subFactories, Map<String, Object> metaData) throws IOException {
List<SortBuilder<?>> sorts, HighlightBuilder highlightBuilder, StoredFieldsContext storedFieldsContext,
List<String> docValueFields, Set<ScriptField> scriptFields, FetchSourceContext fetchSourceContext,
AggregationContext context, AggregatorFactory<?> parent, AggregatorFactories.Builder subFactories,
Map<String, Object> metaData) throws IOException {
super(name, type, context, parent, subFactories, metaData);
this.from = from;
this.size = size;
@ -70,7 +72,7 @@ public class TopHitsAggregatorFactory extends AggregatorFactory<TopHitsAggregato
this.trackScores = trackScores;
this.sorts = sorts;
this.highlightBuilder = highlightBuilder;
this.fieldNames = fieldNames;
this.storedFieldsContext = storedFieldsContext;
this.docValueFields = docValueFields;
this.scriptFields = scriptFields;
this.fetchSourceContext = fetchSourceContext;
@ -92,8 +94,8 @@ public class TopHitsAggregatorFactory extends AggregatorFactory<TopHitsAggregato
subSearchContext.sort(optionalSort.get());
}
}
if (fieldNames != null) {
subSearchContext.fieldNames().addAll(fieldNames);
if (storedFieldsContext != null) {
subSearchContext.storedFieldsContext(storedFieldsContext);
}
if (docValueFields != null) {
DocValueFieldsContext docValueFieldsContext = subSearchContext

View File

@ -44,6 +44,7 @@ import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorParsers;
import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.internal.SearchContext;
@ -148,7 +149,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
private TimeValue timeout = null;
private int terminateAfter = SearchContext.DEFAULT_TERMINATE_AFTER;
private List<String> storedFieldNames;
private StoredFieldsContext storedFieldsContext;
private List<String> docValueFields;
private List<ScriptField> scriptFields;
private FetchSourceContext fetchSourceContext;
@ -184,7 +185,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
explain = in.readOptionalBoolean();
fetchSourceContext = in.readOptionalStreamable(FetchSourceContext::new);
docValueFields = (List<String>) in.readGenericValue();
storedFieldNames = (List<String>) in.readGenericValue();
storedFieldsContext = in.readOptionalWriteable(StoredFieldsContext::new);
from = in.readVInt();
highlightBuilder = in.readOptionalWriteable(HighlightBuilder::new);
boolean hasIndexBoost = in.readBoolean();
@ -244,7 +245,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
out.writeOptionalBoolean(explain);
out.writeOptionalStreamable(fetchSourceContext);
out.writeGenericValue(docValueFields);
out.writeGenericValue(storedFieldNames);
out.writeOptionalWriteable(storedFieldsContext);
out.writeVInt(from);
out.writeOptionalWriteable(highlightBuilder);
boolean hasIndexBoost = indexBoost != null;
@ -711,11 +712,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
* returned.
*/
public SearchSourceBuilder storedField(String name) {
if (storedFieldNames == null) {
storedFieldNames = new ArrayList<>();
}
storedFieldNames.add(name);
return this;
return storedFields(Collections.singletonList(name));
}
/**
@ -723,24 +720,27 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
* are specified, the source of the document will be returned.
*/
public SearchSourceBuilder storedFields(List<String> fields) {
this.storedFieldNames = fields;
if (storedFieldsContext == null) {
storedFieldsContext = StoredFieldsContext.fromList(fields);
} else {
storedFieldsContext.addFieldNames(fields);
}
return this;
}
/**
* Sets no stored fields to be loaded, resulting in only id and type to be returned
* per field.
* Indicates how the stored fields should be fetched.
*/
public SearchSourceBuilder noStoredFields() {
this.storedFieldNames = Collections.emptyList();
public SearchSourceBuilder storedFields(StoredFieldsContext context) {
storedFieldsContext = context;
return this;
}
/**
* Gets the stored fields to load and return as part of the search request.
* Gets the stored fields context.
*/
public List<String> storedFields() {
return storedFieldNames;
public StoredFieldsContext storedFields() {
return storedFieldsContext;
}
@ -912,7 +912,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
rewrittenBuilder.ext = ext;
rewrittenBuilder.fetchSourceContext = fetchSourceContext;
rewrittenBuilder.docValueFields = docValueFields;
rewrittenBuilder.storedFieldNames = storedFieldNames;
rewrittenBuilder.storedFieldsContext = storedFieldsContext;
rewrittenBuilder.from = from;
rewrittenBuilder.highlightBuilder = highlightBuilder;
rewrittenBuilder.indexBoost = indexBoost;
@ -973,7 +973,8 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
} else if (context.getParseFieldMatcher().match(currentFieldName, _SOURCE_FIELD)) {
fetchSourceContext = FetchSourceContext.parse(context);
} else if (context.getParseFieldMatcher().match(currentFieldName, STORED_FIELDS_FIELD)) {
storedField(parser.text());
storedFieldsContext =
StoredFieldsContext.fromXContent(SearchSourceBuilder.STORED_FIELDS_FIELD.getPreferredName(), context);
} else if (context.getParseFieldMatcher().match(currentFieldName, SORT_FIELD)) {
sort(parser.text());
} else if (context.getParseFieldMatcher().match(currentFieldName, PROFILE_FIELD)) {
@ -1033,15 +1034,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
}
} else if (token == XContentParser.Token.START_ARRAY) {
if (context.getParseFieldMatcher().match(currentFieldName, STORED_FIELDS_FIELD)) {
storedFieldNames = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if (token == XContentParser.Token.VALUE_STRING) {
storedFieldNames.add(parser.text());
} else {
throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.VALUE_STRING + "] in ["
+ currentFieldName + "] but found [" + token + "]", parser.getTokenLocation());
}
}
storedFieldsContext = StoredFieldsContext.fromXContent(STORED_FIELDS_FIELD.getPreferredName(), context);
} else if (context.getParseFieldMatcher().match(currentFieldName, DOCVALUE_FIELDS_FIELD)) {
docValueFields = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
@ -1141,16 +1134,8 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
builder.field(_SOURCE_FIELD.getPreferredName(), fetchSourceContext);
}
if (storedFieldNames != null) {
if (storedFieldNames.size() == 1) {
builder.field(STORED_FIELDS_FIELD.getPreferredName(), storedFieldNames.get(0));
} else {
builder.startArray(STORED_FIELDS_FIELD.getPreferredName());
for (String fieldName : storedFieldNames) {
builder.value(fieldName);
}
builder.endArray();
}
if (storedFieldsContext != null) {
storedFieldsContext.toXContent(STORED_FIELDS_FIELD.getPreferredName(), builder);
}
if (docValueFields != null) {
@ -1349,7 +1334,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
@Override
public int hashCode() {
return Objects.hash(aggregations, explain, fetchSourceContext, docValueFields, storedFieldNames, from,
return Objects.hash(aggregations, explain, fetchSourceContext, docValueFields, storedFieldsContext, from,
highlightBuilder, indexBoost, minScore, postQueryBuilder, queryBuilder, rescoreBuilders, scriptFields,
size, sorts, searchAfterBuilder, sliceBuilder, stats, suggestBuilder, terminateAfter, timeout, trackScores, version, profile);
}
@ -1367,7 +1352,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
&& Objects.equals(explain, other.explain)
&& Objects.equals(fetchSourceContext, other.fetchSourceContext)
&& Objects.equals(docValueFields, other.docValueFields)
&& Objects.equals(storedFieldNames, other.storedFieldNames)
&& Objects.equals(storedFieldsContext, other.storedFieldsContext)
&& Objects.equals(from, other.from)
&& Objects.equals(highlightBuilder, other.highlightBuilder)
&& Objects.equals(indexBoost, other.indexBoost)
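
And the equivalent sketch at the top-level request (same caveats; note the new SearchService check above rejects a disabled context when version or _source is also requested):

import java.util.Arrays;

import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.StoredFieldsContext;

public class SourceStoredFieldsSketch {
    public static void main(String[] args) {
        SearchSourceBuilder source = new SearchSourceBuilder()
                .storedField("user")                   // creates the StoredFieldsContext
                .storedFields(Arrays.asList("title")); // further names accumulate
        StoredFieldsContext ctx = source.storedFields();
        System.out.println(ctx.fetchFields() + " " + ctx.fieldNames()); // true [user, title]
    }
}
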

View File

@ -92,19 +92,22 @@ public class FetchPhase implements SearchPhase {
@Override
public void execute(SearchContext context) {
FieldsVisitor fieldsVisitor;
final FieldsVisitor fieldsVisitor;
Set<String> fieldNames = null;
List<String> fieldNamePatterns = null;
if (!context.hasFieldNames()) {
StoredFieldsContext storedFieldsContext = context.storedFieldsContext();
if (storedFieldsContext == null) {
// no fields specified, default to return source if no explicit indication
if (!context.hasScriptFields() && !context.hasFetchSourceContext()) {
context.fetchSourceContext(new FetchSourceContext(true));
}
fieldsVisitor = new FieldsVisitor(context.sourceRequested());
} else if (context.fieldNames().isEmpty()) {
fieldsVisitor = new FieldsVisitor(context.sourceRequested());
} else if (storedFieldsContext.fetchFields() == false) {
// disable stored fields entirely
fieldsVisitor = null;
} else {
for (String fieldName : context.fieldNames()) {
for (String fieldName : context.storedFieldsContext().fieldNames()) {
if (fieldName.equals(SourceFieldMapper.NAME)) {
if (context.hasFetchSourceContext()) {
context.fetchSourceContext().fetchSource(true);
@ -133,8 +136,13 @@ public class FetchPhase implements SearchPhase {
}
}
boolean loadSource = context.sourceRequested();
fieldsVisitor = new CustomFieldsVisitor(fieldNames == null ? Collections.emptySet() : fieldNames,
fieldNamePatterns == null ? Collections.emptyList() : fieldNamePatterns, loadSource);
if (fieldNames == null && fieldNamePatterns == null) {
// empty list specified, default to disable _source if no explicit indication
fieldsVisitor = new FieldsVisitor(loadSource);
} else {
fieldsVisitor = new CustomFieldsVisitor(fieldNames == null ? Collections.emptySet() : fieldNames,
fieldNamePatterns == null ? Collections.emptyList() : fieldNamePatterns, loadSource);
}
}
InternalSearchHit[] hits = new InternalSearchHit[context.docIdsToLoadSize()];
@ -182,6 +190,9 @@ public class FetchPhase implements SearchPhase {
}
private InternalSearchHit createSearchHit(SearchContext context, FieldsVisitor fieldsVisitor, int docId, int subDocId, LeafReaderContext subReaderContext) {
if (fieldsVisitor == null) {
return new InternalSearchHit(docId);
}
loadStoredFields(context, subReaderContext, fieldsVisitor, subDocId);
fieldsVisitor.postProcess(context.mapperService());
@ -273,9 +284,9 @@ public class FetchPhase implements SearchPhase {
private Map<String, SearchHitField> getSearchFields(SearchContext context, int nestedSubDocId, Set<String> fieldNames, List<String> fieldNamePatterns, LeafReaderContext subReaderContext) {
Map<String, SearchHitField> searchFields = null;
if (context.hasFieldNames() && !context.fieldNames().isEmpty()) {
if (context.hasStoredFields() && !context.storedFieldsContext().fieldNames().isEmpty()) {
FieldsVisitor nestedFieldsVisitor = new CustomFieldsVisitor(fieldNames == null ? Collections.emptySet() : fieldNames,
fieldNamePatterns == null ? Collections.emptyList() : fieldNamePatterns, false);
fieldNamePatterns == null ? Collections.emptyList() : fieldNamePatterns, false);
if (nestedFieldsVisitor != null) {
loadStoredFields(context, subReaderContext, nestedFieldsVisitor, nestedSubDocId);
nestedFieldsVisitor.postProcess(context.mapperService());
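
FetchPhase.execute now distinguishes three cases: no stored-fields context at all (fall back to _source), a disabled context (fieldsVisitor == null, hits carry only the doc id), and an explicit field list. A standalone sketch of that decision tree (enum and names illustrative, not ES types):

import java.util.Arrays;
import java.util.List;

public class FieldsVisitorChoiceSketch {
    enum Choice { SOURCE_ONLY, NO_STORED_FIELDS, CUSTOM_FIELDS }

    // mirrors the branching at the top of FetchPhase.execute
    static Choice choose(Boolean fetchFields, List<String> fieldNames) {
        if (fetchFields == null) {           // no stored_fields in the request
            return Choice.SOURCE_ONLY;
        }
        if (fetchFields == false) {          // stored_fields: _none_
            return Choice.NO_STORED_FIELDS;
        }
        return fieldNames.isEmpty() ? Choice.SOURCE_ONLY : Choice.CUSTOM_FIELDS;
    }

    public static void main(String[] args) {
        System.out.println(choose(null, null));                    // SOURCE_ONLY
        System.out.println(choose(false, null));                   // NO_STORED_FIELDS
        System.out.println(choose(true, Arrays.asList("title")));  // CUSTOM_FIELDS
    }
}
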

View File

@ -0,0 +1,195 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.fetch;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.rest.RestRequest;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
/**
* Context used to fetch the {@code stored_fields}.
*/
public class StoredFieldsContext implements Writeable {
public static final String _NONE_ = "_none_";
private final List<String> fieldNames;
private boolean fetchFields;
private StoredFieldsContext(boolean fetchFields) {
this.fetchFields = fetchFields;
this.fieldNames = null;
}
private StoredFieldsContext(List<String> fieldNames) {
Objects.requireNonNull(fieldNames, "fieldNames must not be null");
this.fetchFields = true;
this.fieldNames = new ArrayList<>(fieldNames);
}
public StoredFieldsContext(StoredFieldsContext other) {
this.fetchFields = other.fetchFields();
if (other.fieldNames() != null) {
this.fieldNames = new ArrayList<>(other.fieldNames());
} else {
this.fieldNames = null;
}
}
public StoredFieldsContext(StreamInput in) throws IOException {
this.fetchFields = in.readBoolean();
if (fetchFields) {
this.fieldNames = (List<String>) in.readGenericValue();
} else {
this.fieldNames = null;
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeBoolean(fetchFields);
if (fetchFields) {
out.writeGenericValue(fieldNames);
}
}
/**
* Gets the field names to load and return as part of the search request.
*/
public List<String> fieldNames() {
return fieldNames;
}
/**
* Adds the field names {@code fieldNames} to the list of fields to load.
*/
public StoredFieldsContext addFieldNames(List<String> fieldNames) {
if (fetchFields == false || fieldNames.contains(_NONE_)) {
throw new IllegalArgumentException("cannot combine _none_ with other fields");
}
this.fieldNames.addAll(fieldNames);
return this;
}
/**
* Adds a field name {@code field} to the list of fields to load.
*/
public StoredFieldsContext addFieldName(String field) {
if (fetchFields == false || _NONE_.equals(field)) {
throw new IllegalArgumentException("cannot combine _none_ with other fields");
}
this.fieldNames.add(field);
return this;
}
/**
* Returns true if the stored fields should be fetched, false otherwise.
*/
public boolean fetchFields() {
return fetchFields;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
StoredFieldsContext that = (StoredFieldsContext) o;
if (fetchFields != that.fetchFields) return false;
return fieldNames != null ? fieldNames.equals(that.fieldNames) : that.fieldNames == null;
}
@Override
public int hashCode() {
int result = fieldNames != null ? fieldNames.hashCode() : 0;
result = 31 * result + (fetchFields ? 1 : 0);
return result;
}
public void toXContent(String preferredName, XContentBuilder builder) throws IOException {
if (fetchFields == false) {
builder.field(preferredName, _NONE_);
}
if (fieldNames != null) {
if (fieldNames.size() == 1) {
builder.field(preferredName, fieldNames.get(0));
} else {
builder.startArray(preferredName);
for (String fieldName : fieldNames) {
builder.value(fieldName);
}
builder.endArray();
}
}
}
public static StoredFieldsContext fromList(List<String> fieldNames) {
if (fieldNames.size() == 1 && _NONE_.equals(fieldNames.get(0))) {
return new StoredFieldsContext(false);
}
if (fieldNames.contains(_NONE_)) {
throw new IllegalArgumentException("cannot combine _none_ with other fields");
}
return new StoredFieldsContext(fieldNames);
}
public static StoredFieldsContext fromXContent(String fieldName, QueryParseContext context) throws IOException {
XContentParser parser = context.parser();
XContentParser.Token token = parser.currentToken();
if (token == XContentParser.Token.VALUE_STRING) {
return fromList(Collections.singletonList(parser.text()));
} else if (token == XContentParser.Token.START_ARRAY) {
ArrayList<String> list = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
list.add(parser.text());
}
return fromList(list);
} else {
throw new ParsingException(parser.getTokenLocation(),
"Expected [" + XContentParser.Token.VALUE_STRING + "] or ["
+ XContentParser.Token.START_ARRAY + "] in [" + fieldName + "] but found [" + token + "]",
parser.getTokenLocation());
}
}
public static StoredFieldsContext fromRestRequest(String name, RestRequest request) {
String sField = request.param(name);
if (sField != null) {
String[] sFields = Strings.splitStringByCommaToArray(sField);
return fromList(Arrays.asList(sFields));
}
return null;
}
}
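A hedged usage sketch of the factory methods above (illustration only, not part of the commit; it relies solely on the API shown in this file):
import java.util.Arrays;
import java.util.Collections;
class StoredFieldsContextSketch {
    static void demo() {
        // `_none_` on its own disables stored-field fetching entirely.
        StoredFieldsContext none = StoredFieldsContext.fromList(
                Collections.singletonList(StoredFieldsContext._NONE_));
        assert none.fetchFields() == false;   // fetching disabled
        assert none.fieldNames() == null;     // no field list is kept
        // An explicit list keeps fetching enabled and can be extended later.
        StoredFieldsContext some = StoredFieldsContext.fromList(
                Arrays.asList("title", "date"));
        some.addFieldName("author");          // fine: appends to the list
        // some.addFieldName(StoredFieldsContext._NONE_); // IllegalArgumentException
    }
}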

View File

@ -38,6 +38,9 @@ public final class ParentFieldSubFetchPhase implements FetchSubPhase {
@Override
public void hitExecute(SearchContext context, HitContext hitContext) {
if (context.storedFieldsContext() != null && context.storedFieldsContext().fetchFields() == false) {
return;
}
ParentFieldMapper parentFieldMapper = context.mapperService().documentMapper(hitContext.hit().type()).parentFieldMapper();
if (parentFieldMapper.active() == false) {
return;

View File

@ -31,7 +31,8 @@ public final class VersionFetchSubPhase implements FetchSubPhase {
@Override
public void hitExecute(SearchContext context, HitContext hitContext) {
if (context.version() == false) {
if (context.version() == false ||
(context.storedFieldsContext() != null && context.storedFieldsContext().fetchFields() == false)) {
return;
}
long version = Versions.NOT_FOUND;
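Both sub-phases above add the same early exit; a minimal sketch (not part of the commit) of the guard any stored-value FetchSubPhase would need once _none_ is allowed:
if (context.storedFieldsContext() != null
        && context.storedFieldsContext().fetchFields() == false) {
    return; // stored fields disabled via _none_, nothing to load for this hit
}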

View File

@ -50,6 +50,7 @@ import org.elasticsearch.index.mapper.TypeFieldMapper;
import org.elasticsearch.index.query.AbstractQueryBuilder;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.script.ScriptService;
@ -106,7 +107,7 @@ public class DefaultSearchContext extends SearchContext {
private ScrollContext scrollContext;
private boolean explain;
private boolean version = false; // by default, we don't return versions
private List<String> fieldNames;
private StoredFieldsContext storedFields;
private ScriptFieldsContext scriptFields;
private FetchSourceContext fetchSourceContext;
private int from = -1;
@ -651,21 +652,29 @@ public class DefaultSearchContext extends SearchContext {
}
@Override
public boolean hasFieldNames() {
return fieldNames != null;
public boolean hasStoredFields() {
return storedFields != null && storedFields.fieldNames() != null;
}
@Override
public List<String> fieldNames() {
if (fieldNames == null) {
fieldNames = new ArrayList<>();
}
return fieldNames;
public boolean hasStoredFieldsContext() {
return storedFields != null;
}
@Override
public void emptyFieldNames() {
this.fieldNames = Collections.emptyList();
public StoredFieldsContext storedFieldsContext() {
return storedFields;
}
@Override
public SearchContext storedFieldsContext(StoredFieldsContext storedFieldsContext) {
this.storedFields = storedFieldsContext;
return this;
}
@Override
public boolean storedFieldsRequested() {
return storedFields == null || storedFields.fetchFields();
}
@Override
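The new storedFieldsRequested() shortcut above collapses the three possible request states into one boolean. As a sketch (not part of the commit), the mapping given the implementation shown:
// storedFields == null                 -> true  (nothing specified, default fetch)
// storedFields.fetchFields() == true   -> true  (explicit stored_fields list)
// storedFields.fetchFields() == false  -> false (stored_fields: _none_)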

View File

@ -35,6 +35,7 @@ import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ObjectMapper;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.script.ScriptService;
@ -69,6 +70,31 @@ public abstract class FilteredSearchContext extends SearchContext {
this.in = in;
}
@Override
public boolean hasStoredFields() {
return in.hasStoredFields();
}
@Override
public boolean hasStoredFieldsContext() {
return in.hasStoredFieldsContext();
}
@Override
public boolean storedFieldsRequested() {
return in.storedFieldsRequested();
}
@Override
public StoredFieldsContext storedFieldsContext() {
return in.storedFieldsContext();
}
@Override
public SearchContext storedFieldsContext(StoredFieldsContext storedFieldsContext) {
return in.storedFieldsContext(storedFieldsContext);
}
@Override
protected void doClose() {
in.doClose();
@ -374,20 +400,6 @@ public abstract class FilteredSearchContext extends SearchContext {
return in.size(size);
}
@Override
public boolean hasFieldNames() {
return in.hasFieldNames();
}
@Override
public List<String> fieldNames() {
return in.fieldNames();
}
@Override
public void emptyFieldNames() {
in.emptyFieldNames();
}
@Override
public boolean explain() {

View File

@ -100,9 +100,17 @@ public class InternalSearchHit implements SearchHit {
}
public InternalSearchHit(int docId) {
this(docId, null, null, null);
}
public InternalSearchHit(int docId, String id, Text type, Map<String, SearchHitField> fields) {
this.docId = docId;
this.id = new Text(id);
if (id != null) {
this.id = new Text(id);
} else {
this.id = null;
}
this.type = type;
this.fields = fields;
}
@ -168,7 +176,7 @@ public class InternalSearchHit implements SearchHit {
@Override
public String id() {
return id.string();
return id != null ? id.string() : null;
}
@Override
@ -178,7 +186,7 @@ public class InternalSearchHit implements SearchHit {
@Override
public String type() {
return type.string();
return type != null ? type.string() : null;
}
@Override
@ -444,8 +452,12 @@ public class InternalSearchHit implements SearchHit {
if (shard != null) {
builder.field(Fields._INDEX, shard.indexText());
}
builder.field(Fields._TYPE, type);
builder.field(Fields._ID, id);
if (type != null) {
builder.field(Fields._TYPE, type);
}
if (id != null) {
builder.field(Fields._ID, id);
}
}
if (version != -1) {
builder.field(Fields._VERSION, version);
@ -555,8 +567,8 @@ public class InternalSearchHit implements SearchHit {
public void readFrom(StreamInput in, InternalSearchHits.StreamContext context) throws IOException {
score = in.readFloat();
id = in.readText();
type = in.readText();
id = in.readOptionalText();
type = in.readOptionalText();
nestedIdentity = in.readOptionalStreamable(InternalNestedIdentity::new);
version = in.readLong();
source = in.readBytesReference();
@ -664,8 +676,8 @@ public class InternalSearchHit implements SearchHit {
public void writeTo(StreamOutput out, InternalSearchHits.StreamContext context) throws IOException {
out.writeFloat(score);
out.writeText(id);
out.writeText(type);
out.writeOptionalText(id);
out.writeOptionalText(type);
out.writeOptionalStreamable(nestedIdentity);
out.writeLong(version);
out.writeBytesReference(source);
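Because hits built from a _none_ request carry neither id nor type, the serialization switches to the optional-text variants above. A minimal sketch of the invariant (not part of the commit; assumes only the constructors and accessors shown in this diff):
InternalSearchHit hit = new InternalSearchHit(42);  // docId-only constructor above
assert hit.id() == null && hit.type() == null;      // null-safe accessors above
// writeOptionalText/readOptionalText round-trip these nulls, where the old
// writeText/readText pair would have failed on a null id or type.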

View File

@ -43,6 +43,7 @@ import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ObjectMapper;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.script.ScriptService;
@ -68,7 +69,6 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicBoolean;
/**
@ -291,11 +291,18 @@ public abstract class SearchContext extends AbstractRefCounted implements Releas
public abstract SearchContext size(int size);
public abstract boolean hasFieldNames();
public abstract boolean hasStoredFields();
public abstract List<String> fieldNames();
public abstract boolean hasStoredFieldsContext();
public abstract void emptyFieldNames();
/**
* A shortcut that returns true if stored fields should be fetched: either no
* storedFieldsContext is set (the default) or the context explicitly requests the fields.
*/
public abstract boolean storedFieldsRequested();
public abstract StoredFieldsContext storedFieldsContext();
public abstract SearchContext storedFieldsContext(StoredFieldsContext storedFieldsContext);
public abstract boolean explain();
