Merge branch 'master' into enhancement/remove_node_client_setting

javanna 2016-03-23 11:25:34 +01:00 committed by Luca Cavanna
commit 030453d320
251 changed files with 4233 additions and 3192 deletions

View File

@ -435,14 +435,12 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]AlreadyExpiredException.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]CompositeIndexEventListener.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]IndexModule.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]IndexService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]IndexSettings.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]IndexingSlowLog.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]MergePolicyConfig.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]MergeSchedulerConfig.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]NodeServicesProvider.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]SearchSlowLog.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]Analysis.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]AnalysisRegistry.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]AnalysisService.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]CommonGramsTokenFilterFactory.java" checks="LineLength" />
@ -602,7 +600,6 @@
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]shard[/\\]IndexSearcherWrapper.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]shard[/\\]IndexShard.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]shard[/\\]IndexingStats.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]shard[/\\]ShadowIndexShard.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]shard[/\\]ShardPath.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]shard[/\\]ShardStateMetaData.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]shard[/\\]StoreRecovery.java" checks="LineLength" />
@ -1119,7 +1116,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]fieldstats[/\\]FieldStatsIntegrationIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]fieldstats[/\\]FieldStatsTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]AsyncShardFetchTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]GatewayIndexStateIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]GatewayMetaStateTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]GatewayModuleTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]GatewayServiceTests.java" checks="LineLength" />
@ -1140,7 +1136,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]http[/\\]netty[/\\]NettyPipeliningEnabledIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]IndexModuleTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]IndexServiceTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]IndexSettingsTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]IndexWithShadowReplicasIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]IndexingSlowLogTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]MergePolicySettingsTests.java" checks="LineLength" />

View File

@ -135,6 +135,8 @@ final class Bootstrap {
JNANatives.trySetMaxNumberOfThreads();
JNANatives.trySetMaxSizeVirtualMemory();
// init lucene random seed. it will use /dev/urandom where available:
StringHelper.randomId();
}

View File

@ -123,6 +123,7 @@ final class BootstrapCheck {
if (Constants.LINUX) {
checks.add(new MaxNumberOfThreadsCheck());
}
checks.add(new MaxSizeVirtualMemoryCheck());
return Collections.unmodifiableList(checks);
}
@ -249,4 +250,32 @@ final class BootstrapCheck {
}
static class MaxSizeVirtualMemoryCheck implements Check {
@Override
public boolean check() {
return getMaxSizeVirtualMemory() != Long.MIN_VALUE && getMaxSizeVirtualMemory() != getRlimInfinity();
}
@Override
public String errorMessage() {
return String.format(
Locale.ROOT,
"max size virtual memory [%d] for user [%s] likely too low, increase to [unlimited]",
getMaxSizeVirtualMemory(),
BootstrapInfo.getSystemProperties().get("user.name"));
}
// visible for testing
long getRlimInfinity() {
return JNACLibrary.RLIM_INFINITY;
}
// visible for testing
long getMaxSizeVirtualMemory() {
return JNANatives.MAX_SIZE_VIRTUAL_MEMORY;
}
}
}
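The two getters marked // visible for testing make the new check exercisable without a real getrlimit call. A minimal sketch of how a test in the org.elasticsearch.bootstrap package could drive it (the anonymous subclasses and the 4 GB figure are illustrative only):

    // a finite RLIMIT_AS should trip the check, RLIM_INFINITY should not
    BootstrapCheck.MaxSizeVirtualMemoryCheck limited = new BootstrapCheck.MaxSizeVirtualMemoryCheck() {
        @Override
        long getRlimInfinity() {
            return -1L; // the Linux value of RLIM_INFINITY
        }
        @Override
        long getMaxSizeVirtualMemory() {
            return 4L << 30; // a finite 4 GB limit
        }
    };
    assert limited.check();

    BootstrapCheck.MaxSizeVirtualMemoryCheck unlimited = new BootstrapCheck.MaxSizeVirtualMemoryCheck() {
        @Override
        long getRlimInfinity() {
            return -1L;
        }
        @Override
        long getMaxSizeVirtualMemory() {
            return -1L; // equal to RLIM_INFINITY, so nothing to warn about
        }
    };
    assert unlimited.check() == false;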

View File

@ -39,6 +39,7 @@ final class JNACLibrary {
public static final int MCL_CURRENT = 1;
public static final int ENOMEM = 12;
public static final int RLIMIT_MEMLOCK = Constants.MAC_OS_X ? 6 : 8;
public static final int RLIMIT_AS = Constants.MAC_OS_X ? 5 : 9;
public static final long RLIM_INFINITY = Constants.MAC_OS_X ? 9223372036854775807L : -1L;
static {

View File

@ -52,6 +52,8 @@ class JNANatives {
// the user ID that owns the running Elasticsearch process
static long MAX_NUMBER_OF_THREADS = -1;
static long MAX_SIZE_VIRTUAL_MEMORY = Long.MIN_VALUE;
static void tryMlockall() {
int errno = Integer.MIN_VALUE;
String errMsg = null;
@ -124,6 +126,17 @@ class JNANatives {
}
}
static void trySetMaxSizeVirtualMemory() {
if (Constants.LINUX || Constants.MAC_OS_X) {
final JNACLibrary.Rlimit rlimit = new JNACLibrary.Rlimit();
if (JNACLibrary.getrlimit(JNACLibrary.RLIMIT_AS, rlimit) == 0) {
MAX_SIZE_VIRTUAL_MEMORY = rlimit.rlim_cur.longValue();
} else {
logger.warn("unable to retrieve max size virtual memory [" + JNACLibrary.strerror(Native.getLastError()) + "]");
}
}
}
static String rlimitToString(long value) {
assert Constants.LINUX || Constants.MAC_OS_X;
if (value == JNACLibrary.RLIM_INFINITY) {

View File

@ -20,15 +20,12 @@ package org.elasticsearch.cluster.metadata;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.misc.IndexMergeTool;
import org.elasticsearch.Version;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.IndexScopedSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.MergePolicyConfig;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.MapperService;
@ -36,10 +33,7 @@ import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.indices.mapper.MapperRegistry;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import static java.util.Collections.unmodifiableSet;
import static org.elasticsearch.common.util.set.Sets.newHashSet;
/**
@ -53,13 +47,13 @@ import static org.elasticsearch.common.util.set.Sets.newHashSet;
public class MetaDataIndexUpgradeService extends AbstractComponent {
private final MapperRegistry mapperRegistry;
private final IndexScopedSettings indexScopedSettigns;
private final IndexScopedSettings indexScopedSettings;
@Inject
public MetaDataIndexUpgradeService(Settings settings, MapperRegistry mapperRegistry, IndexScopedSettings indexScopedSettings) {
super(settings);
this.mapperRegistry = mapperRegistry;
this.indexScopedSettigns = indexScopedSettings;
this.indexScopedSettings = indexScopedSettings;
}
/**
@ -182,39 +176,13 @@ public class MetaDataIndexUpgradeService extends AbstractComponent {
}
}
private static final String ARCHIVED_SETTINGS_PREFIX = "archived.";
IndexMetaData archiveBrokenIndexSettings(IndexMetaData indexMetaData) {
Settings settings = indexMetaData.getSettings();
Settings.Builder builder = Settings.builder();
boolean changed = false;
for (Map.Entry<String, String> entry : settings.getAsMap().entrySet()) {
try {
Setting<?> setting = indexScopedSettigns.get(entry.getKey());
if (setting != null) {
setting.get(settings);
builder.put(entry.getKey(), entry.getValue());
} else {
if (indexScopedSettigns.isPrivateSetting(entry.getKey()) || entry.getKey().startsWith(ARCHIVED_SETTINGS_PREFIX)) {
builder.put(entry.getKey(), entry.getValue());
} else {
changed = true;
logger.warn("[{}] found unknown index setting: {} value: {} - archiving", indexMetaData.getIndex(), entry.getKey(), entry.getValue());
// we put them back in here such that tools can check from the outside if there are any indices with broken settings. The setting can remain there
// but we want users to be aware that some of their setting are broken and they can research why and what they need to do to replace them.
builder.put(ARCHIVED_SETTINGS_PREFIX + entry.getKey(), entry.getValue());
}
}
} catch (IllegalArgumentException ex) {
changed = true;
logger.warn("[{}] found invalid index setting: {} value: {} - archiving",ex, indexMetaData.getIndex(), entry.getKey(), entry.getValue());
// we put them back in here such that tools can check from the outside if there are any indices with broken settings. The setting can remain there
// but we want users to be aware that some of their setting sare broken and they can research why and what they need to do to replace them.
builder.put(ARCHIVED_SETTINGS_PREFIX + entry.getKey(), entry.getValue());
}
final Settings settings = indexMetaData.getSettings();
final Settings upgrade = indexScopedSettings.archiveUnknownOrBrokenSettings(settings);
if (upgrade != settings) {
return IndexMetaData.builder(indexMetaData).settings(upgrade).build();
} else {
return indexMetaData;
}
return changed ? IndexMetaData.builder(indexMetaData).settings(builder.build()).build() : indexMetaData;
}
}
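The archiving loop that used to live here moves into AbstractScopedSettings (shown further down). A short sketch of the resulting contract, where metaDataIndexUpgradeService and indexMetaData stand in for whatever instances a caller already holds:

    // archiveUnknownOrBrokenSettings returns the same Settings instance when nothing was archived,
    // so the reference comparison above is all that is needed to decide whether to rebuild the metadata
    IndexMetaData upgraded = metaDataIndexUpgradeService.archiveBrokenIndexSettings(indexMetaData);
    boolean settingsWereArchived = upgraded != indexMetaData;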

View File

@ -1000,4 +1000,8 @@ public class ClusterService extends AbstractLifecycleComponent<ClusterService> {
}
}
}
public ClusterSettings getClusterSettings() {
return clusterSettings;
}
}

View File

@ -48,6 +48,7 @@ import java.util.stream.Collectors;
* This service offers transactional application of updates settings.
*/
public abstract class AbstractScopedSettings extends AbstractComponent {
public static final String ARCHIVED_SETTINGS_PREFIX = "archived.";
private Settings lastSettingsApplied = Settings.EMPTY;
private final List<SettingUpdater<?>> settingUpdaters = new CopyOnWriteArrayList<>();
private final Map<String, Setting<?>> complexMatchers;
@ -478,4 +479,53 @@ public abstract class AbstractScopedSettings extends AbstractComponent {
}
return null;
}
/**
* Archives broken or unknown settings. Any setting that is not recognized or fails
* validation will be archived. This means the setting is prefixed with {@value ARCHIVED_SETTINGS_PREFIX}
* and remains in the settings object. This can be used to detect broken settings via APIs.
*/
public Settings archiveUnknownOrBrokenSettings(Settings settings) {
Settings.Builder builder = Settings.builder();
boolean changed = false;
for (Map.Entry<String, String> entry : settings.getAsMap().entrySet()) {
try {
Setting<?> setting = get(entry.getKey());
if (setting != null) {
setting.get(settings);
builder.put(entry.getKey(), entry.getValue());
} else {
if (entry.getKey().startsWith(ARCHIVED_SETTINGS_PREFIX) || isPrivateSetting(entry.getKey())) {
builder.put(entry.getKey(), entry.getValue());
} else {
changed = true;
logger.warn("found unknown setting: {} value: {} - archiving", entry.getKey(), entry.getValue());
// we put them back in here such that tools can check from the outside if there are any indices with broken settings. The setting can remain there
// but we want users to be aware that some of their settings are broken and they can research why and what they need to do to replace them.
builder.put(ARCHIVED_SETTINGS_PREFIX + entry.getKey(), entry.getValue());
}
}
} catch (IllegalArgumentException ex) {
changed = true;
logger.warn("found invalid setting: {} value: {} - archiving",ex , entry.getKey(), entry.getValue());
// we put them back in here such that tools can check from the outside if there are any indices with broken settings. The setting can remain there
// but we want users to be aware that some of their settings are broken and they can research why and what they need to do to replace them.
builder.put(ARCHIVED_SETTINGS_PREFIX + entry.getKey(), entry.getValue());
}
}
if (changed) {
return builder.build();
} else {
return settings;
}
}
/**
* Returns <code>true</code> iff the setting is a private setting, i.e. it should be treated as valid even though it has no internal
* representation. Otherwise <code>false</code>
*/
// TODO this should be replaced by Setting.Property.HIDDEN or something like this.
protected boolean isPrivateSetting(String key) {
return false;
}
}
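A rough usage sketch of the new method; the misspelled key is deliberate, and clusterSettings is assumed to be the ClusterSettings instance exposed by ClusterService.getClusterSettings() above:

    Settings input = Settings.builder()
        .put("cluster.routing.allocation.enable", "none") // known and valid: kept as-is
        .put("cluster.routing.allocation.enble", "none")  // unknown key: kept under the archived. prefix
        .build();
    Settings archived = clusterSettings.archiveUnknownOrBrokenSettings(input);
    // archived now contains "archived.cluster.routing.allocation.enble" next to the valid setting;
    // if nothing had needed archiving, the very same Settings instance would have been returned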

View File

@ -135,7 +135,6 @@ public final class IndexScopedSettings extends AbstractScopedSettings {
PrimaryShardAllocator.INDEX_RECOVERY_INITIAL_SHARDS_SETTING,
FsDirectoryService.INDEX_LOCK_FACTOR_SETTING,
EngineConfig.INDEX_CODEC_SETTING,
IndexWarmer.INDEX_NORMS_LOADING_SETTING,
// validate that built-in similarities don't get redefined
Setting.groupSetting("index.similarity.", (s) -> {
Map<String, Settings> groups = s.getAsGroups();
@ -171,7 +170,8 @@ public final class IndexScopedSettings extends AbstractScopedSettings {
super.validateSettingKey(setting);
}
public boolean isPrivateSetting(String key) {
@Override
protected final boolean isPrivateSetting(String key) {
switch (key) {
case IndexMetaData.SETTING_CREATION_DATE:
case IndexMetaData.SETTING_INDEX_UUID:

View File

@ -24,12 +24,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.gateway.MetaDataStateFormat;
import org.elasticsearch.gateway.MetaStateService;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;

View File

@ -961,6 +961,10 @@ public final class XContentBuilder implements BytesStream, Releasable {
return this;
}
public XContentBuilder timeValueField(String rawFieldName, String readableFieldName, TimeValue timeValue) throws IOException {
return timeValueField(rawFieldName, readableFieldName, timeValue.millis(), TimeUnit.MILLISECONDS);
}
public XContentBuilder timeValueField(String rawFieldName, String readableFieldName, long rawTime, TimeUnit timeUnit) throws
IOException {
if (humanReadable) {
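For illustration, how the new overload behaves on a human-readable builder (the field names and the 2.5 second value are made up):

    XContentBuilder builder = XContentFactory.jsonBuilder().humanReadable(true);
    builder.startObject();
    builder.timeValueField("took_in_millis", "took", new TimeValue(2500, TimeUnit.MILLISECONDS));
    builder.endObject();
    // yields roughly {"took":"2.5s","took_in_millis":2500}; without humanReadable(true) only the raw millis field is written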

View File

@ -19,6 +19,8 @@
package org.elasticsearch.gateway;
import com.carrotsearch.hppc.ObjectFloatHashMap;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.cluster.ClusterChangedEvent;
@ -28,9 +30,11 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.discovery.Discovery;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.NodeServicesProvider;
import org.elasticsearch.indices.IndicesService;
@ -84,6 +88,7 @@ public class Gateway extends AbstractComponent implements ClusterStateListener {
}
}
ObjectFloatHashMap<Index> indices = new ObjectFloatHashMap<>();
MetaData electedGlobalState = null;
int found = 0;
for (TransportNodesListGatewayMetaState.NodeGatewayMetaState nodeState : nodesState) {
@ -96,34 +101,68 @@ public class Gateway extends AbstractComponent implements ClusterStateListener {
} else if (nodeState.metaData().version() > electedGlobalState.version()) {
electedGlobalState = nodeState.metaData();
}
for (ObjectCursor<IndexMetaData> cursor : nodeState.metaData().indices().values()) {
indices.addTo(cursor.value.getIndex(), 1);
}
}
if (found < requiredAllocation) {
listener.onFailure("found [" + found + "] metadata states, required [" + requiredAllocation + "]");
return;
}
// verify index metadata
MetaData.Builder metaDataBuilder = MetaData.builder(electedGlobalState);
for (IndexMetaData indexMetaData : electedGlobalState) {
try {
if (indexMetaData.getState() == IndexMetaData.State.OPEN) {
// verify that we can actually create this index - if not we recover it as closed with lots of warn logs
indicesService.verifyIndexMetadata(nodeServicesProvider, indexMetaData);
// update the global state, and clean the indices, we elect them in the next phase
MetaData.Builder metaDataBuilder = MetaData.builder(electedGlobalState).removeAllIndices();
assert !indices.containsKey(null);
final Object[] keys = indices.keys;
for (int i = 0; i < keys.length; i++) {
if (keys[i] != null) {
Index index = (Index) keys[i];
IndexMetaData electedIndexMetaData = null;
int indexMetaDataCount = 0;
for (TransportNodesListGatewayMetaState.NodeGatewayMetaState nodeState : nodesState) {
if (nodeState.metaData() == null) {
continue;
}
IndexMetaData indexMetaData = nodeState.metaData().index(index);
if (indexMetaData == null) {
continue;
}
if (electedIndexMetaData == null) {
electedIndexMetaData = indexMetaData;
} else if (indexMetaData.getVersion() > electedIndexMetaData.getVersion()) {
electedIndexMetaData = indexMetaData;
}
indexMetaDataCount++;
}
if (electedIndexMetaData != null) {
if (indexMetaDataCount < requiredAllocation) {
logger.debug("[{}] found [{}], required [{}], not adding", index, indexMetaDataCount, requiredAllocation);
} // TODO if this logging statement is correct then we are missing an else here
try {
if (electedIndexMetaData.getState() == IndexMetaData.State.OPEN) {
// verify that we can actually create this index - if not we recover it as closed with lots of warn logs
indicesService.verifyIndexMetadata(nodeServicesProvider, electedIndexMetaData);
}
} catch (Exception e) {
logger.warn("recovering index {} failed - recovering as closed", e, electedIndexMetaData.getIndex());
electedIndexMetaData = IndexMetaData.builder(electedIndexMetaData).state(IndexMetaData.State.CLOSE).build();
}
metaDataBuilder.put(electedIndexMetaData, false);
}
} catch (Exception e) {
logger.warn("recovering index {} failed - recovering as closed", e, indexMetaData.getIndex());
indexMetaData = IndexMetaData.builder(indexMetaData).state(IndexMetaData.State.CLOSE).build();
metaDataBuilder.put(indexMetaData, true);
}
}
final ClusterSettings clusterSettings = clusterService.getClusterSettings();
metaDataBuilder.persistentSettings(clusterSettings.archiveUnknownOrBrokenSettings(metaDataBuilder.persistentSettings()));
metaDataBuilder.transientSettings(clusterSettings.archiveUnknownOrBrokenSettings(metaDataBuilder.transientSettings()));
ClusterState.Builder builder = ClusterState.builder(clusterService.state().getClusterName());
builder.metaData(metaDataBuilder);
listener.onSuccess(builder.build());
}
public void reset() throws Exception {
try {
Path[] dataPaths = nodeEnv.nodeDataPaths();
logger.trace("removing node data paths: [{}]", (Object) dataPaths);
logger.trace("removing node data paths: [{}]", (Object)dataPaths);
IOUtils.rm(dataPaths);
} catch (Exception ex) {
logger.debug("failed to delete shard locations", ex);

View File

@ -234,8 +234,8 @@ public class GatewayMetaState extends AbstractComponent implements ClusterStateL
// We successfully checked all indices for backward compatibility and found no non-upgradable indices, which
// means the upgrade can continue. Now it's safe to overwrite index metadata with the new version.
for (IndexMetaData indexMetaData : updateIndexMetaData) {
// since we still haven't upgraded the index folders, we write index state in the old folder
metaStateService.writeIndex("upgrade", indexMetaData, nodeEnv.resolveIndexFolder(indexMetaData.getIndex().getUUID()));
// since we upgraded the index folders already, write index state in the upgraded index folder
metaStateService.writeIndex("upgrade", indexMetaData);
}
}

View File

@ -121,18 +121,11 @@ public class MetaStateService extends AbstractComponent {
* Writes the index state.
*/
void writeIndex(String reason, IndexMetaData indexMetaData) throws IOException {
writeIndex(reason, indexMetaData, nodeEnv.indexPaths(indexMetaData.getIndex()));
}
/**
* Writes the index state in <code>locations</code>, use {@link #writeGlobalState(String, MetaData)}
* to write index state in index paths
*/
void writeIndex(String reason, IndexMetaData indexMetaData, Path[] locations) throws IOException {
final Index index = indexMetaData.getIndex();
logger.trace("[{}] writing state, reason [{}]", index, reason);
try {
IndexMetaData.FORMAT.write(indexMetaData, indexMetaData.getVersion(), locations);
IndexMetaData.FORMAT.write(indexMetaData, indexMetaData.getVersion(),
nodeEnv.indexPaths(indexMetaData.getIndex()));
} catch (Throwable ex) {
logger.warn("[{}]: failed to write index state", ex, index);
throw new IOException("failed to write state for [" + index + "]", ex);

View File

@ -44,6 +44,7 @@ import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.concurrent.FutureUtils;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
@ -57,7 +58,6 @@ import org.elasticsearch.index.cache.query.QueryCache;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.engine.EngineClosedException;
import org.elasticsearch.index.engine.EngineFactory;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MapperService;
@ -114,6 +114,8 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
private volatile AsyncRefreshTask refreshTask;
private volatile AsyncTranslogFSync fsyncTask;
private final SearchSlowLog searchSlowLog;
private final ThreadPool threadPool;
private final BigArrays bigArrays;
public IndexService(IndexSettings indexSettings, NodeEnvironment nodeEnv,
SimilarityService similarityService,
@ -132,9 +134,13 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
this.indexSettings = indexSettings;
this.analysisService = registry.build(indexSettings);
this.similarityService = similarityService;
this.mapperService = new MapperService(indexSettings, analysisService, similarityService, mapperRegistry, IndexService.this::newQueryShardContext);
this.indexFieldData = new IndexFieldDataService(indexSettings, indicesFieldDataCache, nodeServicesProvider.getCircuitBreakerService(), mapperService);
this.mapperService = new MapperService(indexSettings, analysisService, similarityService, mapperRegistry,
IndexService.this::newQueryShardContext);
this.indexFieldData = new IndexFieldDataService(indexSettings, indicesFieldDataCache,
nodeServicesProvider.getCircuitBreakerService(), mapperService);
this.shardStoreDeleter = shardStoreDeleter;
this.bigArrays = nodeServicesProvider.getBigArrays();
this.threadPool = nodeServicesProvider.getThreadPool();
this.eventListener = eventListener;
this.nodeEnv = nodeEnv;
this.nodeServicesProvider = nodeServicesProvider;
@ -142,7 +148,9 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
indexFieldData.setListener(new FieldDataCacheListener(this));
this.bitsetFilterCache = new BitsetFilterCache(indexSettings, new BitsetCacheListener(this));
PercolatorQueryCache percolatorQueryCache = new PercolatorQueryCache(indexSettings, IndexService.this::newQueryShardContext);
this.warmer = new IndexWarmer(indexSettings.getSettings(), nodeServicesProvider.getThreadPool(), bitsetFilterCache.createListener(nodeServicesProvider.getThreadPool()), percolatorQueryCache.createListener(nodeServicesProvider.getThreadPool()));
this.warmer = new IndexWarmer(indexSettings.getSettings(), threadPool,
bitsetFilterCache.createListener(threadPool),
percolatorQueryCache.createListener(threadPool));
this.indexCache = new IndexCache(indexSettings, queryCache, bitsetFilterCache, percolatorQueryCache);
this.engineFactory = engineFactory;
// initialize this last -- otherwise if the wrapper requires any other member to be non-null we fail with an NPE
@ -232,7 +240,8 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
}
}
} finally {
IOUtils.close(bitsetFilterCache, indexCache, mapperService, indexFieldData, analysisService, refreshTask, fsyncTask, cache().getPercolatorQueryCache());
IOUtils.close(bitsetFilterCache, indexCache, mapperService, indexFieldData, analysisService, refreshTask, fsyncTask,
cache().getPercolatorQueryCache());
}
}
}
@ -302,7 +311,9 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
}
dataPathToShardCount.put(dataPath, curCount + 1);
}
path = ShardPath.selectNewPathForShard(nodeEnv, shardId, this.indexSettings, routing.getExpectedShardSize() == ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE ? getAvgShardSizeInBytes() : routing.getExpectedShardSize(),
path = ShardPath.selectNewPathForShard(nodeEnv, shardId, this.indexSettings,
routing.getExpectedShardSize() == ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE
? getAvgShardSizeInBytes() : routing.getExpectedShardSize(),
dataPathToShardCount);
logger.debug("{} creating using a new path [{}]", shardId, path);
} else {
@ -317,17 +328,22 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
// if we are on a shared FS we only own the shard (ie. we can safely delete it) if we are the primary.
final boolean canDeleteShardContent = IndexMetaData.isOnSharedFilesystem(indexSettings) == false ||
(primary && IndexMetaData.isOnSharedFilesystem(indexSettings));
final Engine.Warmer engineWarmer = (searcher, toLevel) -> {
final Engine.Warmer engineWarmer = (searcher) -> {
IndexShard shard = getShardOrNull(shardId.getId());
if (shard != null) {
warmer.warm(searcher, shard, IndexService.this.indexSettings, toLevel);
warmer.warm(searcher, shard, IndexService.this.indexSettings);
}
};
store = new Store(shardId, this.indexSettings, indexStore.newDirectoryService(path), lock, new StoreCloseListener(shardId, canDeleteShardContent, () -> eventListener.onStoreClosed(shardId)));
store = new Store(shardId, this.indexSettings, indexStore.newDirectoryService(path), lock,
new StoreCloseListener(shardId, canDeleteShardContent, () -> eventListener.onStoreClosed(shardId)));
if (useShadowEngine(primary, indexSettings)) {
indexShard = new ShadowIndexShard(shardId, this.indexSettings, path, store, indexCache, mapperService, similarityService, indexFieldData, engineFactory, eventListener, searcherWrapper, nodeServicesProvider, searchSlowLog, engineWarmer); // no indexing listeners - shadow engines don't index
indexShard = new ShadowIndexShard(shardId, this.indexSettings, path, store, indexCache, mapperService, similarityService,
indexFieldData, engineFactory, eventListener, searcherWrapper, threadPool, bigArrays, searchSlowLog, engineWarmer);
// no indexing listeners - shadow engines don't index
} else {
indexShard = new IndexShard(shardId, this.indexSettings, path, store, indexCache, mapperService, similarityService, indexFieldData, engineFactory, eventListener, searcherWrapper, nodeServicesProvider, searchSlowLog, engineWarmer, listeners);
indexShard = new IndexShard(shardId, this.indexSettings, path, store, indexCache, mapperService, similarityService,
indexFieldData, engineFactory, eventListener, searcherWrapper, threadPool, bigArrays, searchSlowLog, engineWarmer,
listeners);
}
eventListener.indexShardStateChanged(indexShard, null, indexShard.state(), "shard created");
eventListener.afterIndexShardCreated(indexShard);
@ -372,7 +388,8 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
// and close the shard so no operations are allowed to it
if (indexShard != null) {
try {
final boolean flushEngine = deleted.get() == false && closed.get(); // only flush we are we closed (closed index or shutdown) and if we are not deleted
// only flush if we are closed (closed index or shutdown) and if we are not deleted
final boolean flushEngine = deleted.get() == false && closed.get();
indexShard.close(reason, flushEngine);
} catch (Throwable e) {
logger.debug("[{}] failed to close index shard", e, shardId);
@ -419,7 +436,8 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
}
/**
* Creates a new QueryShardContext. The context has not types set yet, if types are required set them via {@link QueryShardContext#setTypes(String...)}
* Creates a new QueryShardContext. The context has no types set yet; if types are required, set them via
* {@link QueryShardContext#setTypes(String...)}
*/
public QueryShardContext newQueryShardContext() {
return new QueryShardContext(
@ -429,8 +447,12 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
);
}
ThreadPool getThreadPool() {
return nodeServicesProvider.getThreadPool();
public ThreadPool getThreadPool() {
return threadPool;
}
public BigArrays getBigArrays() {
return bigArrays;
}
public SearchSlowLog getSearchSlowLog() {
@ -502,21 +524,21 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
}
@Override
public void onCache(ShardId shardId, String fieldName, FieldDataType fieldDataType, Accountable ramUsage) {
public void onCache(ShardId shardId, String fieldName, Accountable ramUsage) {
if (shardId != null) {
final IndexShard shard = indexService.getShardOrNull(shardId.id());
if (shard != null) {
shard.fieldData().onCache(shardId, fieldName, fieldDataType, ramUsage);
shard.fieldData().onCache(shardId, fieldName, ramUsage);
}
}
}
@Override
public void onRemoval(ShardId shardId, String fieldName, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) {
public void onRemoval(ShardId shardId, String fieldName, boolean wasEvicted, long sizeInBytes) {
if (shardId != null) {
final IndexShard shard = indexService.getShardOrNull(shardId.id());
if (shard != null) {
shard.fieldData().onRemoval(shardId, fieldName, fieldDataType, wasEvicted, sizeInBytes);
shard.fieldData().onRemoval(shardId, fieldName, wasEvicted, sizeInBytes);
}
}
}
@ -547,7 +569,8 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
AliasMetaData alias = aliases.get(aliasName);
if (alias == null) {
// This shouldn't happen unless alias disappeared after filteringAliases was called.
throw new InvalidAliasNameException(indexSettings.getIndex(), aliasNames[0], "Unknown alias name was passed to alias Filter");
throw new InvalidAliasNameException(indexSettings.getIndex(), aliasNames[0],
"Unknown alias name was passed to alias Filter");
}
Query parsedFilter = parse(alias, context);
if (parsedFilter != null) {
@ -723,7 +746,8 @@ public final class IndexService extends AbstractIndexComponent implements IndexC
} catch (Exception ex) {
if (lastThrownException == null || sameException(lastThrownException, ex) == false) {
// prevent logging the same exception over and over with an interval of 1 sec, which would spam the logs
indexService.logger.warn("failed to run task {} - suppressing re-occurring exceptions unless the exception changes", ex, toString());
indexService.logger.warn("failed to run task {} - suppressing re-occurring exceptions unless the exception changes",
ex, toString());
lastThrownException = ex;
}
} finally {

View File

@ -19,14 +19,11 @@
package org.elasticsearch.index;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.DirectoryReader;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.DocumentMapper;
@ -50,9 +47,6 @@ import java.util.concurrent.TimeUnit;
*/
public final class IndexWarmer extends AbstractComponent {
public static final Setting<MappedFieldType.Loading> INDEX_NORMS_LOADING_SETTING = new Setting<>("index.norms.loading",
MappedFieldType.Loading.LAZY.toString(), (s) -> MappedFieldType.Loading.parse(s, MappedFieldType.Loading.LAZY),
Property.IndexScope);
private final List<Listener> listeners;
IndexWarmer(Settings settings, ThreadPool threadPool, Listener... listeners) {
@ -66,7 +60,7 @@ public final class IndexWarmer extends AbstractComponent {
this.listeners = Collections.unmodifiableList(list);
}
void warm(Engine.Searcher searcher, IndexShard shard, IndexSettings settings, boolean isTopReader) {
void warm(Engine.Searcher searcher, IndexShard shard, IndexSettings settings) {
if (shard.state() == IndexShardState.CLOSED) {
return;
}
@ -74,22 +68,14 @@ public final class IndexWarmer extends AbstractComponent {
return;
}
if (logger.isTraceEnabled()) {
if (isTopReader) {
logger.trace("{} top warming [{}]", shard.shardId(), searcher.reader());
} else {
logger.trace("{} warming [{}]", shard.shardId(), searcher.reader());
}
logger.trace("{} top warming [{}]", shard.shardId(), searcher.reader());
}
shard.warmerService().onPreWarm();
long time = System.nanoTime();
final List<TerminationHandle> terminationHandles = new ArrayList<>();
// get a handle on pending tasks
for (final Listener listener : listeners) {
if (isTopReader) {
terminationHandles.add(listener.warmTopReader(shard, searcher));
} else {
terminationHandles.add(listener.warmNewReaders(shard, searcher));
}
terminationHandles.add(listener.warmReader(shard, searcher));
}
// wait for termination
for (TerminationHandle terminationHandle : terminationHandles) {
@ -97,22 +83,14 @@ public final class IndexWarmer extends AbstractComponent {
terminationHandle.awaitTermination();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
if (isTopReader) {
logger.warn("top warming has been interrupted", e);
} else {
logger.warn("warming has been interrupted", e);
}
logger.warn("top warming has been interrupted", e);
break;
}
}
long took = System.nanoTime() - time;
shard.warmerService().onPostWarm(took);
if (shard.warmerService().logger().isTraceEnabled()) {
if (isTopReader) {
shard.warmerService().logger().trace("top warming took [{}]", new TimeValue(took, TimeUnit.NANOSECONDS));
} else {
shard.warmerService().logger().trace("warming took [{}]", new TimeValue(took, TimeUnit.NANOSECONDS));
}
shard.warmerService().logger().trace("top warming took [{}]", new TimeValue(took, TimeUnit.NANOSECONDS));
}
}
@ -127,9 +105,7 @@ public final class IndexWarmer extends AbstractComponent {
public interface Listener {
/** Queue tasks to warm-up the given segments and return handles that allow to wait for termination of the
* execution of those tasks. */
TerminationHandle warmNewReaders(IndexShard indexShard, Engine.Searcher searcher);
TerminationHandle warmTopReader(IndexShard indexShard, Engine.Searcher searcher);
TerminationHandle warmReader(IndexShard indexShard, Engine.Searcher searcher);
}
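With warmNewReaders and warmTopReader collapsed into one method, a listener implementation shrinks accordingly; a no-op sketch with an invented class name:

    class NoopWarmerListener implements IndexWarmer.Listener {
        @Override
        public IndexWarmer.TerminationHandle warmReader(IndexShard indexShard, Engine.Searcher searcher) {
            // nothing to pre-load; callers only need a handle they can await
            return IndexWarmer.TerminationHandle.NO_WAIT;
        }
    }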
private static class FieldDataWarmer implements IndexWarmer.Listener {
@ -140,67 +116,17 @@ public final class IndexWarmer extends AbstractComponent {
}
@Override
public TerminationHandle warmNewReaders(final IndexShard indexShard, final Engine.Searcher searcher) {
final MapperService mapperService = indexShard.mapperService();
final Map<String, MappedFieldType> warmUp = new HashMap<>();
for (DocumentMapper docMapper : mapperService.docMappers(false)) {
for (FieldMapper fieldMapper : docMapper.mappers()) {
final FieldDataType fieldDataType = fieldMapper.fieldType().fieldDataType();
final String indexName = fieldMapper.fieldType().name();
if (fieldDataType == null) {
continue;
}
if (fieldDataType.getLoading() == MappedFieldType.Loading.LAZY) {
continue;
}
if (warmUp.containsKey(indexName)) {
continue;
}
warmUp.put(indexName, fieldMapper.fieldType());
}
}
final IndexFieldDataService indexFieldDataService = indexShard.indexFieldDataService();
final CountDownLatch latch = new CountDownLatch(searcher.reader().leaves().size() * warmUp.size());
for (final LeafReaderContext ctx : searcher.reader().leaves()) {
for (final MappedFieldType fieldType : warmUp.values()) {
executor.execute(() -> {
try {
final long start = System.nanoTime();
indexFieldDataService.getForField(fieldType).load(ctx);
if (indexShard.warmerService().logger().isTraceEnabled()) {
indexShard.warmerService().logger().trace("warmed fielddata for [{}], took [{}]", fieldType.name(),
TimeValue.timeValueNanos(System.nanoTime() - start));
}
} catch (Throwable t) {
indexShard.warmerService().logger().warn("failed to warm-up fielddata for [{}]", t, fieldType.name());
} finally {
latch.countDown();
}
});
}
}
return () -> latch.await();
}
@Override
public TerminationHandle warmTopReader(final IndexShard indexShard, final Engine.Searcher searcher) {
public TerminationHandle warmReader(final IndexShard indexShard, final Engine.Searcher searcher) {
final MapperService mapperService = indexShard.mapperService();
final Map<String, MappedFieldType> warmUpGlobalOrdinals = new HashMap<>();
for (DocumentMapper docMapper : mapperService.docMappers(false)) {
for (FieldMapper fieldMapper : docMapper.mappers()) {
final FieldDataType fieldDataType = fieldMapper.fieldType().fieldDataType();
final String indexName = fieldMapper.fieldType().name();
if (fieldDataType == null) {
final MappedFieldType fieldType = fieldMapper.fieldType();
final String indexName = fieldType.name();
if (fieldType.eagerGlobalOrdinals() == false) {
continue;
}
if (fieldDataType.getLoading() != MappedFieldType.Loading.EAGER_GLOBAL_ORDINALS) {
continue;
}
if (warmUpGlobalOrdinals.containsKey(indexName)) {
continue;
}
warmUpGlobalOrdinals.put(indexName, fieldMapper.fieldType());
warmUpGlobalOrdinals.put(indexName, fieldType);
}
}
final IndexFieldDataService indexFieldDataService = indexShard.indexFieldDataService();
@ -210,7 +136,12 @@ public final class IndexWarmer extends AbstractComponent {
try {
final long start = System.nanoTime();
IndexFieldData.Global ifd = indexFieldDataService.getForField(fieldType);
ifd.loadGlobal(searcher.getDirectoryReader());
DirectoryReader reader = searcher.getDirectoryReader();
IndexFieldData<?> global = ifd.loadGlobal(reader);
if (reader.leaves().isEmpty() == false) {
global.load(reader.leaves().get(0));
}
if (indexShard.warmerService().logger().isTraceEnabled()) {
indexShard.warmerService().logger().trace("warmed global ordinals for [{}], took [{}]", fieldType.name(),
TimeValue.timeValueNanos(System.nanoTime() - start));

View File

@ -67,8 +67,10 @@ import org.elasticsearch.env.Environment;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.Reader;
import java.nio.charset.CharacterCodingException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@ -163,7 +165,8 @@ public class Analysis {
NAMED_STOP_WORDS = unmodifiableMap(namedStopWords);
}
public static CharArraySet parseWords(Environment env, Settings settings, String name, CharArraySet defaultWords, Map<String, Set<?>> namedWords, boolean ignoreCase) {
public static CharArraySet parseWords(Environment env, Settings settings, String name, CharArraySet defaultWords,
Map<String, Set<?>> namedWords, boolean ignoreCase) {
String value = settings.get(name);
if (value != null) {
if ("_none_".equals(value)) {
@ -237,12 +240,17 @@ public class Analysis {
}
}
final Path wordListFile = env.configFile().resolve(wordListPath);
final Path path = env.configFile().resolve(wordListPath);
try (BufferedReader reader = FileSystemUtils.newBufferedReader(wordListFile.toUri().toURL(), StandardCharsets.UTF_8)) {
try (BufferedReader reader = FileSystemUtils.newBufferedReader(path.toUri().toURL(), StandardCharsets.UTF_8)) {
return loadWordList(reader, "#");
} catch (CharacterCodingException ex) {
String message = String.format(Locale.ROOT,
"Unsupported character encoding detected while reading %s_path: %s - files must be UTF-8 encoded",
settingPrefix, path.toString());
throw new IllegalArgumentException(message, ex);
} catch (IOException ioe) {
String message = String.format(Locale.ROOT, "IOException while reading %s_path: %s", settingPrefix);
String message = String.format(Locale.ROOT, "IOException while reading %s_path: %s", settingPrefix, path.toString());
throw new IllegalArgumentException(message, ioe);
}
}
@ -256,7 +264,7 @@ public class Analysis {
} else {
br = new BufferedReader(reader);
}
String word = null;
String word;
while ((word = br.readLine()) != null) {
if (!Strings.hasText(word)) {
continue;
@ -283,13 +291,16 @@ public class Analysis {
if (filePath == null) {
return null;
}
final Path path = env.configFile().resolve(filePath);
try {
return FileSystemUtils.newBufferedReader(path.toUri().toURL(), StandardCharsets.UTF_8);
} catch (CharacterCodingException ex) {
String message = String.format(Locale.ROOT,
"Unsupported character encoding detected while reading %s_path: %s files must be UTF-8 encoded",
settingPrefix, path.toString());
throw new IllegalArgumentException(message, ex);
} catch (IOException ioe) {
String message = String.format(Locale.ROOT, "IOException while reading %s_path: %s", settingPrefix);
String message = String.format(Locale.ROOT, "IOException while reading %s_path: %s", settingPrefix, path.toString());
throw new IllegalArgumentException(message, ioe);
}
}

View File

@ -216,7 +216,7 @@ public final class BitsetFilterCache extends AbstractIndexComponent implements L
}
@Override
public IndexWarmer.TerminationHandle warmNewReaders(final IndexShard indexShard, final Engine.Searcher searcher) {
public IndexWarmer.TerminationHandle warmReader(final IndexShard indexShard, final Engine.Searcher searcher) {
if (indexSettings.getIndex().equals(indexShard.indexSettings().getIndex()) == false) {
// this is from a different index
return TerminationHandle.NO_WAIT;
@ -268,11 +268,6 @@ public final class BitsetFilterCache extends AbstractIndexComponent implements L
return () -> latch.await();
}
@Override
public TerminationHandle warmTopReader(IndexShard indexShard, Engine.Searcher searcher) {
return TerminationHandle.NO_WAIT;
}
}
Cache<Object, Cache<Query, Value>> getLoadedFilters() {

View File

@ -1219,12 +1219,9 @@ public abstract class Engine implements Closeable {
*/
public interface Warmer {
/**
* Called once a new Searcher is opened.
* @param searcher the searcer to warm
* @param isTopLevelReader <code>true</code> iff the searcher is build from a top-level reader.
* Otherwise the searcher might be build from a leaf reader to warm in isolation
* Called once a new Searcher is opened on the top-level searcher.
*/
void warm(Engine.Searcher searcher, boolean isTopLevelReader);
void warm(Engine.Searcher searcher);
}
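Dropping the boolean leaves a one-argument functional interface, so a warmer can now be written as a plain lambda, matching the no-op default EngineConfig falls back to in the next file:

    Engine.Warmer noopWarmer = searcher -> {};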
/**

View File

@ -108,7 +108,7 @@ public final class EngineConfig {
final Settings settings = indexSettings.getSettings();
this.indexSettings = indexSettings;
this.threadPool = threadPool;
this.warmer = warmer == null ? (a,b) -> {} : warmer;
this.warmer = warmer == null ? (a) -> {} : warmer;
this.store = store;
this.deletionPolicy = deletionPolicy;
this.mergePolicy = mergePolicy;

View File

@ -24,13 +24,10 @@ import org.apache.lucene.index.IndexCommit;
import org.apache.lucene.index.IndexFormatTooOldException;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriter.IndexReaderWarmer;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.LiveIndexWriterConfig;
import org.apache.lucene.index.MergePolicy;
import org.apache.lucene.index.MultiReader;
import org.apache.lucene.index.SegmentCommitInfo;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.index.Term;
@ -51,7 +48,6 @@ import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.lucene.LoggerInfoStream;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
import org.elasticsearch.common.lucene.index.ElasticsearchLeafReader;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.math.MathUtils;
import org.elasticsearch.common.unit.ByteSizeValue;
@ -70,7 +66,6 @@ import org.elasticsearch.index.translog.TranslogCorruptedException;
import org.elasticsearch.threadpool.ThreadPool;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
@ -91,7 +86,6 @@ public class InternalEngine extends Engine {
*/
private volatile long lastDeleteVersionPruneTimeMSec;
private final Engine.Warmer warmer;
private final Translog translog;
private final ElasticsearchConcurrentMergeScheduler mergeScheduler;
@ -131,7 +125,6 @@ public class InternalEngine extends Engine {
boolean success = false;
try {
this.lastDeleteVersionPruneTimeMSec = engineConfig.getThreadPool().estimatedTimeInMillis();
this.warmer = engineConfig.getWarmer();
mergeScheduler = scheduler = new EngineMergeScheduler(engineConfig.getShardId(), engineConfig.getIndexSettings());
this.dirtyLocks = new Object[Runtime.getRuntime().availableProcessors() * 10]; // we multiply it to have enough...
for (int i = 0; i < dirtyLocks.length; i++) {
@ -931,30 +924,6 @@ public class InternalEngine extends Engine {
iwc.setRAMBufferSizeMB(engineConfig.getIndexingBufferSize().mbFrac());
iwc.setCodec(engineConfig.getCodec());
iwc.setUseCompoundFile(true); // always use compound on flush - reduces # of file-handles on refresh
// Warm-up hook for newly-merged segments. Warming up segments here is better since it will be performed at the end
// of the merge operation and won't slow down _refresh
iwc.setMergedSegmentWarmer(new IndexReaderWarmer() {
@Override
public void warm(LeafReader reader) throws IOException {
try {
LeafReader esLeafReader = new ElasticsearchLeafReader(reader, shardId);
assert isMergedSegment(esLeafReader);
if (warmer != null) {
final Engine.Searcher searcher = new Searcher("warmer", searcherFactory.newSearcher(esLeafReader, null));
warmer.warm(searcher, false);
}
} catch (Throwable t) {
// Don't fail a merge if the warm-up failed
if (isClosed.get() == false) {
logger.warn("Warm-up failed", t);
}
if (t instanceof Error) {
// assertion/out-of-memory error, don't ignore those
throw (Error) t;
}
}
}
});
return new IndexWriter(store.directory(), iwc);
} catch (LockObtainFailedException ex) {
logger.warn("could not lock IndexWriter", ex);
@ -965,14 +934,12 @@ public class InternalEngine extends Engine {
/** Extended SearcherFactory that warms the segments if needed when acquiring a new searcher */
final static class SearchFactory extends EngineSearcherFactory {
private final Engine.Warmer warmer;
private final ShardId shardId;
private final ESLogger logger;
private final AtomicBoolean isEngineClosed;
SearchFactory(ESLogger logger, AtomicBoolean isEngineClosed, EngineConfig engineConfig) {
super(engineConfig);
warmer = engineConfig.getWarmer();
shardId = engineConfig.getShardId();
this.logger = logger;
this.isEngineClosed = isEngineClosed;
}
@ -987,55 +954,13 @@ public class InternalEngine extends Engine {
return searcher;
}
if (warmer != null) {
// we need to pass a custom searcher that does not release anything on Engine.Search Release,
// we will release explicitly
IndexSearcher newSearcher = null;
boolean closeNewSearcher = false;
try {
if (previousReader == null) {
// we are starting up - no writer active so we can't acquire a searcher.
newSearcher = searcher;
} else {
// figure out the newSearcher, with only the new readers that are relevant for us
List<IndexReader> readers = new ArrayList<>();
for (LeafReaderContext newReaderContext : reader.leaves()) {
if (isMergedSegment(newReaderContext.reader())) {
// merged segments are already handled by IndexWriterConfig.setMergedSegmentWarmer
continue;
}
boolean found = false;
for (LeafReaderContext currentReaderContext : previousReader.leaves()) {
if (currentReaderContext.reader().getCoreCacheKey().equals(newReaderContext.reader().getCoreCacheKey())) {
found = true;
break;
}
}
if (!found) {
readers.add(newReaderContext.reader());
}
}
if (!readers.isEmpty()) {
// we don't want to close the inner readers, just increase ref on them
IndexReader newReader = new MultiReader(readers.toArray(new IndexReader[readers.size()]), false);
newSearcher = super.newSearcher(newReader, null);
closeNewSearcher = true;
}
}
if (newSearcher != null) {
warmer.warm(new Searcher("new_reader_warming", newSearcher), false);
}
assert searcher.getIndexReader() instanceof ElasticsearchDirectoryReader : "this class needs an ElasticsearchDirectoryReader but got: " + searcher.getIndexReader().getClass();
warmer.warm(new Searcher("top_reader_warming", searcher), true);
warmer.warm(new Searcher("top_reader_warming", searcher));
} catch (Throwable e) {
if (isEngineClosed.get() == false) {
logger.warn("failed to prepare/warm", e);
}
} finally {
// no need to release the fullSearcher, nothing really is done...
if (newSearcher != null && closeNewSearcher) {
IOUtils.closeWhileHandlingException(newSearcher.getIndexReader()); // ignore
}
}
}
return searcher;

View File

@ -1,92 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.fielddata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.MappedFieldType.Loading;
/**
*/
public class FieldDataType {
public static final String FORMAT_KEY = "format";
public static final String DOC_VALUES_FORMAT_VALUE = "doc_values";
private final String type;
private final String typeFormat;
private final Loading loading;
private final Settings settings;
public FieldDataType(String type) {
this(type, Settings.Builder.EMPTY_SETTINGS);
}
public FieldDataType(String type, Settings.Builder builder) {
this(type, builder.build());
}
public FieldDataType(String type, Settings settings) {
this.type = type;
this.typeFormat = "index.fielddata.type." + type + "." + FORMAT_KEY;
this.settings = settings;
final String loading = settings.get(Loading.KEY);
this.loading = Loading.parse(loading, Loading.LAZY);
}
public String getType() {
return this.type;
}
public Settings getSettings() {
return this.settings;
}
public Loading getLoading() {
return loading;
}
public String getFormat(Settings indexSettings) {
String format = settings.get(FORMAT_KEY);
if (format == null && indexSettings != null) {
format = indexSettings.get(typeFormat);
}
return format;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
FieldDataType that = (FieldDataType) o;
if (!settings.equals(that.settings)) return false;
if (!type.equals(that.type)) return false;
return true;
}
@Override
public int hashCode() {
int result = type.hashCode();
result = 31 * result + settings.hashCode();
return result;
}
}

View File

@ -67,18 +67,6 @@ public interface IndexFieldData<FD extends AtomicFieldData> extends IndexCompone
return null;
}
}
/**
* Gets a memory storage hint that should be honored if possible but is not mandatory
*/
public static MemoryStorageFormat getMemoryStorageHint(FieldDataType fieldDataType) {
// backwards compatibility
String s = fieldDataType.getSettings().get("ordinals");
if (s != null) {
return "always".equals(s) ? MemoryStorageFormat.ORDINALS : null;
}
return MemoryStorageFormat.fromString(fieldDataType.getSettings().get(SETTING_MEMORY_STORAGE_HINT));
}
}
/**
@ -86,11 +74,6 @@ public interface IndexFieldData<FD extends AtomicFieldData> extends IndexCompone
*/
String getFieldName();
/**
* The field data type.
*/
FieldDataType getFieldDataType();
/**
* Loads the atomic field data for the reader, possibly cached.
*/

View File

@ -48,12 +48,12 @@ public interface IndexFieldDataCache {
/**
* Called after the fielddata is loaded during the cache phase
*/
default void onCache(ShardId shardId, String fieldName, FieldDataType fieldDataType, Accountable ramUsage){}
default void onCache(ShardId shardId, String fieldName, Accountable ramUsage){}
/**
* Called after the fielddata is unloaded
*/
default void onRemoval(ShardId shardId, String fieldName, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes){}
default void onRemoval(ShardId shardId, String fieldName, boolean wasEvicted, long sizeInBytes){}
}
class None implements IndexFieldDataCache {
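Listener implementations lose the FieldDataType parameter as well; a minimal sketch of the new shape, with placeholder accounting bodies:

    IndexFieldDataCache.Listener listener = new IndexFieldDataCache.Listener() {
        @Override
        public void onCache(ShardId shardId, String fieldName, Accountable ramUsage) {
            // record ramUsage.ramBytesUsed() for fieldName here
        }
        @Override
        public void onRemoval(ShardId shardId, String fieldName, boolean wasEvicted, long sizeInBytes) {
            // release the bytes previously recorded for fieldName here
        }
    };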

View File

@ -70,116 +70,19 @@ public class IndexFieldDataService extends AbstractIndexComponent implements Clo
}
}, Property.IndexScope);
private static final IndexFieldData.Builder MISSING_DOC_VALUES_BUILDER = (indexProperties, fieldType, cache, breakerService, mapperService1) -> {
throw new IllegalStateException("Can't load fielddata on [" + fieldType.name()
+ "] of index [" + indexProperties.getIndex().getName() + "] because fielddata is unsupported on fields of type ["
+ fieldType.fieldDataType().getType() + "]. Use doc values instead.");
};
private static final String ARRAY_FORMAT = "array";
private static final String DISABLED_FORMAT = "disabled";
private static final String DOC_VALUES_FORMAT = "doc_values";
private static final String PAGED_BYTES_FORMAT = "paged_bytes";
private static final IndexFieldData.Builder DISABLED_BUILDER = new IndexFieldData.Builder() {
@Override
public IndexFieldData<?> build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
CircuitBreakerService breakerService, MapperService mapperService) {
throw new IllegalStateException("Field data loading is forbidden on [" + fieldType.name() + "]");
}
};
private final static Map<String, IndexFieldData.Builder> buildersByType;
private final static Map<String, IndexFieldData.Builder> docValuesBuildersByType;
private final static Map<Tuple<String, String>, IndexFieldData.Builder> buildersByTypeAndFormat;
private final CircuitBreakerService circuitBreakerService;
static {
Map<String, IndexFieldData.Builder> buildersByTypeBuilder = new HashMap<>();
buildersByTypeBuilder.put("string", new PagedBytesIndexFieldData.Builder());
buildersByTypeBuilder.put(TextFieldMapper.CONTENT_TYPE, new PagedBytesIndexFieldData.Builder());
buildersByTypeBuilder.put(KeywordFieldMapper.CONTENT_TYPE, MISSING_DOC_VALUES_BUILDER);
buildersByTypeBuilder.put("float", MISSING_DOC_VALUES_BUILDER);
buildersByTypeBuilder.put("double", MISSING_DOC_VALUES_BUILDER);
buildersByTypeBuilder.put("byte", MISSING_DOC_VALUES_BUILDER);
buildersByTypeBuilder.put("short", MISSING_DOC_VALUES_BUILDER);
buildersByTypeBuilder.put("int", MISSING_DOC_VALUES_BUILDER);
buildersByTypeBuilder.put("long", MISSING_DOC_VALUES_BUILDER);
buildersByTypeBuilder.put("geo_point", new GeoPointArrayIndexFieldData.Builder());
buildersByTypeBuilder.put(ParentFieldMapper.NAME, new ParentChildIndexFieldData.Builder());
buildersByTypeBuilder.put(IndexFieldMapper.NAME, new IndexIndexFieldData.Builder());
buildersByTypeBuilder.put("binary", DISABLED_BUILDER);
buildersByTypeBuilder.put(BooleanFieldMapper.CONTENT_TYPE, MISSING_DOC_VALUES_BUILDER);
buildersByType = unmodifiableMap(buildersByTypeBuilder);
docValuesBuildersByType = MapBuilder.<String, IndexFieldData.Builder>newMapBuilder()
.put("string", new DocValuesIndexFieldData.Builder())
.put(KeywordFieldMapper.CONTENT_TYPE, new DocValuesIndexFieldData.Builder())
.put("float", new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.FLOAT))
.put("double", new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.DOUBLE))
.put("byte", new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.BYTE))
.put("short", new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.SHORT))
.put("int", new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.INT))
.put("long", new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.LONG))
.put("geo_point", new AbstractGeoPointDVIndexFieldData.Builder())
.put("binary", new BytesBinaryDVIndexFieldData.Builder())
.put(BooleanFieldMapper.CONTENT_TYPE, new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.BOOLEAN))
.immutableMap();
buildersByTypeAndFormat = MapBuilder.<Tuple<String, String>, IndexFieldData.Builder>newMapBuilder()
.put(Tuple.tuple("string", PAGED_BYTES_FORMAT), new PagedBytesIndexFieldData.Builder())
.put(Tuple.tuple("string", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder())
.put(Tuple.tuple("string", DISABLED_FORMAT), DISABLED_BUILDER)
.put(Tuple.tuple(TextFieldMapper.CONTENT_TYPE, PAGED_BYTES_FORMAT), new PagedBytesIndexFieldData.Builder())
.put(Tuple.tuple(TextFieldMapper.CONTENT_TYPE, DISABLED_FORMAT), DISABLED_BUILDER)
.put(Tuple.tuple(KeywordFieldMapper.CONTENT_TYPE, DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder())
.put(Tuple.tuple(KeywordFieldMapper.CONTENT_TYPE, DISABLED_FORMAT), DISABLED_BUILDER)
.put(Tuple.tuple("float", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.FLOAT))
.put(Tuple.tuple("float", DISABLED_FORMAT), DISABLED_BUILDER)
.put(Tuple.tuple("double", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.DOUBLE))
.put(Tuple.tuple("double", DISABLED_FORMAT), DISABLED_BUILDER)
.put(Tuple.tuple("byte", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.BYTE))
.put(Tuple.tuple("byte", DISABLED_FORMAT), DISABLED_BUILDER)
.put(Tuple.tuple("short", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.SHORT))
.put(Tuple.tuple("short", DISABLED_FORMAT), DISABLED_BUILDER)
.put(Tuple.tuple("int", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.INT))
.put(Tuple.tuple("int", DISABLED_FORMAT), DISABLED_BUILDER)
.put(Tuple.tuple("long", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.LONG))
.put(Tuple.tuple("long", DISABLED_FORMAT), DISABLED_BUILDER)
.put(Tuple.tuple("geo_point", ARRAY_FORMAT), new GeoPointArrayIndexFieldData.Builder())
.put(Tuple.tuple("geo_point", DOC_VALUES_FORMAT), new AbstractGeoPointDVIndexFieldData.Builder())
.put(Tuple.tuple("geo_point", DISABLED_FORMAT), DISABLED_BUILDER)
.put(Tuple.tuple("binary", DOC_VALUES_FORMAT), new BytesBinaryDVIndexFieldData.Builder())
.put(Tuple.tuple("binary", DISABLED_FORMAT), DISABLED_BUILDER)
.put(Tuple.tuple(BooleanFieldMapper.CONTENT_TYPE, DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.BOOLEAN))
.put(Tuple.tuple(BooleanFieldMapper.CONTENT_TYPE, DISABLED_FORMAT), DISABLED_BUILDER)
.immutableMap();
}
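The three static maps removed above backed the old three-step builder resolution that getForField (later in this file) walked through: an explicit per-field format first, then the doc-values table, then the plain by-type table. A compressed, standalone sketch of that order follows, with plain maps and string placeholders standing in for the removed MapBuilder tables.

// Sketch of the removed resolution order only; map contents are trimmed to a
// single entry each and the string values stand in for the real builder objects.
import java.util.HashMap;
import java.util.Map;

final class OldBuilderResolutionSketch {
    static String resolve(String type, String format, boolean hasDocValues) {
        Map<String, String> byTypeAndFormat = new HashMap<>();
        byTypeAndFormat.put("string|paged_bytes", "PagedBytesIndexFieldData.Builder");
        Map<String, String> docValuesByType = new HashMap<>();
        docValuesByType.put("string", "DocValuesIndexFieldData.Builder");
        Map<String, String> byType = new HashMap<>();
        byType.put("string", "PagedBytesIndexFieldData.Builder");

        String builder = format != null ? byTypeAndFormat.get(type + "|" + format) : null;  // 1. explicit format
        if (builder == null && hasDocValues) {
            builder = docValuesByType.get(type);                                            // 2. doc-values default
        }
        if (builder == null) {
            builder = byType.get(type);                                                     // 3. per-type default
        }
        return builder;                                                                     // null meant an error in the old code
    }
}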
private final IndicesFieldDataCache indicesFieldDataCache;
// the below map needs to be modified under a lock
private final Map<String, IndexFieldDataCache> fieldDataCaches = new HashMap<>();
private final MapperService mapperService;
private static final IndexFieldDataCache.Listener DEFAULT_NOOP_LISTENER = new IndexFieldDataCache.Listener() {
@Override
public void onCache(ShardId shardId, String fieldName, FieldDataType fieldDataType, Accountable ramUsage) {
public void onCache(ShardId shardId, String fieldName, Accountable ramUsage) {
}
@Override
public void onRemoval(ShardId shardId, String fieldName, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) {
public void onRemoval(ShardId shardId, String fieldName, boolean wasEvicted, long sizeInBytes) {
}
};
private volatile IndexFieldDataCache.Listener listener = DEFAULT_NOOP_LISTENER;
@ -223,42 +126,15 @@ public class IndexFieldDataService extends AbstractIndexComponent implements Clo
@SuppressWarnings("unchecked")
public <IFD extends IndexFieldData<?>> IFD getForField(MappedFieldType fieldType) {
final String fieldName = fieldType.name();
final FieldDataType type = fieldType.fieldDataType();
if (type == null) {
throw new IllegalArgumentException("found no fielddata type for field [" + fieldName + "]");
}
final boolean docValues = fieldType.hasDocValues();
IndexFieldData.Builder builder = null;
String format = type.getFormat(indexSettings.getSettings());
if (format != null && FieldDataType.DOC_VALUES_FORMAT_VALUE.equals(format) && !docValues) {
logger.warn("field [{}] has no doc values, will use default field data format", fieldName);
format = null;
}
if (format != null) {
builder = buildersByTypeAndFormat.get(Tuple.tuple(type.getType(), format));
if (builder == null) {
logger.warn("failed to find format [{}] for field [{}], will use default", format, fieldName);
}
}
if (builder == null && docValues) {
builder = docValuesBuildersByType.get(type.getType());
}
if (builder == null) {
builder = buildersByType.get(type.getType());
}
if (builder == null) {
throw new IllegalArgumentException("failed to find field data builder for field " + fieldName + ", and type " + type.getType());
}
IndexFieldData.Builder builder = fieldType.fielddataBuilder();
IndexFieldDataCache cache;
synchronized (this) {
cache = fieldDataCaches.get(fieldName);
if (cache == null) {
// we default to node level cache, which in turn defaults to be unbounded
// this means changing the node level settings is simple, just set the bounds there
String cacheType = type.getSettings().get("cache", indexSettings.getValue(INDEX_FIELDDATA_CACHE_KEY));
String cacheType = indexSettings.getValue(INDEX_FIELDDATA_CACHE_KEY);
if (FIELDDATA_CACHE_VALUE_NODE.equals(cacheType)) {
cache = indicesFieldDataCache.buildIndexFieldDataCache(listener, index(), fieldName, type);
cache = indicesFieldDataCache.buildIndexFieldDataCache(listener, index(), fieldName);
} else if ("none".equals(cacheType)){
cache = new IndexFieldDataCache.None();
} else {

View File

@ -51,7 +51,7 @@ public class ShardFieldData implements IndexFieldDataCache.Listener {
}
@Override
public void onCache(ShardId shardId, String fieldName, FieldDataType fieldDataType, Accountable ramUsage) {
public void onCache(ShardId shardId, String fieldName, Accountable ramUsage) {
totalMetric.inc(ramUsage.ramBytesUsed());
CounterMetric total = perFieldTotals.get(fieldName);
if (total != null) {
@ -67,7 +67,7 @@ public class ShardFieldData implements IndexFieldDataCache.Listener {
}
@Override
public void onRemoval(ShardId shardId, String fieldName, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) {
public void onRemoval(ShardId shardId, String fieldName, boolean wasEvicted, long sizeInBytes) {
if (wasEvicted) {
evictionsMetric.inc();
}

View File

@ -70,7 +70,7 @@ public enum GlobalOrdinalsBuilder {
);
}
return new InternalGlobalOrdinalsIndexFieldData(indexSettings, indexFieldData.getFieldName(),
indexFieldData.getFieldDataType(), atomicFD, ordinalMap, memorySizeInBytes
atomicFD, ordinalMap, memorySizeInBytes
);
}
@ -104,7 +104,7 @@ public enum GlobalOrdinalsBuilder {
}
final OrdinalMap ordinalMap = OrdinalMap.build(null, subs, PackedInts.DEFAULT);
return new InternalGlobalOrdinalsIndexFieldData(indexSettings, indexFieldData.getFieldName(),
indexFieldData.getFieldDataType(), atomicFD, ordinalMap, 0
atomicFD, ordinalMap, 0
);
}

View File

@ -25,11 +25,9 @@ import org.elasticsearch.common.Nullable;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.search.MultiValueMode;
import java.util.Collection;
@ -41,13 +39,11 @@ import java.util.Collections;
public abstract class GlobalOrdinalsIndexFieldData extends AbstractIndexComponent implements IndexOrdinalsFieldData, Accountable {
private final String fieldName;
private final FieldDataType fieldDataType;
private final long memorySizeInBytes;
protected GlobalOrdinalsIndexFieldData(IndexSettings indexSettings, String fieldName, FieldDataType fieldDataType, long memorySizeInBytes) {
protected GlobalOrdinalsIndexFieldData(IndexSettings indexSettings, String fieldName, long memorySizeInBytes) {
super(indexSettings);
this.fieldName = fieldName;
this.fieldDataType = fieldDataType;
this.memorySizeInBytes = memorySizeInBytes;
}
@ -71,11 +67,6 @@ public abstract class GlobalOrdinalsIndexFieldData extends AbstractIndexComponen
return fieldName;
}
@Override
public FieldDataType getFieldDataType() {
return fieldDataType;
}
@Override
public XFieldComparatorSource comparatorSource(@Nullable Object missingValue, MultiValueMode sortMode, Nested nested) {
throw new UnsupportedOperationException("no global ordinals sorting yet");

View File

@ -24,9 +24,7 @@ import org.apache.lucene.index.RandomAccessOrds;
import org.apache.lucene.util.Accountable;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.plain.AbstractAtomicOrdinalsFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import java.util.Collection;
@ -37,8 +35,8 @@ final class InternalGlobalOrdinalsIndexFieldData extends GlobalOrdinalsIndexFiel
private final Atomic[] atomicReaders;
InternalGlobalOrdinalsIndexFieldData(IndexSettings indexSettings, String fieldName, FieldDataType fieldDataType, AtomicOrdinalsFieldData[] segmentAfd, OrdinalMap ordinalMap, long memorySizeInBytes) {
super(indexSettings, fieldName, fieldDataType, memorySizeInBytes);
InternalGlobalOrdinalsIndexFieldData(IndexSettings indexSettings, String fieldName, AtomicOrdinalsFieldData[] segmentAfd, OrdinalMap ordinalMap, long memorySizeInBytes) {
super(indexSettings, fieldName, memorySizeInBytes);
this.atomicReaders = new Atomic[segmentAfd.length];
for (int i = 0; i < segmentAfd.length; i++) {
atomicReaders[i] = new Atomic(segmentAfd[i], ordinalMap, i);

View File

@ -35,7 +35,6 @@ import org.apache.lucene.util.LongsRef;
import org.apache.lucene.util.packed.GrowableWriter;
import org.apache.lucene.util.packed.PackedInts;
import org.apache.lucene.util.packed.PagedGrowableWriter;
import org.elasticsearch.common.settings.Settings;
import java.io.Closeable;
import java.io.IOException;
@ -287,20 +286,13 @@ public final class OrdinalsBuilder implements Closeable {
private OrdinalsStore ordinals;
private final LongsRef spare;
public OrdinalsBuilder(long numTerms, int maxDoc, float acceptableOverheadRatio) throws IOException {
public OrdinalsBuilder(int maxDoc, float acceptableOverheadRatio) throws IOException {
this.maxDoc = maxDoc;
int startBitsPerValue = 8;
if (numTerms >= 0) {
startBitsPerValue = PackedInts.bitsRequired(numTerms);
}
ordinals = new OrdinalsStore(maxDoc, startBitsPerValue, acceptableOverheadRatio);
spare = new LongsRef();
}
public OrdinalsBuilder(int maxDoc, float acceptableOverheadRatio) throws IOException {
this(-1, maxDoc, acceptableOverheadRatio);
}
public OrdinalsBuilder(int maxDoc) throws IOException {
this(maxDoc, DEFAULT_ACCEPTABLE_OVERHEAD_RATIO);
}
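The constructor removed here accepted a numTerms hint and used PackedInts.bitsRequired to pick a wider starting width for the ordinals store; the surviving constructor always starts at 8 bits and lets the packed writers grow on demand. A small worked example of that hint, included only to make the simplification concrete:

// PackedInts.bitsRequired(n) is the number of bits needed to represent values in [0, n].
import org.apache.lucene.util.packed.PackedInts;

class BitsRequiredExample {
    public static void main(String[] args) {
        System.out.println(PackedInts.bitsRequired(255));   // 8  -> same as the fixed default the builder now uses
        System.out.println(PackedInts.bitsRequired(1000));  // 10 -> the removed constructor would have started wider
    }
}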
@ -413,10 +405,9 @@ public final class OrdinalsBuilder implements Closeable {
/**
* Builds an {@link Ordinals} instance from the builders current state.
*/
public Ordinals build(Settings settings) {
final float acceptableOverheadRatio = settings.getAsFloat("acceptable_overhead_ratio", PackedInts.FASTEST);
final boolean forceMultiOrdinals = settings.getAsBoolean(FORCE_MULTI_ORDINALS, false);
if (forceMultiOrdinals || numMultiValuedDocs > 0 || MultiOrdinals.significantlySmallerThanSinglePackedOrdinals(maxDoc, numDocsWithValue, getValueCount(), acceptableOverheadRatio)) {
public Ordinals build() {
final float acceptableOverheadRatio = PackedInts.DEFAULT;
if (numMultiValuedDocs > 0 || MultiOrdinals.significantlySmallerThanSinglePackedOrdinals(maxDoc, numDocsWithValue, getValueCount(), acceptableOverheadRatio)) {
// MultiOrdinals can be smaller than SinglePackedOrdinals for sparse fields
return new MultiOrdinals(this, acceptableOverheadRatio);
} else {

View File

@ -26,7 +26,6 @@ import org.elasticsearch.common.Nullable;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.AtomicGeoPointFieldData;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
@ -40,8 +39,8 @@ import java.io.IOException;
public abstract class AbstractGeoPointDVIndexFieldData extends DocValuesIndexFieldData implements IndexGeoPointFieldData {
AbstractGeoPointDVIndexFieldData(Index index, String fieldName, FieldDataType fieldDataType) {
super(index, fieldName, fieldDataType);
AbstractGeoPointDVIndexFieldData(Index index, String fieldName) {
super(index, fieldName);
}
@Override
@ -55,8 +54,8 @@ public abstract class AbstractGeoPointDVIndexFieldData extends DocValuesIndexFie
public static class GeoPointDVIndexFieldData extends AbstractGeoPointDVIndexFieldData {
final boolean indexCreatedBefore2x;
public GeoPointDVIndexFieldData(Index index, String fieldName, FieldDataType fieldDataType, final boolean indexCreatedBefore2x) {
super(index, fieldName, fieldDataType);
public GeoPointDVIndexFieldData(Index index, String fieldName, final boolean indexCreatedBefore2x) {
super(index, fieldName);
this.indexCreatedBefore2x = indexCreatedBefore2x;
}
@ -82,8 +81,12 @@ public abstract class AbstractGeoPointDVIndexFieldData extends DocValuesIndexFie
@Override
public IndexFieldData<?> build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
CircuitBreakerService breakerService, MapperService mapperService) {
if (indexSettings.getIndexVersionCreated().before(Version.V_2_2_0)
&& fieldType.hasDocValues() == false) {
return new GeoPointArrayIndexFieldData(indexSettings, fieldType.name(), cache, breakerService);
}
// Ignore breaker
return new GeoPointDVIndexFieldData(indexSettings.getIndex(), fieldType.name(), fieldType.fieldDataType(),
return new GeoPointDVIndexFieldData(indexSettings.getIndex(), fieldType.name(),
indexSettings.getIndexVersionCreated().before(Version.V_2_2_0));
}
}

View File

@ -27,7 +27,6 @@ import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.AtomicFieldData;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.RamAccountingTermsEnum;
@ -39,13 +38,11 @@ import java.io.IOException;
public abstract class AbstractIndexFieldData<FD extends AtomicFieldData> extends AbstractIndexComponent implements IndexFieldData<FD> {
private final String fieldName;
protected final FieldDataType fieldDataType;
protected final IndexFieldDataCache cache;
public AbstractIndexFieldData(IndexSettings indexSettings, String fieldName, FieldDataType fieldDataType, IndexFieldDataCache cache) {
public AbstractIndexFieldData(IndexSettings indexSettings, String fieldName, IndexFieldDataCache cache) {
super(indexSettings);
this.fieldName = fieldName;
this.fieldDataType = fieldDataType;
this.cache = cache;
}
@ -54,11 +51,6 @@ public abstract class AbstractIndexFieldData<FD extends AtomicFieldData> extends
return this.fieldName;
}
@Override
public FieldDataType getFieldDataType() {
return fieldDataType;
}
@Override
public void clear() {
cache.clear(fieldName);

View File

@ -30,7 +30,6 @@ import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.AtomicGeoPointFieldData;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
@ -102,8 +101,8 @@ abstract class AbstractIndexGeoPointFieldData extends AbstractIndexFieldData<Ato
}
}
public AbstractIndexGeoPointFieldData(IndexSettings indexSettings, String fieldName, FieldDataType fieldDataType, IndexFieldDataCache cache) {
super(indexSettings, fieldName, fieldDataType, cache);
public AbstractIndexGeoPointFieldData(IndexSettings indexSettings, String fieldName, IndexFieldDataCache cache) {
super(indexSettings, fieldName, cache);
}
@Override

View File

@ -25,13 +25,10 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.CharsRefBuilder;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData;
@ -41,23 +38,21 @@ import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.MultiValueMode;
import java.io.IOException;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public abstract class AbstractIndexOrdinalsFieldData extends AbstractIndexFieldData<AtomicOrdinalsFieldData> implements IndexOrdinalsFieldData {
protected Settings frequency;
protected Settings regex;
private final double minFrequency, maxFrequency;
private final int minSegmentSize;
protected final CircuitBreakerService breakerService;
protected AbstractIndexOrdinalsFieldData(IndexSettings indexSettings, String fieldName, FieldDataType fieldDataType,
IndexFieldDataCache cache, CircuitBreakerService breakerService) {
super(indexSettings, fieldName, fieldDataType, cache);
final Map<String, Settings> groups = fieldDataType.getSettings().getGroups("filter");
frequency = groups.get("frequency");
regex = groups.get("regex");
protected AbstractIndexOrdinalsFieldData(IndexSettings indexSettings, String fieldName,
IndexFieldDataCache cache, CircuitBreakerService breakerService,
double minFrequency, double maxFrequency, int minSegmentSize) {
super(indexSettings, fieldName, cache);
this.breakerService = breakerService;
this.minFrequency = minFrequency;
this.maxFrequency = maxFrequency;
this.minSegmentSize = minSegmentSize;
}
@Override
@ -110,17 +105,24 @@ public abstract class AbstractIndexOrdinalsFieldData extends AbstractIndexFieldD
return AbstractAtomicOrdinalsFieldData.empty();
}
protected TermsEnum filter(Terms terms, LeafReader reader) throws IOException {
TermsEnum iterator = terms.iterator();
protected TermsEnum filter(Terms terms, TermsEnum iterator, LeafReader reader) throws IOException {
if (iterator == null) {
return null;
}
if (iterator != null && frequency != null) {
iterator = FrequencyFilter.filter(iterator, terms, reader, frequency);
int docCount = terms.getDocCount();
if (docCount == -1) {
docCount = reader.maxDoc();
}
if (iterator != null && regex != null) {
iterator = RegexFilter.filter(iterator, terms, reader, regex);
if (docCount >= minSegmentSize) {
final int minFreq = minFrequency > 1.0
? (int) minFrequency
: (int)(docCount * minFrequency);
final int maxFreq = maxFrequency > 1.0
? (int) maxFrequency
: (int)(docCount * maxFrequency);
if (minFreq > 1 || maxFreq < docCount) {
iterator = new FrequencyFilter(iterator, minFreq, maxFreq);
}
}
return iterator;
}
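The rewritten filter(...) above turns the mapper-supplied frequency settings into absolute document-frequency bounds: values of 1.0 or less are read as fractions of the segment's docCount, larger values as absolute counts, and nothing is filtered on segments smaller than minSegmentSize. A standalone worked example with made-up numbers:

// Illustration of the frequency-bound arithmetic in the new filter(...) method.
class FrequencyBoundsExample {
    public static void main(String[] args) {
        int docCount = 1000;          // documents carrying the field in this segment
        double minFrequency = 0.001;  // fraction -> 1 doc
        double maxFrequency = 0.5;    // fraction -> 500 docs
        int minSegmentSize = 500;     // smaller segments are left unfiltered

        if (docCount >= minSegmentSize) {
            int minFreq = minFrequency > 1.0 ? (int) minFrequency : (int) (docCount * minFrequency);  // 1
            int maxFreq = maxFrequency > 1.0 ? (int) maxFrequency : (int) (docCount * maxFrequency);  // 500
            if (minFreq > 1 || maxFreq < docCount) {
                System.out.println("apply FrequencyFilter, keep terms with docFreq in [" + minFreq + ", " + maxFreq + "]");
            }
        }
    }
}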
@ -135,25 +137,6 @@ public abstract class AbstractIndexOrdinalsFieldData extends AbstractIndexFieldD
this.maxFreq = maxFreq;
}
public static TermsEnum filter(TermsEnum toFilter, Terms terms, LeafReader reader, Settings settings) throws IOException {
int docCount = terms.getDocCount();
if (docCount == -1) {
docCount = reader.maxDoc();
}
final double minFrequency = settings.getAsDouble("min", 0d);
final double maxFrequency = settings.getAsDouble("max", docCount+1d);
final double minSegmentSize = settings.getAsInt("min_segment_size", 0);
if (minSegmentSize < docCount) {
final int minFreq = minFrequency > 1.0? (int) minFrequency : (int)(docCount * minFrequency);
final int maxFreq = maxFrequency > 1.0? (int) maxFrequency : (int)(docCount * maxFrequency);
assert minFreq < maxFreq;
return new FrequencyFilter(toFilter, minFreq, maxFreq);
}
return toFilter;
}
@Override
protected AcceptStatus accept(BytesRef arg0) throws IOException {
int docFreq = docFreq();
@ -164,33 +147,4 @@ public abstract class AbstractIndexOrdinalsFieldData extends AbstractIndexFieldD
}
}
private static final class RegexFilter extends FilteredTermsEnum {
private final Matcher matcher;
private final CharsRefBuilder spare = new CharsRefBuilder();
public RegexFilter(TermsEnum delegate, Matcher matcher) {
super(delegate, false);
this.matcher = matcher;
}
public static TermsEnum filter(TermsEnum iterator, Terms terms, LeafReader reader, Settings regex) {
String pattern = regex.get("pattern");
if (pattern == null) {
return iterator;
}
Pattern p = Pattern.compile(pattern);
return new RegexFilter(iterator, p.matcher(""));
}
@Override
protected AcceptStatus accept(BytesRef arg0) throws IOException {
spare.copyUTF8Bytes(arg0);
matcher.reset(spare.get());
if (matcher.matches()) {
return AcceptStatus.YES;
}
return AcceptStatus.NO;
}
}
}

View File

@ -21,7 +21,6 @@ package org.elasticsearch.index.fielddata.plain;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource;
@ -29,8 +28,8 @@ import org.elasticsearch.search.MultiValueMode;
public class BinaryDVIndexFieldData extends DocValuesIndexFieldData implements IndexFieldData<BinaryDVAtomicFieldData> {
public BinaryDVIndexFieldData(Index index, String fieldName, FieldDataType fieldDataType) {
super(index, fieldName, fieldDataType);
public BinaryDVIndexFieldData(Index index, String fieldName) {
super(index, fieldName);
}
@Override

View File

@ -24,7 +24,6 @@ import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
@ -37,8 +36,8 @@ import java.io.IOException;
public class BytesBinaryDVIndexFieldData extends DocValuesIndexFieldData implements IndexFieldData<BytesBinaryDVAtomicFieldData> {
public BytesBinaryDVIndexFieldData(Index index, String fieldName, FieldDataType fieldDataType) {
super(index, fieldName, fieldDataType);
public BytesBinaryDVIndexFieldData(Index index, String fieldName) {
super(index, fieldName);
}
@Override
@ -67,7 +66,7 @@ public class BytesBinaryDVIndexFieldData extends DocValuesIndexFieldData impleme
CircuitBreakerService breakerService, MapperService mapperService) {
// Ignore breaker
final String fieldName = fieldType.name();
return new BytesBinaryDVIndexFieldData(indexSettings.getIndex(), fieldName, fieldType.fieldDataType());
return new BytesBinaryDVIndexFieldData(indexSettings.getIndex(), fieldName);
}
}

View File

@ -25,7 +25,6 @@ import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
@ -46,14 +45,12 @@ public abstract class DocValuesIndexFieldData {
protected final Index index;
protected final String fieldName;
protected final FieldDataType fieldDataType;
protected final ESLogger logger;
public DocValuesIndexFieldData(Index index, String fieldName, FieldDataType fieldDataType) {
public DocValuesIndexFieldData(Index index, String fieldName) {
super();
this.index = index;
this.fieldName = fieldName;
this.fieldDataType = fieldDataType;
this.logger = Loggers.getLogger(getClass());
}
@ -61,10 +58,6 @@ public abstract class DocValuesIndexFieldData {
return fieldName;
}
public final FieldDataType getFieldDataType() {
return fieldDataType;
}
public final void clear() {
// can't do
}
@ -92,19 +85,13 @@ public abstract class DocValuesIndexFieldData {
CircuitBreakerService breakerService, MapperService mapperService) {
// Ignore Circuit Breaker
final String fieldName = fieldType.name();
final Settings fdSettings = fieldType.fieldDataType().getSettings();
final Map<String, Settings> filter = fdSettings.getGroups("filter");
if (filter != null && !filter.isEmpty()) {
throw new IllegalArgumentException("Doc values field data doesn't support filters [" + fieldName + "]");
}
if (BINARY_INDEX_FIELD_NAMES.contains(fieldName)) {
assert numericType == null;
return new BinaryDVIndexFieldData(indexSettings.getIndex(), fieldName, fieldType.fieldDataType());
return new BinaryDVIndexFieldData(indexSettings.getIndex(), fieldName);
} else if (numericType != null) {
return new SortedNumericDVIndexFieldData(indexSettings.getIndex(), fieldName, numericType, fieldType.fieldDataType());
return new SortedNumericDVIndexFieldData(indexSettings.getIndex(), fieldName, numericType);
} else {
return new SortedSetDVOrdinalsIndexFieldData(indexSettings, cache, fieldName, breakerService, fieldType.fieldDataType());
return new SortedSetDVOrdinalsIndexFieldData(indexSettings, cache, fieldName, breakerService);
}
}

View File

@ -35,7 +35,6 @@ import org.elasticsearch.common.util.LongArray;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.AtomicGeoPointFieldData;
import org.elasticsearch.index.fielddata.FieldData;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.ordinals.Ordinals;
@ -50,18 +49,9 @@ import org.elasticsearch.indices.breaker.CircuitBreakerService;
public class GeoPointArrayIndexFieldData extends AbstractIndexGeoPointFieldData {
private final CircuitBreakerService breakerService;
public static class Builder implements IndexFieldData.Builder {
@Override
public IndexFieldData<?> build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
CircuitBreakerService breakerService, MapperService mapperService) {
return new GeoPointArrayIndexFieldData(indexSettings, fieldType.name(), fieldType.fieldDataType(), cache,
breakerService);
}
}
public GeoPointArrayIndexFieldData(IndexSettings indexSettings, String fieldName,
FieldDataType fieldDataType, IndexFieldDataCache cache, CircuitBreakerService breakerService) {
super(indexSettings, fieldName, fieldDataType, cache);
IndexFieldDataCache cache, CircuitBreakerService breakerService) {
super(indexSettings, fieldName, cache);
this.breakerService = breakerService;
}
@ -88,8 +78,7 @@ public class GeoPointArrayIndexFieldData extends AbstractIndexGeoPointFieldData
private AtomicGeoPointFieldData loadFieldData22(LeafReader reader, NonEstimatingEstimator estimator, Terms terms,
AtomicGeoPointFieldData data) throws Exception {
LongArray indexedPoints = BigArrays.NON_RECYCLING_INSTANCE.newLongArray(128);
final float acceptableTransientOverheadRatio = fieldDataType.getSettings().getAsFloat("acceptable_transient_overhead_ratio",
OrdinalsBuilder.DEFAULT_ACCEPTABLE_OVERHEAD_RATIO);
final float acceptableTransientOverheadRatio = OrdinalsBuilder.DEFAULT_ACCEPTABLE_OVERHEAD_RATIO;
boolean success = false;
try (OrdinalsBuilder builder = new OrdinalsBuilder(reader.maxDoc(), acceptableTransientOverheadRatio)) {
final TermsEnum termsEnum;
@ -112,10 +101,9 @@ public class GeoPointArrayIndexFieldData extends AbstractIndexGeoPointFieldData
}
indexedPoints = BigArrays.NON_RECYCLING_INSTANCE.resize(indexedPoints, numTerms);
Ordinals build = builder.build(fieldDataType.getSettings());
Ordinals build = builder.build();
RandomAccessOrds ordinals = build.ordinals();
if (!(FieldData.isMultiValued(ordinals) || CommonSettings.getMemoryStorageHint(fieldDataType) == CommonSettings
.MemoryStorageFormat.ORDINALS)) {
if (FieldData.isMultiValued(ordinals) == false) {
int maxDoc = reader.maxDoc();
LongArray sIndexedPoint = BigArrays.NON_RECYCLING_INSTANCE.newLongArray(reader.maxDoc());
for (int i=0; i<maxDoc; ++i) {
@ -146,9 +134,9 @@ public class GeoPointArrayIndexFieldData extends AbstractIndexGeoPointFieldData
AtomicGeoPointFieldData data) throws Exception {
DoubleArray lat = BigArrays.NON_RECYCLING_INSTANCE.newDoubleArray(128);
DoubleArray lon = BigArrays.NON_RECYCLING_INSTANCE.newDoubleArray(128);
final float acceptableTransientOverheadRatio = fieldDataType.getSettings().getAsFloat("acceptable_transient_overhead_ratio", OrdinalsBuilder.DEFAULT_ACCEPTABLE_OVERHEAD_RATIO);
final float acceptableTransientOverheadRatio = OrdinalsBuilder.DEFAULT_ACCEPTABLE_OVERHEAD_RATIO;
boolean success = false;
try (OrdinalsBuilder builder = new OrdinalsBuilder(terms.size(), reader.maxDoc(), acceptableTransientOverheadRatio)) {
try (OrdinalsBuilder builder = new OrdinalsBuilder(reader.maxDoc(), acceptableTransientOverheadRatio)) {
final GeoPointTermsEnumLegacy iter = new GeoPointTermsEnumLegacy(builder.buildFromTerms(terms.iterator()));
GeoPoint point;
long numTerms = 0;
@ -162,9 +150,9 @@ public class GeoPointArrayIndexFieldData extends AbstractIndexGeoPointFieldData
lat = BigArrays.NON_RECYCLING_INSTANCE.resize(lat, numTerms);
lon = BigArrays.NON_RECYCLING_INSTANCE.resize(lon, numTerms);
Ordinals build = builder.build(fieldDataType.getSettings());
Ordinals build = builder.build();
RandomAccessOrds ordinals = build.ordinals();
if (!(FieldData.isMultiValued(ordinals) || CommonSettings.getMemoryStorageHint(fieldDataType) == CommonSettings.MemoryStorageFormat.ORDINALS)) {
if (FieldData.isMultiValued(ordinals) == false) {
int maxDoc = reader.maxDoc();
DoubleArray sLat = BigArrays.NON_RECYCLING_INSTANCE.newDoubleArray(reader.maxDoc());
DoubleArray sLon = BigArrays.NON_RECYCLING_INSTANCE.newDoubleArray(reader.maxDoc());

View File

@ -28,12 +28,12 @@ import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.core.TextFieldMapper;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import java.util.Collection;
@ -101,7 +101,10 @@ public class IndexIndexFieldData extends AbstractIndexOrdinalsFieldData {
private final AtomicOrdinalsFieldData atomicFieldData;
private IndexIndexFieldData(IndexSettings indexSettings, String name) {
super(indexSettings, name, new FieldDataType("string"), null, null);
super(indexSettings, name, null, null,
TextFieldMapper.Defaults.FIELDDATA_MIN_FREQUENCY,
TextFieldMapper.Defaults.FIELDDATA_MAX_FREQUENCY,
TextFieldMapper.Defaults.FIELDDATA_MIN_SEGMENT_SIZE);
atomicFieldData = new IndexAtomicFieldData(index().getName());
}

View File

@ -34,7 +34,6 @@ import org.apache.lucene.util.packed.PackedLongValues;
import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData;
@ -54,16 +53,27 @@ public class PagedBytesIndexFieldData extends AbstractIndexOrdinalsFieldData {
public static class Builder implements IndexFieldData.Builder {
private final double minFrequency, maxFrequency;
private final int minSegmentSize;
public Builder(double minFrequency, double maxFrequency, int minSegmentSize) {
this.minFrequency = minFrequency;
this.maxFrequency = maxFrequency;
this.minSegmentSize = minSegmentSize;
}
@Override
public IndexOrdinalsFieldData build(IndexSettings indexSettings, MappedFieldType fieldType,
IndexFieldDataCache cache, CircuitBreakerService breakerService, MapperService mapperService) {
return new PagedBytesIndexFieldData(indexSettings, fieldType.name(), fieldType.fieldDataType(), cache, breakerService);
return new PagedBytesIndexFieldData(indexSettings, fieldType.name(), cache, breakerService,
minFrequency, maxFrequency, minSegmentSize);
}
}
public PagedBytesIndexFieldData(IndexSettings indexSettings, String fieldName,
FieldDataType fieldDataType, IndexFieldDataCache cache, CircuitBreakerService breakerService) {
super(indexSettings, fieldName, fieldDataType, cache, breakerService);
IndexFieldDataCache cache, CircuitBreakerService breakerService,
double minFrequency, double maxFrequency, int minSegmentSize) {
super(indexSettings, fieldName, cache, breakerService, minFrequency, maxFrequency, minSegmentSize);
}
@Override
@ -82,14 +92,7 @@ public class PagedBytesIndexFieldData extends AbstractIndexOrdinalsFieldData {
final PagedBytes bytes = new PagedBytes(15);
final PackedLongValues.Builder termOrdToBytesOffset = PackedLongValues.monotonicBuilder(PackedInts.COMPACT);
final long numTerms;
if (regex == null && frequency == null) {
numTerms = terms.size();
} else {
numTerms = -1;
}
final float acceptableTransientOverheadRatio = fieldDataType.getSettings().getAsFloat(
FilterSettingFields.ACCEPTABLE_TRANSIENT_OVERHEAD_RATIO, OrdinalsBuilder.DEFAULT_ACCEPTABLE_OVERHEAD_RATIO);
final float acceptableTransientOverheadRatio = OrdinalsBuilder.DEFAULT_ACCEPTABLE_OVERHEAD_RATIO;
// Wrap the context in an estimator and use it to either estimate
// the entire set, or wrap the TermsEnum so it can be calculated
@ -98,7 +101,7 @@ public class PagedBytesIndexFieldData extends AbstractIndexOrdinalsFieldData {
TermsEnum termsEnum = estimator.beforeLoad(terms);
boolean success = false;
try (OrdinalsBuilder builder = new OrdinalsBuilder(numTerms, reader.maxDoc(), acceptableTransientOverheadRatio)) {
try (OrdinalsBuilder builder = new OrdinalsBuilder(reader.maxDoc(), acceptableTransientOverheadRatio)) {
PostingsEnum docsEnum = null;
for (BytesRef term = termsEnum.next(); term != null; term = termsEnum.next()) {
final long termOrd = builder.nextOrdinal();
@ -110,7 +113,7 @@ public class PagedBytesIndexFieldData extends AbstractIndexOrdinalsFieldData {
}
}
PagedBytes.Reader bytesReader = bytes.freeze(true);
final Ordinals ordinals = builder.build(fieldDataType.getSettings());
final Ordinals ordinals = builder.build();
data = new PagedBytesAtomicFieldData(bytesReader, termOrdToBytesOffset.build(), ordinals);
success = true;
@ -199,33 +202,28 @@ public class PagedBytesIndexFieldData extends AbstractIndexOrdinalsFieldData {
*/
@Override
public TermsEnum beforeLoad(Terms terms) throws IOException {
final float acceptableTransientOverheadRatio = fieldDataType.getSettings().getAsFloat(
FilterSettingFields.ACCEPTABLE_TRANSIENT_OVERHEAD_RATIO,
OrdinalsBuilder.DEFAULT_ACCEPTABLE_OVERHEAD_RATIO);
LeafReader reader = context.reader();
// Check if one of the following is present:
// - The OrdinalsBuilder overhead has been tweaked away from the default
// - A field data filter is present
// - A regex filter is present
if (acceptableTransientOverheadRatio != OrdinalsBuilder.DEFAULT_ACCEPTABLE_OVERHEAD_RATIO ||
fieldDataType.getSettings().getAsDouble(FilterSettingFields.FREQUENCY_MIN, 0d) != 0d ||
fieldDataType.getSettings().getAsDouble(FilterSettingFields.FREQUENCY_MAX, 0d) != 0d ||
fieldDataType.getSettings().getAsDouble(FilterSettingFields.FREQUENCY_MIN_SEGMENT_SIZE, 0d) != 0d ||
fieldDataType.getSettings().get(FilterSettingFields.REGEX_PATTERN) != null) {
TermsEnum iterator = terms.iterator();
TermsEnum filteredIterator = filter(terms, iterator, reader);
final boolean filtered = iterator != filteredIterator;
iterator = filteredIterator;
if (filtered) {
if (logger.isTraceEnabled()) {
logger.trace("Filter exists, can't circuit break normally, using RamAccountingTermsEnum");
}
return new RamAccountingTermsEnum(filter(terms, reader), breaker, this, this.fieldName);
return new RamAccountingTermsEnum(iterator, breaker, this, this.fieldName);
} else {
estimatedBytes = this.estimateStringFieldData();
// If we weren't able to estimate, wrap in the RamAccountingTermsEnum
if (estimatedBytes == 0) {
return new RamAccountingTermsEnum(filter(terms, reader), breaker, this, this.fieldName);
iterator = new RamAccountingTermsEnum(iterator, breaker, this, this.fieldName);
} else {
breaker.addEstimateBytesAndMaybeBreak(estimatedBytes, fieldName);
}
breaker.addEstimateBytesAndMaybeBreak(estimatedBytes, fieldName);
return filter(terms, reader);
return iterator;
}
}
@ -256,12 +254,4 @@ public class PagedBytesIndexFieldData extends AbstractIndexOrdinalsFieldData {
breaker.addWithoutBreaking(actualUsed);
}
}
static final class FilterSettingFields {
static final String ACCEPTABLE_TRANSIENT_OVERHEAD_RATIO = "acceptable_transient_overhead_ratio";
static final String FREQUENCY_MIN = "filter.frequency.min";
static final String FREQUENCY_MAX = "filter.frequency.max";
static final String FREQUENCY_MIN_SEGMENT_SIZE = "filter.frequency.min_segment_size";
static final String REGEX_PATTERN = "filter.regex.pattern";
}
}
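With FilterSettingFields gone, the frequency-filter bounds no longer travel through fielddata settings; they are passed straight into the Builder shown near the top of this file. A hedged usage sketch with placeholder values (in the real change they come from the text field mapper's fielddata frequency-filter configuration):

// Constructing the builder with explicit bounds; the numbers are placeholders.
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.plain.PagedBytesIndexFieldData;

class PagedBytesBuilderSketch {
    static IndexFieldData.Builder newBuilder() {
        double minFrequency = 0.001; // fraction of docCount; values above 1 are absolute counts
        double maxFrequency = 0.5;
        int minSegmentSize = 500;    // skip filtering on segments smaller than this
        return new PagedBytesIndexFieldData.Builder(minFrequency, maxFrequency, minSegmentSize);
    }
}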

View File

@ -40,7 +40,6 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.AtomicParentChildFieldData;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
@ -75,9 +74,9 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData<AtomicPare
private final CircuitBreakerService breakerService;
public ParentChildIndexFieldData(IndexSettings indexSettings, String fieldName,
FieldDataType fieldDataType, IndexFieldDataCache cache, MapperService mapperService,
IndexFieldDataCache cache, MapperService mapperService,
CircuitBreakerService breakerService) {
super(indexSettings, fieldName, fieldDataType, cache);
super(indexSettings, fieldName, cache);
this.breakerService = breakerService;
Set<String> parentTypes = new HashSet<>();
for (DocumentMapper mapper : mapperService.docMappers(false)) {
@ -146,7 +145,7 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData<AtomicPare
MappedFieldType fieldType,
IndexFieldDataCache cache, CircuitBreakerService breakerService,
MapperService mapperService) {
return new ParentChildIndexFieldData(indexSettings, fieldType.name(), fieldType.fieldDataType(), cache,
return new ParentChildIndexFieldData(indexSettings, fieldType.name(), cache,
mapperService, breakerService);
}
}
@ -324,11 +323,6 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData<AtomicPare
return ParentChildIndexFieldData.this.getFieldName();
}
@Override
public FieldDataType getFieldDataType() {
return ParentChildIndexFieldData.this.getFieldDataType();
}
@Override
public AtomicParentChildFieldData load(LeafReaderContext context) {
assert context.reader().getCoreCacheKey() == leaves.get(context.ord).reader().getCoreCacheKey();

View File

@ -30,7 +30,6 @@ import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.AtomicNumericFieldData;
import org.elasticsearch.index.fielddata.FieldData;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.NumericDoubleValues;
@ -51,8 +50,8 @@ import java.util.Collections;
public class SortedNumericDVIndexFieldData extends DocValuesIndexFieldData implements IndexNumericFieldData {
private final NumericType numericType;
public SortedNumericDVIndexFieldData(Index index, String fieldNames, NumericType numericType, FieldDataType fieldDataType) {
super(index, fieldNames, fieldDataType);
public SortedNumericDVIndexFieldData(Index index, String fieldNames, NumericType numericType) {
super(index, fieldNames);
if (numericType == null) {
throw new IllegalArgumentException("numericType must be non-null");
}

View File

@ -24,7 +24,6 @@ import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
@ -42,8 +41,8 @@ public class SortedSetDVOrdinalsIndexFieldData extends DocValuesIndexFieldData i
private final IndexFieldDataCache cache;
private final CircuitBreakerService breakerService;
public SortedSetDVOrdinalsIndexFieldData(IndexSettings indexSettings, IndexFieldDataCache cache, String fieldName, CircuitBreakerService breakerService, FieldDataType fieldDataType) {
super(indexSettings.getIndex(), fieldName, fieldDataType);
public SortedSetDVOrdinalsIndexFieldData(IndexSettings indexSettings, IndexFieldDataCache cache, String fieldName, CircuitBreakerService breakerService) {
super(indexSettings.getIndex(), fieldName);
this.indexSettings = indexSettings;
this.cache = cache;
this.breakerService = breakerService;

View File

@ -25,8 +25,6 @@ import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
@ -140,19 +138,19 @@ public class DocumentMapperParser {
return docBuilder.build(mapperService);
}
public static void checkNoRemainingFields(String fieldName, Map<String, Object> fieldNodeMap, Version indexVersionCreated) {
public static void checkNoRemainingFields(String fieldName, Map<?, ?> fieldNodeMap, Version indexVersionCreated) {
checkNoRemainingFields(fieldNodeMap, indexVersionCreated, "Mapping definition for [" + fieldName + "] has unsupported parameters: ");
}
public static void checkNoRemainingFields(Map<String, Object> fieldNodeMap, Version indexVersionCreated, String message) {
public static void checkNoRemainingFields(Map<?, ?> fieldNodeMap, Version indexVersionCreated, String message) {
if (!fieldNodeMap.isEmpty()) {
throw new MapperParsingException(message + getRemainingFields(fieldNodeMap));
}
}
private static String getRemainingFields(Map<String, ?> map) {
private static String getRemainingFields(Map<?, ?> map) {
StringBuilder remainingFields = new StringBuilder();
for (String key : map.keySet()) {
for (Object key : map.keySet()) {
remainingFields.append(" [").append(key).append(" : ").append(map.get(key)).append("]");
}
return remainingFields.toString();

View File

@ -705,7 +705,7 @@ final class DocumentParser implements Closeable {
Double.parseDouble(text);
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double");
if (builder == null) {
builder = new DoubleFieldMapper.Builder(currentFieldName);
builder = new FloatFieldMapper.Builder(currentFieldName);
}
return builder;
} catch (NumberFormatException e) {

View File

@ -25,7 +25,6 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Setting;
@ -33,7 +32,6 @@ import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.core.TypeParsers;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import org.elasticsearch.index.similarity.SimilarityProvider;
@ -64,8 +62,6 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
protected Boolean includeInAll;
protected boolean indexOptionsSet = false;
protected boolean docValuesSet = false;
@Nullable
protected Settings fieldDataSettings;
protected final MultiFields.Builder multiFieldsBuilder;
protected CopyTo copyTo;
@ -203,11 +199,6 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
return builder;
}
public T fieldDataSettings(Settings settings) {
this.fieldDataSettings = settings;
return builder;
}
public Builder nullValue(Object nullValue) {
this.fieldType.setNullValue(nullValue);
return this;
@ -245,10 +236,6 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
fieldType.setIndexAnalyzer(Lucene.KEYWORD_ANALYZER);
fieldType.setSearchAnalyzer(Lucene.KEYWORD_ANALYZER);
}
if (fieldDataSettings != null) {
Settings settings = Settings.builder().put(fieldType.fieldDataType().getSettings()).put(fieldDataSettings).build();
fieldType.setFieldDataType(new FieldDataType(fieldType.fieldDataType().getType(), settings));
}
boolean defaultDocValues = defaultDocValues(context.indexCreatedVersion());
defaultFieldType.setHasDocValues(defaultDocValues);
if (docValuesSet == false) {
@ -423,6 +410,9 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
if (indexed && (includeDefaults || fieldType().indexOptions() != defaultFieldType.indexOptions())) {
builder.field("index_options", indexOptionToString(fieldType().indexOptions()));
}
if (includeDefaults || fieldType().eagerGlobalOrdinals() != defaultFieldType.eagerGlobalOrdinals()) {
builder.field("eager_global_ordinals", fieldType().eagerGlobalOrdinals());
}
if (fieldType().similarity() != null) {
builder.field("similarity", fieldType().similarity().name());
@ -430,9 +420,6 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
builder.field("similarity", SimilarityService.DEFAULT_SIMILARITY);
}
if (includeDefaults || hasCustomFieldDataSettings()) {
builder.field("fielddata", fieldType().fieldDataType().getSettings().getAsMap());
}
multiFields.toXContent(builder, params);
if (copyTo != null) {
@ -512,10 +499,6 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
return indexed;
}
protected boolean hasCustomFieldDataSettings() {
return fieldType().fieldDataType() != null && fieldType().fieldDataType().equals(defaultFieldType.fieldDataType()) == false;
}
protected abstract String contentType();
public static class MultiFields {

View File

@ -37,11 +37,10 @@ import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.action.fieldstats.FieldStats;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.index.similarity.SimilarityProvider;
@ -55,46 +54,6 @@ import java.util.Objects;
*/
public abstract class MappedFieldType extends FieldType {
public enum Loading {
LAZY {
@Override
public String toString() {
return LAZY_VALUE;
}
},
EAGER {
@Override
public String toString() {
return EAGER_VALUE;
}
},
EAGER_GLOBAL_ORDINALS {
@Override
public String toString() {
return EAGER_GLOBAL_ORDINALS_VALUE;
}
};
public static final String KEY = "loading";
public static final String EAGER_GLOBAL_ORDINALS_VALUE = "eager_global_ordinals";
public static final String EAGER_VALUE = "eager";
public static final String LAZY_VALUE = "lazy";
public static Loading parse(String loading, Loading defaultValue) {
if (Strings.isNullOrEmpty(loading)) {
return defaultValue;
} else if (EAGER_GLOBAL_ORDINALS_VALUE.equalsIgnoreCase(loading)) {
return EAGER_GLOBAL_ORDINALS;
} else if (EAGER_VALUE.equalsIgnoreCase(loading)) {
return EAGER;
} else if (LAZY_VALUE.equalsIgnoreCase(loading)) {
return LAZY;
} else {
throw new MapperParsingException("Unknown [" + KEY + "] value: [" + loading + "]");
}
}
}
private String name;
private float boost;
// TODO: remove this docvalues flag and use docValuesType
@ -103,9 +62,9 @@ public abstract class MappedFieldType extends FieldType {
private NamedAnalyzer searchAnalyzer;
private NamedAnalyzer searchQuoteAnalyzer;
private SimilarityProvider similarity;
private FieldDataType fieldDataType;
private Object nullValue;
private String nullValueAsString; // for sending null value to _all field
private boolean eagerGlobalOrdinals;
protected MappedFieldType(MappedFieldType ref) {
super(ref);
@ -116,9 +75,9 @@ public abstract class MappedFieldType extends FieldType {
this.searchAnalyzer = ref.searchAnalyzer();
this.searchQuoteAnalyzer = ref.searchQuoteAnalyzer();
this.similarity = ref.similarity();
this.fieldDataType = ref.fieldDataType();
this.nullValue = ref.nullValue();
this.nullValueAsString = ref.nullValueAsString();
this.eagerGlobalOrdinals = ref.eagerGlobalOrdinals;
}
public MappedFieldType() {
@ -128,12 +87,16 @@ public abstract class MappedFieldType extends FieldType {
setOmitNorms(false);
setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
setBoost(1.0f);
fieldDataType = new FieldDataType(typeName());
}
@Override
public abstract MappedFieldType clone();
/** Return a fielddata builder for this field. */
public IndexFieldData.Builder fielddataBuilder() {
throw new IllegalArgumentException("Fielddata is not supported on fields of type [" + typeName() + "]");
}
@Override
public boolean equals(Object o) {
if (!super.equals(o)) return false;
@ -156,7 +119,7 @@ public abstract class MappedFieldType extends FieldType {
Objects.equals(indexAnalyzer, fieldType.indexAnalyzer) &&
Objects.equals(searchAnalyzer, fieldType.searchAnalyzer) &&
Objects.equals(searchQuoteAnalyzer(), fieldType.searchQuoteAnalyzer()) &&
Objects.equals(fieldDataType, fieldType.fieldDataType) &&
Objects.equals(eagerGlobalOrdinals, fieldType.eagerGlobalOrdinals) &&
Objects.equals(nullValue, fieldType.nullValue) &&
Objects.equals(nullValueAsString, fieldType.nullValueAsString);
}
@ -164,7 +127,7 @@ public abstract class MappedFieldType extends FieldType {
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), name, boost, docValues, indexAnalyzer, searchAnalyzer, searchQuoteAnalyzer,
similarity == null ? null : similarity.name(), fieldDataType, nullValue, nullValueAsString);
eagerGlobalOrdinals, similarity == null ? null : similarity.name(), nullValue, nullValueAsString);
}
// norelease: we need to override freeze() and add safety checks that all settings are actually set
@ -245,12 +208,12 @@ public abstract class MappedFieldType extends FieldType {
if (Objects.equals(searchQuoteAnalyzer(), other.searchQuoteAnalyzer()) == false) {
conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [search_quote_analyzer] across all types.");
}
if (Objects.equals(fieldDataType(), other.fieldDataType()) == false) {
conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [fielddata] across all types.");
}
if (Objects.equals(nullValue(), other.nullValue()) == false) {
conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [null_value] across all types.");
}
if (eagerGlobalOrdinals() != other.eagerGlobalOrdinals()) {
conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [eager_global_ordinals] across all types.");
}
}
}
@ -280,15 +243,6 @@ public abstract class MappedFieldType extends FieldType {
this.boost = boost;
}
public FieldDataType fieldDataType() {
return fieldDataType;
}
public void setFieldDataType(FieldDataType fieldDataType) {
checkIfFrozen();
this.fieldDataType = fieldDataType;
}
public boolean hasDocValues() {
return docValues;
}
@ -453,4 +407,21 @@ public abstract class MappedFieldType extends FieldType {
public Query queryStringTermQuery(Term term) {
return null;
}
protected final void failIfNoDocValues() {
if (hasDocValues() == false) {
throw new IllegalStateException("Can't load fielddata on [" + name()
+ "] because fielddata is unsupported on fields of type ["
+ typeName() + "]. Use doc values instead.");
}
}
public boolean eagerGlobalOrdinals() {
return eagerGlobalOrdinals;
}
public void setEagerGlobalOrdinals(boolean eagerGlobalOrdinals) {
checkIfFrozen();
this.eagerGlobalOrdinals = eagerGlobalOrdinals;
}
}
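Tying the MappedFieldType changes together: fielddataBuilder() now throws by default, field types that support fielddata override it (as the BinaryFieldMapper, BooleanFieldMapper and ByteFieldMapper hunks that follow show), and IndexFieldDataService.getForField simply calls fieldType.fielddataBuilder(). The field type below is hypothetical, not one from the commit, and omits the state a real clone()/copy constructor would carry.

// Hypothetical field type following the new contract: check doc values, then
// return a doc-values-backed builder. Other members rely on MappedFieldType defaults.
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;

class ExampleLongFieldType extends MappedFieldType {
    @Override
    public MappedFieldType clone() {
        return new ExampleLongFieldType();   // illustration only; real field types copy their state
    }

    @Override
    public String typeName() {
        return "example_long";               // hypothetical type name
    }

    @Override
    public IndexFieldData.Builder fielddataBuilder() {
        failIfNoDocValues();                 // added in this change: refuse fielddata without doc values
        return new DocValuesIndexFieldData.Builder().numericType(NumericType.LONG);
    }
}

On the consuming side, the earlier IndexFieldDataService hunk reduces the whole lookup to a single call, IndexFieldData.Builder builder = fieldType.fielddataBuilder(), with the per-field cache selection left unchanged.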

View File

@ -32,6 +32,8 @@ import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.plain.BytesBinaryDVIndexFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
@ -131,6 +133,12 @@ public class BinaryFieldMapper extends FieldMapper {
public Object valueForSearch(Object value) {
return value(value);
}
@Override
public IndexFieldData.Builder fielddataBuilder() {
failIfNoDocValues();
return new BytesBinaryDVIndexFieldData.Builder();
}
}
protected BinaryFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,

View File

@ -29,6 +29,9 @@ import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
@ -191,6 +194,12 @@ public class BooleanFieldMapper extends FieldMapper {
public boolean useTermQueryWithQueryString() {
return true;
}
@Override
public IndexFieldData.Builder fielddataBuilder() {
failIfNoDocValues();
return new DocValuesIndexFieldData.Builder().numericType(NumericType.BOOLEAN);
}
}
protected BooleanFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,

View File

@ -38,6 +38,9 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericIntegerAnalyzer;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
@ -185,6 +188,12 @@ public class ByteFieldMapper extends NumberFieldMapper {
maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue
);
}
@Override
public IndexFieldData.Builder fielddataBuilder() {
failIfNoDocValues();
return new DocValuesIndexFieldData.Builder().numericType(NumericType.BYTE);
}
}
protected ByteFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,

View File

@ -183,7 +183,6 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp
private ContextMappings contextMappings = null;
public CompletionFieldType() {
setFieldDataType(null);
}
private CompletionFieldType(CompletionFieldType ref) {

View File

@ -44,7 +44,9 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericDateAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
@ -252,7 +254,6 @@ public class DateFieldMapper extends NumberFieldMapper {
public DateFieldType() {
super(LegacyNumericType.LONG);
setFieldDataType(new FieldDataType("long"));
}
protected DateFieldType(DateFieldType ref) {
@ -434,6 +435,12 @@ public class DateFieldMapper extends NumberFieldMapper {
}
return dateParser.parse(strValue, now(), inclusive, zone);
}
@Override
public IndexFieldData.Builder fielddataBuilder() {
failIfNoDocValues();
return new DocValuesIndexFieldData.Builder().numericType(NumericType.LONG);
}
}
protected DateFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce,

View File

@ -40,6 +40,9 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericDoubleAnalyzer;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
@ -188,6 +191,12 @@ public class DoubleFieldMapper extends NumberFieldMapper {
maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue
);
}
@Override
public IndexFieldData.Builder fielddataBuilder() {
failIfNoDocValues();
return new DocValuesIndexFieldData.Builder().numericType(NumericType.DOUBLE);
}
}
protected DoubleFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Explicit<Boolean> ignoreMalformed,

View File

@ -41,6 +41,9 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericFloatAnalyzer;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
@ -189,6 +192,12 @@ public class FloatFieldMapper extends NumberFieldMapper {
maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue
);
}
@Override
public IndexFieldData.Builder fielddataBuilder() {
failIfNoDocValues();
return new DocValuesIndexFieldData.Builder().numericType(NumericType.FLOAT);
}
}
protected FloatFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,

View File

@ -40,6 +40,9 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericIntegerAnalyzer;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
@ -194,6 +197,12 @@ public class IntegerFieldMapper extends NumberFieldMapper {
maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue
);
}
@Override
public IndexFieldData.Builder fielddataBuilder() {
failIfNoDocValues();
return new DocValuesIndexFieldData.Builder().numericType(NumericType.INT);
}
}
protected IntegerFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,

View File

@ -29,6 +29,9 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
@ -92,6 +95,11 @@ public final class KeywordFieldMapper extends FieldMapper implements AllFieldMap
return super.indexOptions(indexOptions);
}
public Builder eagerGlobalOrdinals(boolean eagerGlobalOrdinals) {
fieldType().setEagerGlobalOrdinals(eagerGlobalOrdinals);
return builder;
}
@Override
public KeywordFieldMapper build(BuilderContext context) {
setupFieldType(context);
@ -123,6 +131,9 @@ public final class KeywordFieldMapper extends FieldMapper implements AllFieldMap
} else if (propName.equals("norms")) {
builder.omitNorms(XContentMapValues.nodeBooleanValue(propNode) == false);
iterator.remove();
} else if (propName.equals("eager_global_ordinals")) {
builder.eagerGlobalOrdinals(XContentMapValues.nodeBooleanValue(propNode));
iterator.remove();
} else if (parseMultiField(builder, name, parserContext, propName, propNode)) {
iterator.remove();
}
@ -163,6 +174,12 @@ public final class KeywordFieldMapper extends FieldMapper implements AllFieldMap
}
return termQuery(nullValue(), null);
}
@Override
public IndexFieldData.Builder fielddataBuilder() {
failIfNoDocValues();
return new DocValuesIndexFieldData.Builder();
}
}
private Boolean includeInAll;
@ -175,6 +192,13 @@ public final class KeywordFieldMapper extends FieldMapper implements AllFieldMap
this.ignoreAbove = ignoreAbove;
}
/** Values that have more chars than the return value of this method will
* be skipped at parsing time. */
// pkg-private for testing
int ignoreAbove() {
return ignoreAbove;
}
@Override
protected KeywordFieldMapper clone() {
return (KeywordFieldMapper) super.clone();
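For keyword fields the change is therefore twofold: fielddata is always backed by doc values (hence the failIfNoDocValues() guard above), and eager loading of global ordinals becomes an explicit eager_global_ordinals mapping parameter. A hedged sketch of how that parameter travels from the mapping source to the field type; the map literal, field name and Builder constructor argument are invented, the builder method and XContentMapValues call are the ones added above.

    // Illustrative only: roughly what the TypeParser loop above does with the new parameter.
    Map<String, Object> node = new HashMap<>();
    node.put("type", "keyword");
    node.put("eager_global_ordinals", true);

    KeywordFieldMapper.Builder builder = new KeywordFieldMapper.Builder("tags"); // assumed constructor
    Object propNode = node.remove("eager_global_ordinals");
    if (propNode != null) {
        // ends up in MappedFieldType.setEagerGlobalOrdinals(...) via the builder method above
        builder.eagerGlobalOrdinals(XContentMapValues.nodeBooleanValue(propNode));
    }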

View File

@ -40,6 +40,9 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericLongAnalyzer;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
@ -192,6 +195,12 @@ public class LongFieldMapper extends NumberFieldMapper {
maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue
);
}
@Override
public IndexFieldData.Builder fielddataBuilder() {
failIfNoDocValues();
return new DocValuesIndexFieldData.Builder().numericType(NumericType.LONG);
}
}
protected LongFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,

View File

@ -39,6 +39,9 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;

View File

@ -40,6 +40,9 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericIntegerAnalyzer;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
@ -190,6 +193,12 @@ public class ShortFieldMapper extends NumberFieldMapper {
maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue
);
}
@Override
public IndexFieldData.Builder fielddataBuilder() {
failIfNoDocValues();
return new DocValuesIndexFieldData.Builder().numericType(NumericType.SHORT);
}
}
protected ShortFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,

View File

@ -34,6 +34,10 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.fielddata.plain.PagedBytesIndexFieldData;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
@ -47,6 +51,7 @@ import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import static org.apache.lucene.index.IndexOptions.NONE;
@ -58,12 +63,20 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
public static final String CONTENT_TYPE = "string";
private static final int POSITION_INCREMENT_GAP_USE_ANALYZER = -1;
// If a string field is created on 5.x and all of its parameters are in this list, then we
// will automatically upgrade it to a text/keyword field. Otherwise we will just fail,
// saying that string fields are not supported anymore.
private static final Set<String> SUPPORTED_PARAMETERS_FOR_AUTO_UPGRADE = new HashSet<>(Arrays.asList(
"type",
// most common parameters, for which the upgrade is straightforward
"index", "store", "doc_values", "omit_norms", "norms", "fields", "copy_to"));
"index", "store", "doc_values", "omit_norms", "norms", "fields", "copy_to",
"fielddata", "ignore_above"));
public static class Defaults {
public static double FIELDDATA_MIN_FREQUENCY = 0;
public static double FIELDDATA_MAX_FREQUENCY = Integer.MAX_VALUE;
public static int FIELDDATA_MIN_SEGMENT_SIZE = 0;
public static final MappedFieldType FIELD_TYPE = new StringFieldType();
static {
@ -94,6 +107,11 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
builder = this;
}
@Override
public StringFieldType fieldType() {
return (StringFieldType) super.fieldType();
}
@Override
public Builder searchAnalyzer(NamedAnalyzer searchAnalyzer) {
super.searchAnalyzer(searchAnalyzer);
@ -110,6 +128,31 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
return this;
}
public Builder fielddata(boolean fielddata) {
fieldType().setFielddata(fielddata);
return builder;
}
public Builder eagerGlobalOrdinals(boolean eagerGlobalOrdinals) {
fieldType().setEagerGlobalOrdinals(eagerGlobalOrdinals);
return builder;
}
public Builder fielddataFrequencyFilter(double minFreq, double maxFreq, int minSegmentSize) {
fieldType().setFielddataMinFrequency(minFreq);
fieldType().setFielddataMaxFrequency(maxFreq);
fieldType().setFielddataMinSegmentSize(minSegmentSize);
return builder;
}
@Override
protected void setupFieldType(BuilderContext context) {
super.setupFieldType(context);
if (fieldType().hasDocValues() && ((StringFieldType) fieldType()).fielddata()) {
((StringFieldType) fieldType()).setFielddata(false);
}
}
@Override
public StringFieldMapper build(BuilderContext context) {
if (positionIncrementGap != POSITION_INCREMENT_GAP_USE_ANALYZER) {
@ -134,7 +177,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
}
setupFieldType(context);
StringFieldMapper fieldMapper = new StringFieldMapper(
name, fieldType, defaultFieldType, positionIncrementGap, ignoreAbove,
name, fieldType(), defaultFieldType, positionIncrementGap, ignoreAbove,
context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo);
return fieldMapper.includeInAll(includeInAll);
}
@ -175,6 +218,28 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
node.put("norms", TypeParsers.nodeBooleanValue("omit_norms", omitNorms, parserContext) == false);
}
}
{
// upgrade fielddata settings
Object fielddataO = node.get("fielddata");
if (fielddataO instanceof Map) {
Map<?,?> fielddata = (Map<?, ?>) fielddataO;
if (keyword == false) {
node.put("fielddata", "disabled".equals(fielddata.get("format")) == false);
Map<?,?> fielddataFilter = (Map<?, ?>) fielddata.get("filter");
if (fielddataFilter != null) {
Map<?,?> frequencyFilter = (Map<?, ?>) fielddataFilter.get("frequency");
frequencyFilter.keySet().retainAll(Arrays.asList("min", "max", "min_segment_size"));
node.put("fielddata_frequency_filter", frequencyFilter);
}
} else {
node.remove("fielddata");
}
final Object loading = fielddata.get("loading");
if (loading != null) {
node.put("eager_global_ordinals", "eager_global_ordinals".equals(loading));
}
}
}
if (keyword) {
return new KeywordFieldMapper.TypeParser().parse(fieldName, node, parserContext);
} else {
@ -185,6 +250,7 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
throw new IllegalArgumentException("The [string] type is removed in 5.0. You should now use either a [text] "
+ "or [keyword] field instead for field [" + fieldName + "]");
}
StringFieldMapper.Builder builder = new StringFieldMapper.Builder(fieldName);
// hack for the fact that string can't just accept true/false for
// the index property and still accepts no/not_analyzed/analyzed
@ -207,6 +273,21 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
throw new IllegalArgumentException("Can't parse [index] value [" + index + "] for field [" + fieldName + "], expected [true], [false], [no], [not_analyzed] or [analyzed]");
}
}
final Object fielddataObject = node.get("fielddata");
if (fielddataObject instanceof Map) {
Map<?,?> fielddata = (Map<?, ?>) fielddataObject;
final Object loading = fielddata.get("loading");
if (loading != null) {
node.put("eager_global_ordinals", "eager_global_ordinals".equals(loading));
}
Map<?,?> fielddataFilter = (Map<?, ?>) fielddata.get("filter");
if (fielddataFilter != null) {
Map<?,?> frequencyFilter = (Map<?, ?>) fielddataFilter.get("frequency");
frequencyFilter.keySet().retainAll(Arrays.asList("min", "max", "min_segment_size"));
node.put("fielddata_frequency_filter", frequencyFilter);
}
node.put("fielddata", "disabled".equals(fielddata.get("format")) == false);
}
parseTextField(builder, fieldName, node, parserContext);
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
@ -239,6 +320,20 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
} else if (propName.equals("ignore_above")) {
builder.ignoreAbove(XContentMapValues.nodeIntegerValue(propNode, -1));
iterator.remove();
} else if (propName.equals("fielddata")) {
builder.fielddata(XContentMapValues.nodeBooleanValue(propNode));
iterator.remove();
} else if (propName.equals("eager_global_ordinals")) {
builder.eagerGlobalOrdinals(XContentMapValues.nodeBooleanValue(propNode));
iterator.remove();
} else if (propName.equals("fielddata_frequency_filter")) {
Map<?,?> frequencyFilter = (Map<?, ?>) propNode;
double minFrequency = XContentMapValues.nodeDoubleValue(frequencyFilter.remove("min"), 0);
double maxFrequency = XContentMapValues.nodeDoubleValue(frequencyFilter.remove("max"), Integer.MAX_VALUE);
int minSegmentSize = XContentMapValues.nodeIntegerValue(frequencyFilter.remove("min_segment_size"), 0);
builder.fielddataFrequencyFilter(minFrequency, maxFrequency, minSegmentSize);
DocumentMapperParser.checkNoRemainingFields(propName, frequencyFilter, parserContext.indexVersionCreated());
iterator.remove();
} else if (parseMultiField(builder, fieldName, parserContext, propName, propNode)) {
iterator.remove();
}
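The two blocks above that rewrite the legacy fielddata object are the backwards-compatibility core of this file. As a standalone illustration of what they amount to (plain java.util maps instead of the mapper parsing infrastructure, null checks added, example values invented):

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.Map;

    public class LegacyFielddataUpgradeSketch {

        // Pre-5.0 string mapping fragment:
        //   "fielddata": { "format": "disabled", "loading": "eager_global_ordinals",
        //                  "filter": { "frequency": { "min": 0.001, "max": 0.1, "min_segment_size": 500 } } }
        // becomes, after the rewrite (non-keyword case; for a keyword upgrade the
        // fielddata object is simply dropped instead):
        //   "fielddata": false, "eager_global_ordinals": true,
        //   "fielddata_frequency_filter": { "min": 0.001, "max": 0.1, "min_segment_size": 500 }
        @SuppressWarnings("unchecked")
        static void upgrade(Map<String, Object> node) {
            Object fielddataO = node.get("fielddata");
            if (fielddataO instanceof Map == false) {
                return;
            }
            Map<String, Object> fielddata = (Map<String, Object>) fielddataO;
            // "format": "disabled" used to switch fielddata off entirely; anything else keeps it on
            node.put("fielddata", "disabled".equals(fielddata.get("format")) == false);
            // "loading": "eager_global_ordinals" becomes the new top-level boolean parameter
            Object loading = fielddata.get("loading");
            if (loading != null) {
                node.put("eager_global_ordinals", "eager_global_ordinals".equals(loading));
            }
            // the frequency filter moves to "fielddata_frequency_filter",
            // keeping only the min / max / min_segment_size keys
            Map<String, Object> filter = (Map<String, Object>) fielddata.get("filter");
            if (filter != null) {
                Map<String, Object> frequency = (Map<String, Object>) filter.get("frequency");
                if (frequency != null) {
                    frequency.keySet().retainAll(Arrays.asList("min", "max", "min_segment_size"));
                    node.put("fielddata_frequency_filter", frequency);
                }
            }
        }
    }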
@ -249,10 +344,42 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
public static final class StringFieldType extends MappedFieldType {
public StringFieldType() {}
private boolean fielddata;
private double fielddataMinFrequency;
private double fielddataMaxFrequency;
private int fielddataMinSegmentSize;
public StringFieldType() {
fielddata = true;
fielddataMinFrequency = Defaults.FIELDDATA_MIN_FREQUENCY;
fielddataMaxFrequency = Defaults.FIELDDATA_MAX_FREQUENCY;
fielddataMinSegmentSize = Defaults.FIELDDATA_MIN_SEGMENT_SIZE;
}
protected StringFieldType(StringFieldType ref) {
super(ref);
this.fielddata = ref.fielddata;
this.fielddataMinFrequency = ref.fielddataMinFrequency;
this.fielddataMaxFrequency = ref.fielddataMaxFrequency;
this.fielddataMinSegmentSize = ref.fielddataMinSegmentSize;
}
@Override
public boolean equals(Object o) {
if (super.equals(o) == false) {
return false;
}
StringFieldType that = (StringFieldType) o;
return fielddata == that.fielddata
&& fielddataMinFrequency == that.fielddataMinFrequency
&& fielddataMaxFrequency == that.fielddataMaxFrequency
&& fielddataMinSegmentSize == that.fielddataMinSegmentSize;
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), fielddata,
fielddataMinFrequency, fielddataMaxFrequency, fielddataMinSegmentSize);
}
public StringFieldType clone() {
@ -264,6 +391,67 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
return CONTENT_TYPE;
}
@Override
public void checkCompatibility(MappedFieldType other,
List<String> conflicts, boolean strict) {
super.checkCompatibility(other, conflicts, strict);
StringFieldType otherType = (StringFieldType) other;
if (strict) {
if (fielddata() != otherType.fielddata()) {
conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [fielddata] "
+ "across all types.");
}
if (fielddataMinFrequency() != otherType.fielddataMinFrequency()) {
conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update "
+ "[fielddata_frequency_filter.min] across all types.");
}
if (fielddataMaxFrequency() != otherType.fielddataMaxFrequency()) {
conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update "
+ "[fielddata_frequency_filter.max] across all types.");
}
if (fielddataMinSegmentSize() != otherType.fielddataMinSegmentSize()) {
conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update "
+ "[fielddata_frequency_filter.min_segment_size] across all types.");
}
}
}
public boolean fielddata() {
return fielddata;
}
public void setFielddata(boolean fielddata) {
checkIfFrozen();
this.fielddata = fielddata;
}
public double fielddataMinFrequency() {
return fielddataMinFrequency;
}
public void setFielddataMinFrequency(double fielddataMinFrequency) {
checkIfFrozen();
this.fielddataMinFrequency = fielddataMinFrequency;
}
public double fielddataMaxFrequency() {
return fielddataMaxFrequency;
}
public void setFielddataMaxFrequency(double fielddataMaxFrequency) {
checkIfFrozen();
this.fielddataMaxFrequency = fielddataMaxFrequency;
}
public int fielddataMinSegmentSize() {
return fielddataMinSegmentSize;
}
public void setFielddataMinSegmentSize(int fielddataMinSegmentSize) {
checkIfFrozen();
this.fielddataMinSegmentSize = fielddataMinSegmentSize;
}
@Override
public String value(Object value) {
if (value == null) {
@ -279,13 +467,26 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
}
return termQuery(nullValue(), null);
}
@Override
public IndexFieldData.Builder fielddataBuilder() {
if (hasDocValues()) {
return new DocValuesIndexFieldData.Builder();
} else if (fielddata) {
return new PagedBytesIndexFieldData.Builder(fielddataMinFrequency, fielddataMaxFrequency, fielddataMinSegmentSize);
} else {
throw new IllegalStateException("Fielddata is disabled on analyzed string fields by default. Set fielddata=true on ["
+ name() + "] in order to load fielddata in memory by uninverting the inverted index. Note that this can however "
+ "use significant memory.");
}
}
}
private Boolean includeInAll;
private int positionIncrementGap;
private int ignoreAbove;
protected StringFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
protected StringFieldMapper(String simpleName, StringFieldType fieldType, MappedFieldType defaultFieldType,
int positionIncrementGap, int ignoreAbove,
Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
@ -296,6 +497,12 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
if (fieldType.tokenized() && fieldType.indexOptions() != NONE && fieldType().hasDocValues()) {
throw new MapperParsingException("Field [" + fieldType.name() + "] cannot be analyzed and have doc values");
}
if (fieldType.hasDocValues() && (
fieldType.fielddataMinFrequency() != Defaults.FIELDDATA_MIN_FREQUENCY
|| fieldType.fielddataMaxFrequency() != Defaults.FIELDDATA_MAX_FREQUENCY
|| fieldType.fielddataMinSegmentSize() != Defaults.FIELDDATA_MIN_SEGMENT_SIZE)) {
throw new MapperParsingException("Field [" + fieldType.name() + "] cannot have doc values and use fielddata filtering");
}
this.positionIncrementGap = positionIncrementGap;
this.ignoreAbove = ignoreAbove;
}
@ -439,6 +646,11 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
}
}
@Override
public StringFieldType fieldType() {
return (StringFieldType) super.fieldType();
}
@Override
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
super.doXContentBody(builder, includeDefaults, params);
@ -460,6 +672,27 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
if (includeDefaults || ignoreAbove != Defaults.IGNORE_ABOVE) {
builder.field("ignore_above", ignoreAbove);
}
if (includeDefaults || fieldType().fielddata() != ((StringFieldType) defaultFieldType).fielddata()) {
builder.field("fielddata", fieldType().fielddata());
}
if (fieldType().fielddata()) {
if (includeDefaults
|| fieldType().fielddataMinFrequency() != Defaults.FIELDDATA_MIN_FREQUENCY
|| fieldType().fielddataMaxFrequency() != Defaults.FIELDDATA_MAX_FREQUENCY
|| fieldType().fielddataMinSegmentSize() != Defaults.FIELDDATA_MIN_SEGMENT_SIZE) {
builder.startObject("fielddata_frequency_filter");
if (includeDefaults || fieldType().fielddataMinFrequency() != Defaults.FIELDDATA_MIN_FREQUENCY) {
builder.field("min", fieldType().fielddataMinFrequency());
}
if (includeDefaults || fieldType().fielddataMaxFrequency() != Defaults.FIELDDATA_MAX_FREQUENCY) {
builder.field("max", fieldType().fielddataMaxFrequency());
}
if (includeDefaults || fieldType().fielddataMinSegmentSize() != Defaults.FIELDDATA_MIN_SEGMENT_SIZE) {
builder.field("min_segment_size", fieldType().fielddataMinSegmentSize());
}
builder.endObject();
}
}
}
/**

View File

@ -27,6 +27,9 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.plain.PagedBytesIndexFieldData;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
@ -38,6 +41,7 @@ import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseTextField;
@ -49,6 +53,10 @@ public class TextFieldMapper extends FieldMapper implements AllFieldMapper.Inclu
private static final int POSITION_INCREMENT_GAP_USE_ANALYZER = -1;
public static class Defaults {
public static double FIELDDATA_MIN_FREQUENCY = 0;
public static double FIELDDATA_MAX_FREQUENCY = Integer.MAX_VALUE;
public static int FIELDDATA_MIN_SEGMENT_SIZE = 0;
public static final MappedFieldType FIELD_TYPE = new TextFieldType();
static {
@ -72,6 +80,11 @@ public class TextFieldMapper extends FieldMapper implements AllFieldMapper.Inclu
builder = this;
}
@Override
public TextFieldType fieldType() {
return (TextFieldType) super.fieldType();
}
public Builder positionIncrementGap(int positionIncrementGap) {
if (positionIncrementGap < 0) {
throw new MapperParsingException("[position_increment_gap] must be positive, got " + positionIncrementGap);
@ -80,6 +93,11 @@ public class TextFieldMapper extends FieldMapper implements AllFieldMapper.Inclu
return this;
}
public Builder fielddata(boolean fielddata) {
fieldType().setFielddata(fielddata);
return builder;
}
@Override
public Builder docValues(boolean docValues) {
if (docValues) {
@ -88,6 +106,18 @@ public class TextFieldMapper extends FieldMapper implements AllFieldMapper.Inclu
return super.docValues(docValues);
}
public Builder eagerGlobalOrdinals(boolean eagerGlobalOrdinals) {
fieldType().setEagerGlobalOrdinals(eagerGlobalOrdinals);
return builder;
}
public Builder fielddataFrequencyFilter(double minFreq, double maxFreq, int minSegmentSize) {
fieldType().setFielddataMinFrequency(minFreq);
fieldType().setFielddataMaxFrequency(maxFreq);
fieldType().setFielddataMinSegmentSize(minSegmentSize);
return builder;
}
@Override
public TextFieldMapper build(BuilderContext context) {
if (positionIncrementGap != POSITION_INCREMENT_GAP_USE_ANALYZER) {
@ -119,6 +149,20 @@ public class TextFieldMapper extends FieldMapper implements AllFieldMapper.Inclu
int newPositionIncrementGap = XContentMapValues.nodeIntegerValue(propNode, -1);
builder.positionIncrementGap(newPositionIncrementGap);
iterator.remove();
} else if (propName.equals("fielddata")) {
builder.fielddata(XContentMapValues.nodeBooleanValue(propNode));
iterator.remove();
} else if (propName.equals("eager_global_ordinals")) {
builder.eagerGlobalOrdinals(XContentMapValues.nodeBooleanValue(propNode));
iterator.remove();
} else if (propName.equals("fielddata_frequency_filter")) {
Map<?,?> frequencyFilter = (Map<?, ?>) propNode;
double minFrequency = XContentMapValues.nodeDoubleValue(frequencyFilter.remove("min"), 0);
double maxFrequency = XContentMapValues.nodeDoubleValue(frequencyFilter.remove("max"), Integer.MAX_VALUE);
int minSegmentSize = XContentMapValues.nodeIntegerValue(frequencyFilter.remove("min_segment_size"), 0);
builder.fielddataFrequencyFilter(minFrequency, maxFrequency, minSegmentSize);
DocumentMapperParser.checkNoRemainingFields(propName, frequencyFilter, parserContext.indexVersionCreated());
iterator.remove();
} else if (parseMultiField(builder, fieldName, parserContext, propName, propNode)) {
iterator.remove();
}
@ -129,16 +173,110 @@ public class TextFieldMapper extends FieldMapper implements AllFieldMapper.Inclu
public static final class TextFieldType extends MappedFieldType {
public TextFieldType() {}
private boolean fielddata;
private double fielddataMinFrequency;
private double fielddataMaxFrequency;
private int fielddataMinSegmentSize;
public TextFieldType() {
// TODO: change the default to false
fielddata = true;
fielddataMinFrequency = Defaults.FIELDDATA_MIN_FREQUENCY;
fielddataMaxFrequency = Defaults.FIELDDATA_MAX_FREQUENCY;
fielddataMinSegmentSize = Defaults.FIELDDATA_MIN_SEGMENT_SIZE;
}
protected TextFieldType(TextFieldType ref) {
super(ref);
this.fielddata = ref.fielddata;
this.fielddataMinFrequency = ref.fielddataMinFrequency;
this.fielddataMaxFrequency = ref.fielddataMaxFrequency;
this.fielddataMinSegmentSize = ref.fielddataMinSegmentSize;
}
public TextFieldType clone() {
return new TextFieldType(this);
}
@Override
public boolean equals(Object o) {
if (super.equals(o) == false) {
return false;
}
TextFieldType that = (TextFieldType) o;
return fielddata == that.fielddata
&& fielddataMinFrequency == that.fielddataMinFrequency
&& fielddataMaxFrequency == that.fielddataMaxFrequency
&& fielddataMinSegmentSize == that.fielddataMinSegmentSize;
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), fielddata,
fielddataMinFrequency, fielddataMaxFrequency, fielddataMinSegmentSize);
}
@Override
public void checkCompatibility(MappedFieldType other,
List<String> conflicts, boolean strict) {
super.checkCompatibility(other, conflicts, strict);
TextFieldType otherType = (TextFieldType) other;
if (strict) {
if (fielddata() != otherType.fielddata()) {
conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update [fielddata] "
+ "across all types.");
}
if (fielddataMinFrequency() != otherType.fielddataMinFrequency()) {
conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update "
+ "[fielddata_frequency_filter.min] across all types.");
}
if (fielddataMaxFrequency() != otherType.fielddataMaxFrequency()) {
conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update "
+ "[fielddata_frequency_filter.max] across all types.");
}
if (fielddataMinSegmentSize() != otherType.fielddataMinSegmentSize()) {
conflicts.add("mapper [" + name() + "] is used by multiple types. Set update_all_types to true to update "
+ "[fielddata_frequency_filter.min_segment_size] across all types.");
}
}
}
public boolean fielddata() {
return fielddata;
}
public void setFielddata(boolean fielddata) {
checkIfFrozen();
this.fielddata = fielddata;
}
public double fielddataMinFrequency() {
return fielddataMinFrequency;
}
public void setFielddataMinFrequency(double fielddataMinFrequency) {
checkIfFrozen();
this.fielddataMinFrequency = fielddataMinFrequency;
}
public double fielddataMaxFrequency() {
return fielddataMaxFrequency;
}
public void setFielddataMaxFrequency(double fielddataMaxFrequency) {
checkIfFrozen();
this.fielddataMaxFrequency = fielddataMaxFrequency;
}
public int fielddataMinSegmentSize() {
return fielddataMinSegmentSize;
}
public void setFielddataMinSegmentSize(int fielddataMinSegmentSize) {
checkIfFrozen();
this.fielddataMinSegmentSize = fielddataMinSegmentSize;
}
@Override
public String typeName() {
return CONTENT_TYPE;
@ -159,6 +297,16 @@ public class TextFieldMapper extends FieldMapper implements AllFieldMapper.Inclu
}
return termQuery(nullValue(), null);
}
@Override
public IndexFieldData.Builder fielddataBuilder() {
if (fielddata == false) {
throw new IllegalStateException("Fielddata is disabled on text fields by default. Set fielddata=true on [" + name()
+ "] in order to load fielddata in memory by uninverting the inverted index. Note that this can however "
+ "use significant memory.");
}
return new PagedBytesIndexFieldData.Builder(fielddataMinFrequency, fielddataMaxFrequency, fielddataMinSegmentSize);
}
}
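On the consuming side, here is a hypothetical builder usage mirroring what the TypeParser above wires up when it encounters the new parameters; the field name and the single-argument Builder constructor are assumptions, the three builder methods are the ones added above.

    // Hypothetical usage; "body" and the Builder constructor argument are invented.
    TextFieldMapper.Builder builder = new TextFieldMapper.Builder("body"); // assumed single-name constructor
    builder.fielddata(true);                           // "fielddata": true (currently the default, see the TODO above)
    builder.eagerGlobalOrdinals(false);                // "eager_global_ordinals": false
    builder.fielddataFrequencyFilter(0.001, 0.1, 500); // "fielddata_frequency_filter": min, max, min_segment_size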
private Boolean includeInAll;
@ -249,6 +397,11 @@ public class TextFieldMapper extends FieldMapper implements AllFieldMapper.Inclu
this.includeInAll = ((TextFieldMapper) mergeWith).includeInAll;
}
@Override
public TextFieldType fieldType() {
return (TextFieldType) super.fieldType();
}
@Override
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
super.doXContentBody(builder, includeDefaults, params);
@ -263,5 +416,27 @@ public class TextFieldMapper extends FieldMapper implements AllFieldMapper.Inclu
if (includeDefaults || positionIncrementGap != POSITION_INCREMENT_GAP_USE_ANALYZER) {
builder.field("position_increment_gap", positionIncrementGap);
}
if (includeDefaults || fieldType().fielddata() != ((TextFieldType) defaultFieldType).fielddata()) {
builder.field("fielddata", fieldType().fielddata());
}
if (fieldType().fielddata()) {
if (includeDefaults
|| fieldType().fielddataMinFrequency() != Defaults.FIELDDATA_MIN_FREQUENCY
|| fieldType().fielddataMaxFrequency() != Defaults.FIELDDATA_MAX_FREQUENCY
|| fieldType().fielddataMinSegmentSize() != Defaults.FIELDDATA_MIN_SEGMENT_SIZE) {
builder.startObject("fielddata_frequency_filter");
if (includeDefaults || fieldType().fielddataMinFrequency() != Defaults.FIELDDATA_MIN_FREQUENCY) {
builder.field("min", fieldType().fielddataMinFrequency());
}
if (includeDefaults || fieldType().fielddataMaxFrequency() != Defaults.FIELDDATA_MAX_FREQUENCY) {
builder.field("max", fieldType().fielddataMaxFrequency());
}
if (includeDefaults || fieldType().fielddataMinSegmentSize() != Defaults.FIELDDATA_MIN_SEGMENT_SIZE) {
builder.field("min_segment_size", fieldType().fielddataMinSegmentSize());
}
builder.endObject();
}
}
}
}

View File

@ -28,13 +28,10 @@ import org.elasticsearch.common.joda.Joda;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.loader.SettingsLoader;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType.Loading;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.object.ObjectMapper;
@ -195,7 +192,7 @@ public class TypeParsers {
if (propName2.equals("enabled")) {
builder.omitNorms(!lenientNodeBooleanValue(propNode2));
propsIterator.remove();
} else if (propName2.equals(Loading.KEY)) {
} else if (propName2.equals("loading")) {
// ignore for bw compat
propsIterator.remove();
}
@ -266,9 +263,10 @@ public class TypeParsers {
SimilarityProvider similarityProvider = resolveSimilarity(parserContext, name, propNode.toString());
builder.similarity(similarityProvider);
iterator.remove();
} else if (propName.equals("fielddata")) {
final Settings settings = Settings.builder().put(SettingsLoader.Helper.loadNestedFromMap(nodeMapValue(propNode, "fielddata"))).build();
builder.fieldDataSettings(settings);
} else if (propName.equals("fielddata")
&& propNode instanceof Map
&& parserContext.indexVersionCreated().before(Version.V_5_0_0)) {
// ignore for bw compat
iterator.remove();
} else if (propName.equals("copy_to")) {
if (parserContext.isWithinMultiField()) {

View File

@ -34,6 +34,8 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.plain.AbstractGeoPointDVIndexFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
@ -73,7 +75,7 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr
public static final boolean ENABLE_GEOHASH = false;
public static final boolean ENABLE_GEOHASH_PREFIX = false;
public static final int GEO_HASH_PRECISION = GeoHashUtils.PRECISION;
public static final Explicit<Boolean> IGNORE_MALFORMED = new Explicit(false, false);
public static final Explicit<Boolean> IGNORE_MALFORMED = new Explicit<>(false, false);
}
public abstract static class Builder<T extends Builder, Y extends BaseGeoPointFieldMapper> extends FieldMapper.Builder<T, Y> {
@ -99,12 +101,6 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr
return (GeoPointFieldType)fieldType;
}
@Override
public T fieldDataSettings(Settings settings) {
this.fieldDataSettings = settings;
return builder;
}
public T enableLatLon(boolean enableLatLon) {
this.enableLatLon = enableLatLon;
return builder;
@ -345,6 +341,11 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr
this.latFieldType = latFieldType;
this.lonFieldType = lonFieldType;
}
@Override
public IndexFieldData.Builder fielddataBuilder() {
return new AbstractGeoPointDVIndexFieldData.Builder();
}
}
protected DoubleFieldMapper latMapper;

View File

@ -24,7 +24,6 @@ import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Query;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.lucene.Lucene;
@ -32,7 +31,6 @@ import org.elasticsearch.common.lucene.all.AllField;
import org.elasticsearch.common.lucene.all.AllTermQuery;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
@ -168,7 +166,6 @@ public class AllFieldMapper extends MetadataFieldMapper {
static final class AllFieldType extends MappedFieldType {
public AllFieldType() {
setFieldDataType(new FieldDataType("string"));
}
protected AllFieldType(AllFieldType ref) {

View File

@ -22,16 +22,17 @@ package org.elasticsearch.index.mapper.internal;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import java.io.IOException;
import java.util.ArrayList;
@ -130,7 +131,6 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
private boolean enabled = Defaults.ENABLED;
public FieldNamesFieldType() {
setFieldDataType(new FieldDataType("string"));
}
protected FieldNamesFieldType(FieldNamesFieldType ref) {
@ -192,6 +192,14 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
public boolean useTermQueryWithQueryString() {
return true;
}
@Override
public Query termQuery(Object value, QueryShardContext context) {
if (isEnabled() == false) {
throw new IllegalStateException("Cannot run [exists] queries if the [_field_names] field is disabled");
}
return super.termQuery(value, context);
}
}
private FieldNamesFieldMapper(Settings indexSettings, MappedFieldType existing) {

View File

@ -38,7 +38,6 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.iterable.Iterables;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
@ -112,7 +111,6 @@ public class IdFieldMapper extends MetadataFieldMapper {
static final class IdFieldType extends MappedFieldType {
public IdFieldType() {
setFieldDataType(new FieldDataType("string"));
}
protected IdFieldType(IdFieldType ref) {

View File

@ -29,6 +29,8 @@ import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.plain.IndexIndexFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
@ -184,6 +186,11 @@ public class IndexFieldMapper extends MetadataFieldMapper {
}
return value.toString();
}
@Override
public IndexFieldData.Builder fielddataBuilder() {
return new IndexIndexFieldData.Builder();
}
}
private EnabledAttributeMapper enabledState;

View File

@ -30,11 +30,14 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.loader.SettingsLoader;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
@ -53,7 +56,6 @@ import java.util.List;
import java.util.Map;
import java.util.Objects;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeMapValue;
/**
@ -94,6 +96,11 @@ public class ParentFieldMapper extends MetadataFieldMapper {
return builder;
}
public Builder eagerGlobalOrdinals(boolean eagerGlobalOrdinals) {
((ParentFieldType) fieldType()).setEagerGlobalOrdinals(eagerGlobalOrdinals);
return builder;
}
@Override
public ParentFieldMapper build(BuilderContext context) {
if (parentType == null) {
@ -106,6 +113,7 @@ public class ParentFieldMapper extends MetadataFieldMapper {
}
public static class TypeParser implements MetadataFieldMapper.TypeParser {
private static final ParseField FIELDDATA = new ParseField("fielddata").withAllDeprecated("eager_global_ordinals");
@Override
public MetadataFieldMapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
Builder builder = new Builder(parserContext.type());
@ -116,14 +124,16 @@ public class ParentFieldMapper extends MetadataFieldMapper {
if (fieldName.equals("type")) {
builder.type(fieldNode.toString());
iterator.remove();
} else if (fieldName.equals("fielddata")) {
// Only take over `loading`, since that is the only option now that is configurable:
} else if (parserContext.parseFieldMatcher().match(fieldName, FIELDDATA)) {
// for bw compat only
Map<String, String> fieldDataSettings = SettingsLoader.Helper.loadNestedFromMap(nodeMapValue(fieldNode, "fielddata"));
if (fieldDataSettings.containsKey(MappedFieldType.Loading.KEY)) {
Settings settings = settingsBuilder().put(MappedFieldType.Loading.KEY, fieldDataSettings.get(MappedFieldType.Loading.KEY)).build();
builder.fieldDataSettings(settings);
if (fieldDataSettings.containsKey("loading")) {
builder.eagerGlobalOrdinals("eager_global_ordinals".equals(fieldDataSettings.get("loading")));
}
iterator.remove();
} else if (fieldName.equals("eager_global_ordinals")) {
builder.eagerGlobalOrdinals(XContentMapValues.nodeBooleanValue(fieldNode));
iterator.remove();
}
}
return builder;
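This mirrors the string-field upgrade earlier in the diff, reduced to the single legacy option that still matters for _parent. A short hypothetical sketch of what the deprecated spelling collapses to; the map literal is invented and the builder is the one from the parse method above.

    // Deprecated: "_parent": { "type": "question", "fielddata": { "loading": "eager_global_ordinals" } }
    // New:        "_parent": { "type": "question", "eager_global_ordinals": true }
    Map<String, Object> legacy = new HashMap<>();
    legacy.put("loading", "eager_global_ordinals"); // contents of the old "fielddata" object
    boolean eager = "eager_global_ordinals".equals(legacy.get("loading"));
    builder.eagerGlobalOrdinals(eager); // same call the new "eager_global_ordinals" key takes directly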
@ -143,7 +153,6 @@ public class ParentFieldMapper extends MetadataFieldMapper {
parentJoinField.indexOptions(IndexOptions.NONE);
parentJoinField.docValues(true);
parentJoinField.fieldType().setDocValuesType(DocValuesType.SORTED);
parentJoinField.fieldType().setFieldDataType(null);
return parentJoinField.build(context);
}
@ -152,8 +161,8 @@ public class ParentFieldMapper extends MetadataFieldMapper {
final String documentType;
public ParentFieldType() {
setFieldDataType(new FieldDataType(NAME, settingsBuilder().put(MappedFieldType.Loading.KEY, Loading.EAGER_VALUE)));
documentType = null;
setEagerGlobalOrdinals(true);
}
ParentFieldType(ParentFieldType ref, String documentType) {
@ -200,6 +209,11 @@ public class ParentFieldMapper extends MetadataFieldMapper {
query.add(new TermQuery(new Term(TypeFieldMapper.NAME, documentType)), BooleanClause.Occur.FILTER);
return query.build();
}
@Override
public IndexFieldData.Builder fielddataBuilder() {
return new ParentChildIndexFieldData.Builder();
}
}
private final String parentType;
@ -288,17 +302,13 @@ public class ParentFieldMapper extends MetadataFieldMapper {
builder.startObject(CONTENT_TYPE);
builder.field("type", parentType);
if (includeDefaults || joinFieldHasCustomFieldDataSettings()) {
builder.field("fielddata", (Map) fieldType().fieldDataType().getSettings().getAsMap());
if (includeDefaults || fieldType().eagerGlobalOrdinals() != defaultFieldType.eagerGlobalOrdinals()) {
builder.field("eager_global_ordinals", fieldType().eagerGlobalOrdinals());
}
builder.endObject();
return builder;
}
private boolean joinFieldHasCustomFieldDataSettings() {
return fieldType != null && fieldType.fieldDataType() != null && fieldType.fieldDataType().equals(Defaults.FIELD_TYPE.fieldDataType()) == false;
}
@Override
protected void doMerge(Mapper mergeWith, boolean updateAllTypes) {
super.doMerge(mergeWith, updateAllTypes);

View File

@ -26,7 +26,6 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
@ -111,7 +110,6 @@ public class RoutingFieldMapper extends MetadataFieldMapper {
static final class RoutingFieldType extends MappedFieldType {
public RoutingFieldType() {
setFieldDataType(new FieldDataType("string"));
}
protected RoutingFieldType(RoutingFieldType ref) {

View File

@ -21,7 +21,6 @@ package org.elasticsearch.index.mapper.internal;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
@ -95,7 +94,7 @@ public class TTLFieldMapper extends MetadataFieldMapper {
public TTLFieldMapper build(BuilderContext context) {
setupFieldType(context);
fieldType.setHasDocValues(false);
return new TTLFieldMapper(fieldType, enabledState, defaultTTL, fieldDataSettings, context.indexSettings());
return new TTLFieldMapper(fieldType, enabledState, defaultTTL, context.indexSettings());
}
}
@ -161,11 +160,11 @@ public class TTLFieldMapper extends MetadataFieldMapper {
private long defaultTTL;
private TTLFieldMapper(Settings indexSettings) {
this(Defaults.TTL_FIELD_TYPE.clone(), Defaults.ENABLED_STATE, Defaults.DEFAULT, null, indexSettings);
this(Defaults.TTL_FIELD_TYPE.clone(), Defaults.ENABLED_STATE, Defaults.DEFAULT, indexSettings);
}
private TTLFieldMapper(MappedFieldType fieldType, EnabledAttributeMapper enabled, long defaultTTL,
@Nullable Settings fieldDataSettings, Settings indexSettings) {
Settings indexSettings) {
super(NAME, fieldType, Defaults.TTL_FIELD_TYPE, indexSettings);
this.enabledState = enabled;
this.defaultTTL = defaultTTL;

View File

@ -34,7 +34,6 @@ import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
@ -102,7 +101,6 @@ public class TypeFieldMapper extends MetadataFieldMapper {
static final class TypeFieldType extends MappedFieldType {
public TypeFieldType() {
setFieldDataType(new FieldDataType("string"));
}
protected TypeFieldType(TypeFieldType ref) {

View File

@ -25,25 +25,24 @@ import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.Term;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.plain.PagedBytesIndexFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.index.mapper.core.TextFieldMapper;
import org.elasticsearch.index.mapper.Uid;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
/**
*
*/
@ -104,7 +103,6 @@ public class UidFieldMapper extends MetadataFieldMapper {
static final class UidFieldType extends MappedFieldType {
public UidFieldType() {
setFieldDataType(new FieldDataType("string"));
}
protected UidFieldType(UidFieldType ref) {
@ -128,6 +126,15 @@ public class UidFieldMapper extends MetadataFieldMapper {
}
return Uid.createUid(value.toString());
}
@Override
public IndexFieldData.Builder fielddataBuilder() {
// TODO: add doc values support?
return new PagedBytesIndexFieldData.Builder(
TextFieldMapper.Defaults.FIELDDATA_MIN_FREQUENCY,
TextFieldMapper.Defaults.FIELDDATA_MAX_FREQUENCY,
TextFieldMapper.Defaults.FIELDDATA_MIN_SEGMENT_SIZE);
}
}
private UidFieldMapper(Settings indexSettings, MappedFieldType existing) {

View File

@ -24,7 +24,6 @@ import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.index.DocValuesType;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
@ -82,7 +81,6 @@ public class VersionFieldMapper extends MetadataFieldMapper {
static final class VersionFieldType extends MappedFieldType {
public VersionFieldType() {
setFieldDataType(new FieldDataType("long"));
}
protected VersionFieldType(VersionFieldType ref) {

View File

@ -43,7 +43,9 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.analysis.NumericAnalyzer;
import org.elasticsearch.index.analysis.NumericTokenizer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
@ -162,7 +164,6 @@ public class IpFieldMapper extends NumberFieldMapper {
public static final class IpFieldType extends LongFieldMapper.LongFieldType {
public IpFieldType() {
setFieldDataType(new FieldDataType("long"));
}
protected IpFieldType(IpFieldType ref) {
@ -271,6 +272,12 @@ public class IpFieldMapper extends NumberFieldMapper {
long maxValue = LegacyNumericUtils.getMaxLong(terms);
return new FieldStats.Ip(maxDoc, terms.getDocCount(), terms.getSumDocFreq(), terms.getSumTotalTermFreq(), minValue, maxValue);
}
@Override
public IndexFieldData.Builder fielddataBuilder() {
failIfNoDocValues();
return new DocValuesIndexFieldData.Builder().numericType(NumericType.LONG);
}
}
protected IpFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,

View File

@ -19,11 +19,15 @@
package org.elasticsearch.index.mapper.object;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.MapperParsingException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@@ -33,30 +37,41 @@ import java.util.TreeMap;
/**
*
*/
public class DynamicTemplate {
public class DynamicTemplate implements ToXContent {
public static enum MatchType {
SIMPLE,
REGEX;
SIMPLE {
@Override
public String toString() {
return "simple";
}
},
REGEX {
@Override
public String toString() {
return "regex";
}
};
public static MatchType fromString(String value) {
if ("simple".equals(value)) {
return SIMPLE;
} else if ("regex".equals(value)) {
return REGEX;
for (MatchType v : values()) {
if (v.toString().equals(value)) {
return v;
}
}
throw new IllegalArgumentException("No matching pattern matched on [" + value + "]");
}
}
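The MatchType change above replaces a hard-coded if/else chain in fromString with per-constant toString overrides and a lookup loop, so a new match type only needs to be added in one place. A minimal self-contained sketch of that pattern (the demo class name is made up for illustration):

    public class MatchTypeDemo {
        enum MatchType {
            SIMPLE { @Override public String toString() { return "simple"; } },
            REGEX  { @Override public String toString() { return "regex"; } };

            // Resolve a value by comparing against each constant's string form,
            // so adding a constant automatically extends parsing.
            static MatchType fromString(String value) {
                for (MatchType v : values()) {
                    if (v.toString().equals(value)) {
                        return v;
                    }
                }
                throw new IllegalArgumentException("No matching pattern matched on [" + value + "]");
            }
        }

        public static void main(String[] args) {
            System.out.println(MatchType.fromString("regex")); // prints "regex"
        }
    }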
public static DynamicTemplate parse(String name, Map<String, Object> conf) throws MapperParsingException {
public static DynamicTemplate parse(String name, Map<String, Object> conf,
Version indexVersionCreated) throws MapperParsingException {
String match = null;
String pathMatch = null;
String unmatch = null;
String pathUnmatch = null;
Map<String, Object> mapping = null;
String matchMappingType = null;
String matchPattern = "simple";
String matchPattern = MatchType.SIMPLE.toString();
for (Map.Entry<String, Object> entry : conf.entrySet()) {
String propName = Strings.toUnderscoreCase(entry.getKey());
@@ -74,22 +89,18 @@ public class DynamicTemplate {
matchPattern = entry.getValue().toString();
} else if ("mapping".equals(propName)) {
mapping = (Map<String, Object>) entry.getValue();
} else if (indexVersionCreated.onOrAfter(Version.V_5_0_0)) {
// unknown parameters were ignored before but still carried through serialization
// so we need to ignore them at parsing time for old indices
throw new IllegalArgumentException("Illegal dynamic template parameter: [" + propName + "]");
}
}
if (match == null && pathMatch == null && matchMappingType == null) {
throw new MapperParsingException("template must have match, path_match or match_mapping_type set");
}
if (mapping == null) {
throw new MapperParsingException("template must have mapping set");
}
return new DynamicTemplate(name, conf, pathMatch, pathUnmatch, match, unmatch, matchMappingType, MatchType.fromString(matchPattern), mapping);
return new DynamicTemplate(name, pathMatch, pathUnmatch, match, unmatch, matchMappingType, MatchType.fromString(matchPattern), mapping);
}
private final String name;
private final Map<String, Object> conf;
private final String pathMatch;
private final String pathUnmatch;
@@ -104,9 +115,14 @@ public class DynamicTemplate {
private final Map<String, Object> mapping;
public DynamicTemplate(String name, Map<String, Object> conf, String pathMatch, String pathUnmatch, String match, String unmatch, String matchMappingType, MatchType matchType, Map<String, Object> mapping) {
public DynamicTemplate(String name, String pathMatch, String pathUnmatch, String match, String unmatch, String matchMappingType, MatchType matchType, Map<String, Object> mapping) {
if (match == null && pathMatch == null && matchMappingType == null) {
throw new MapperParsingException("template must have match, path_match or match_mapping_type set");
}
if (mapping == null) {
throw new MapperParsingException("template must have mapping set");
}
this.name = name;
this.conf = new TreeMap<>(conf);
this.pathMatch = pathMatch;
this.pathUnmatch = pathUnmatch;
this.match = match;
@@ -120,10 +136,6 @@ public class DynamicTemplate {
return this.name;
}
public Map<String, Object> conf() {
return this.conf;
}
public boolean match(ContentPath path, String name, String dynamicType) {
if (pathMatch != null && !patternMatch(pathMatch, path.pathAsText(name))) {
return false;
@@ -148,13 +160,31 @@ public class DynamicTemplate {
return true;
}
public boolean hasType() {
return mapping.containsKey("type");
}
public String mappingType(String dynamicType) {
return mapping.containsKey("type") ? mapping.get("type").toString().replace("{dynamic_type}", dynamicType).replace("{dynamicType}", dynamicType) : dynamicType;
}
String type;
if (mapping.containsKey("type")) {
type = mapping.get("type").toString();
type = type.replace("{dynamic_type}", dynamicType);
type = type.replace("{dynamicType}", dynamicType);
} else {
type = dynamicType;
}
if (type.equals(mapping.get("type")) == false // either the type was not set, or we updated it through replacements
&& "text".equals(type)) { // and the result is "text"
// now that string has been split into text and keyword, we use text for
// dynamic mappings. However, it used to be possible to index as a keyword
// by setting index=not_analyzed, so for now we will use a keyword field rather
// than a text field if index=not_analyzed and the field type was not specified
// explicitly
// TODO: remove this in 6.0
// TODO: how to do it in the future?
final Object index = mapping.get("index");
if ("not_analyzed".equals(index) || "no".equals(index)) {
type = "keyword";
}
}
return type;
}
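The mappingType logic above first substitutes the {dynamic_type}/{dynamicType} placeholders and then, because string has been split into text and keyword, falls back to keyword when the template asked for index=not_analyzed or index=no without naming a type explicitly. A rough, self-contained sketch of that decision, with a made-up helper name and a plain map standing in for the template mapping:

    import java.util.Map;

    public class DynamicTypeResolution {
        // Resolve the concrete type for a dynamic field: apply {dynamic_type}
        // placeholders, then fall back to "keyword" when the template asked for a
        // non-analyzed string and the resolved type ended up as "text".
        static String resolveType(Map<String, Object> mapping, String dynamicType) {
            String type;
            if (mapping.containsKey("type")) {
                type = mapping.get("type").toString()
                        .replace("{dynamic_type}", dynamicType)
                        .replace("{dynamicType}", dynamicType);
            } else {
                type = dynamicType;
            }
            if (type.equals(mapping.get("type")) == false && "text".equals(type)) {
                Object index = mapping.get("index");
                if ("not_analyzed".equals(index) || "no".equals(index)) {
                    type = "keyword";
                }
            }
            return type;
        }

        public static void main(String[] args) {
            System.out.println(resolveType(Map.of("type", "{dynamic_type}", "index", "not_analyzed"), "text")); // keyword
            System.out.println(resolveType(Map.of(), "long")); // long
        }
    }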
private boolean patternMatch(String pattern, String str) {
if (matchType == MatchType.SIMPLE) {
@@ -200,40 +230,29 @@ public class DynamicTemplate {
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
if (match != null) {
builder.field("match", match);
}
if (o == null || getClass() != o.getClass()) {
return false;
if (pathMatch != null) {
builder.field("path_match", pathMatch);
}
DynamicTemplate that = (DynamicTemplate) o;
// check if same matching, if so, replace the mapping
if (match != null ? !match.equals(that.match) : that.match != null) {
return false;
if (unmatch != null) {
builder.field("unmatch", unmatch);
}
if (matchMappingType != null ? !matchMappingType.equals(that.matchMappingType) : that.matchMappingType != null) {
return false;
if (pathUnmatch != null) {
builder.field("path_unmatch", pathUnmatch);
}
if (matchType != that.matchType) {
return false;
if (matchMappingType != null) {
builder.field("match_mapping_type", matchMappingType);
}
if (unmatch != null ? !unmatch.equals(that.unmatch) : that.unmatch != null) {
return false;
if (matchType != MatchType.SIMPLE) {
builder.field("match_pattern", matchType);
}
return true;
}
@Override
public int hashCode() {
// check if same matching, if so, replace the mapping
int result = match != null ? match.hashCode() : 0;
result = 31 * result + (unmatch != null ? unmatch.hashCode() : 0);
result = 31 * result + (matchType != null ? matchType.hashCode() : 0);
result = 31 * result + (matchMappingType != null ? matchMappingType.hashCode() : 0);
return result;
// use a sorted map for consistent serialization
builder.field("mapping", new TreeMap<>(mapping));
builder.endObject();
return builder;
}
}

View File

@@ -19,6 +19,7 @@
package org.elasticsearch.index.mapper.object;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
@@ -140,14 +141,15 @@ public class RootObjectMapper extends ObjectMapper {
String fieldName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue();
if (parseObjectOrDocumentTypeProperties(fieldName, fieldNode, parserContext, builder)
|| processField(builder, fieldName, fieldNode)) {
|| processField(builder, fieldName, fieldNode, parserContext.indexVersionCreated())) {
iterator.remove();
}
}
return builder;
}
protected boolean processField(ObjectMapper.Builder builder, String fieldName, Object fieldNode) {
protected boolean processField(ObjectMapper.Builder builder, String fieldName, Object fieldNode,
Version indexVersionCreated) {
if (fieldName.equals("date_formats") || fieldName.equals("dynamic_date_formats")) {
List<FormatDateTimeFormatter> dateTimeFormatters = new ArrayList<>();
if (fieldNode instanceof List) {
@@ -185,7 +187,10 @@ public class RootObjectMapper extends ObjectMapper {
throw new MapperParsingException("A dynamic template must be defined with a name");
}
Map.Entry<String, Object> entry = tmpl.entrySet().iterator().next();
((Builder) builder).add(DynamicTemplate.parse(entry.getKey(), (Map<String, Object>) entry.getValue()));
String templateName = entry.getKey();
Map<String, Object> templateParams = (Map<String, Object>) entry.getValue();
DynamicTemplate template = DynamicTemplate.parse(templateName, templateParams, indexVersionCreated);
((Builder) builder).add(template);
}
return true;
} else if (fieldName.equals("date_detection")) {
@@ -329,8 +334,7 @@ public class RootObjectMapper extends ObjectMapper {
builder.startArray("dynamic_templates");
for (DynamicTemplate dynamicTemplate : dynamicTemplates) {
builder.startObject();
builder.field(dynamicTemplate.name());
builder.map(dynamicTemplate.conf());
builder.field(dynamicTemplate.name(), dynamicTemplate);
builder.endObject();
}
builder.endArray();
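RootObjectMapper now threads the index creation version into DynamicTemplate.parse so unknown template parameters are rejected only for newly created indices while old indices keep working. A small sketch of that version-gated strictness, with an invented helper and a boolean standing in for the onOrAfter(V_5_0_0) check:

    public class StrictTemplateParsing {
        // Older indices may carry unknown dynamic-template keys that used to be
        // silently ignored; only reject them for indices created on or after the cutoff.
        static void checkUnknownKey(String key, boolean createdOnOrAfterCutoff) {
            boolean known = key.equals("match") || key.equals("path_match") || key.equals("unmatch")
                    || key.equals("path_unmatch") || key.equals("match_mapping_type")
                    || key.equals("match_pattern") || key.equals("mapping");
            if (known == false && createdOnOrAfterCutoff) {
                throw new IllegalArgumentException("Illegal dynamic template parameter: [" + key + "]");
            }
        }

        public static void main(String[] args) {
            checkUnknownKey("made_up_param", false); // tolerated for an old index
            try {
                checkUnknownKey("made_up_param", true); // rejected for a new index
            } catch (IllegalArgumentException e) {
                System.out.println(e.getMessage());
            }
        }
    }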

View File

@@ -48,7 +48,9 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.IndexWarmer;
import org.elasticsearch.index.IndexWarmer.TerminationHandle;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.engine.Engine.Searcher;
import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.query.PercolatorQuery;
@@ -98,9 +100,13 @@ public final class PercolatorQueryCache extends AbstractIndexComponent
final Executor executor = threadPool.executor(ThreadPool.Names.WARMER);
@Override
public IndexWarmer.TerminationHandle warmNewReaders(IndexShard indexShard, Engine.Searcher searcher) {
public TerminationHandle warmReader(IndexShard indexShard, Searcher searcher) {
final CountDownLatch latch = new CountDownLatch(searcher.reader().leaves().size());
for (final LeafReaderContext ctx : searcher.reader().leaves()) {
if (cache.get(ctx.reader().getCoreCacheKey()) != null) {
latch.countDown();
continue;
}
executor.execute(() -> {
try {
final long start = System.nanoTime();
@@ -121,11 +127,6 @@ public final class PercolatorQueryCache extends AbstractIndexComponent
}
return () -> latch.await();
}
@Override
public IndexWarmer.TerminationHandle warmTopReader(IndexShard indexShard, Engine.Searcher searcher) {
return IndexWarmer.TerminationHandle.NO_WAIT;
}
};
}
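The warmer above counts the latch down immediately for segments whose percolator queries are already cached and only submits the remaining leaves to the executor, so the returned TerminationHandle still waits for exactly one count per leaf. A self-contained sketch of that counting pattern using plain JDK types (segment names and the cache are stand-ins):

    import java.util.List;
    import java.util.Set;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.CountDownLatch;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    public class WarmSkipCached {
        public static void main(String[] args) throws InterruptedException {
            List<String> segments = List.of("seg-0", "seg-1", "seg-2");
            Set<String> cache = ConcurrentHashMap.newKeySet();
            cache.add("seg-1"); // already warmed

            ExecutorService executor = Executors.newFixedThreadPool(2);
            CountDownLatch latch = new CountDownLatch(segments.size());
            for (String segment : segments) {
                if (cache.contains(segment)) {
                    latch.countDown(); // nothing to do, but keep the count consistent
                    continue;
                }
                executor.execute(() -> {
                    try {
                        cache.add(segment); // stand-in for the actual warming work
                    } finally {
                        latch.countDown();
                    }
                });
            }
            latch.await(); // the TerminationHandle equivalent
            executor.shutdown();
            System.out.println("warmed segments: " + cache);
        }
    }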

View File

@@ -23,18 +23,16 @@ import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermRangeQuery;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.internal.FieldNamesFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.Objects;
/**
@@ -82,38 +80,18 @@ public class ExistsQueryBuilder extends AbstractQueryBuilder<ExistsQueryBuilder>
return Queries.newMatchNoDocsQuery();
}
ObjectMapper objectMapper = context.getObjectMapper(fieldPattern);
if (objectMapper != null) {
// automatic make the object mapper pattern
fieldPattern = fieldPattern + ".*";
}
Collection<String> fields = context.simpleMatchToIndexNames(fieldPattern);
if (fields.isEmpty()) {
// no fields exists, so we should not match anything
return Queries.newMatchNoDocsQuery();
final Collection<String> fields;
if (context.getObjectMapper(fieldPattern) != null) {
// the _field_names field also indexes objects, so we don't have to
// do any more work to support exists queries on whole objects
fields = Collections.singleton(fieldPattern);
} else {
fields = context.simpleMatchToIndexNames(fieldPattern);
}
BooleanQuery.Builder boolFilterBuilder = new BooleanQuery.Builder();
for (String field : fields) {
MappedFieldType fieldType = context.fieldMapper(field);
Query filter = null;
if (fieldNamesFieldType.isEnabled()) {
final String f;
if (fieldType != null) {
f = fieldType.name();
} else {
f = field;
}
filter = fieldNamesFieldType.termQuery(f, context);
}
// if _field_names are not indexed, we need to go the slow way
if (filter == null && fieldType != null) {
filter = fieldType.rangeQuery(null, null, true, true);
}
if (filter == null) {
filter = new TermRangeQuery(field, null, null, true, true);
}
Query filter = fieldNamesFieldType.termQuery(field, context);
boolFilterBuilder.add(filter, BooleanClause.Occur.SHOULD);
}
return new ConstantScoreQuery(boolFilterBuilder.build());
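After this change the exists query is built purely from _field_names terms: one SHOULD clause per matching field (or a single clause for a whole object), wrapped in a constant-score query. A rough Lucene-only sketch of the resulting query shape, assuming Lucene on the classpath (field and term values are illustrative):

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.BooleanClause;
    import org.apache.lucene.search.BooleanQuery;
    import org.apache.lucene.search.ConstantScoreQuery;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.TermQuery;

    public class ExistsQueryShape {
        // One SHOULD clause per matching field name, all resolved through _field_names.
        static Query existsQuery(Iterable<String> fields) {
            BooleanQuery.Builder bool = new BooleanQuery.Builder();
            for (String field : fields) {
                bool.add(new TermQuery(new Term("_field_names", field)), BooleanClause.Occur.SHOULD);
            }
            return new ConstantScoreQuery(bool.build());
        }

        public static void main(String[] args) {
            System.out.println(existsQuery(java.util.List.of("user.name", "user")));
        }
    }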

View File

@@ -105,6 +105,7 @@ public class QueryShardContext extends QueryRewriteContext {
this.allowUnmappedFields = indexSettings.isDefaultAllowUnmappedFields();
this.indicesQueriesRegistry = indicesQueriesRegistry;
this.percolatorQueryCache = percolatorQueryCache;
this.nestedScope = new NestedScope();
}
public QueryShardContext(QueryShardContext source) {
@@ -113,6 +114,7 @@ public class QueryShardContext extends QueryRewriteContext {
}
@Override
public QueryShardContext clone() {
return new QueryShardContext(indexSettings, bitsetFilterCache, indexFieldDataService, mapperService, similarityService, scriptService, indicesQueriesRegistry, percolatorQueryCache);
}

View File

@@ -22,10 +22,6 @@ package org.elasticsearch.index.query;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.action.fieldstats.FieldStats;
import org.elasticsearch.action.fieldstats.IndexConstraint;
import org.elasticsearch.action.fieldstats.IndexConstraint.Comparison;
import org.elasticsearch.action.fieldstats.IndexConstraint.Property;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@@ -259,8 +255,8 @@ public class RangeQueryBuilder extends AbstractQueryBuilder<RangeQueryBuilder> i
}
@Override
protected QueryBuilder<?> doRewrite(QueryRewriteContext queryShardContext) throws IOException {
FieldStatsProvider fieldStatsProvider = queryShardContext.getFieldStatsProvider();
protected QueryBuilder<?> doRewrite(QueryRewriteContext queryRewriteContext) throws IOException {
FieldStatsProvider fieldStatsProvider = queryRewriteContext.getFieldStatsProvider();
// If the fieldStatsProvider is null we are not on the shard and cannot
// rewrite so just return without rewriting
if (fieldStatsProvider != null) {
@@ -271,17 +267,10 @@ public class RangeQueryBuilder extends AbstractQueryBuilder<RangeQueryBuilder> i
case DISJOINT:
return new MatchNoneQueryBuilder();
case WITHIN:
FieldStats<?> fieldStats = fieldStatsProvider.get(fieldName);
if (!(fieldStats.getMinValue().equals(from) && fieldStats.getMaxValue().equals(to) && includeUpper && includeLower)) {
// Rebuild the range query with the bounds for this shard.
// The includeLower/Upper values are preserved only if the
// bound has not been changed by the rewrite
if (from != null || to != null) {
RangeQueryBuilder newRangeQuery = new RangeQueryBuilder(fieldName);
String dateFormatString = format == null ? null : format.format();
newRangeQuery.from(fieldStats.getMinValue(), includeLower || fieldStats.match(
new IndexConstraint(fieldName, Property.MIN, Comparison.GT, fieldStats.stringValueOf(from, dateFormatString))));
newRangeQuery.to(fieldStats.getMaxValue(), includeUpper || fieldStats.match(
new IndexConstraint(fieldName, Property.MAX, Comparison.LT, fieldStats.stringValueOf(to, dateFormatString))));
newRangeQuery.from(null);
newRangeQuery.to(null);
newRangeQuery.format = format;
newRangeQuery.timeZone = timeZone;
return newRangeQuery;
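The rewrite above distinguishes three relations between the requested range and the shard's min/max for the field: DISJOINT becomes a match-none query, WITHIN drops both bounds so the shard-level query is open-ended (and therefore cacheable), and anything else keeps the original bounds. A self-contained numeric sketch of that classification (types and names are simplified stand-ins):

    public class RangeRewriteSketch {
        enum Relation { DISJOINT, WITHIN, INTERSECTS }

        // Compare the shard's min/max for the field with the requested bounds.
        static Relation relation(long shardMin, long shardMax, Long from, Long to) {
            long lo = from == null ? Long.MIN_VALUE : from;
            long hi = to == null ? Long.MAX_VALUE : to;
            if (shardMax < lo || shardMin > hi) {
                return Relation.DISJOINT;   // rewrite to a match-none query
            }
            if (shardMin >= lo && shardMax <= hi) {
                return Relation.WITHIN;     // rewrite to an unbounded range (from=null, to=null)
            }
            return Relation.INTERSECTS;     // keep the original bounds
        }

        public static void main(String[] args) {
            System.out.println(relation(10, 20, 0L, 100L));  // WITHIN
            System.out.println(relation(10, 20, 50L, 100L)); // DISJOINT
        }
    }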

View File

@@ -20,9 +20,7 @@
package org.elasticsearch.index.query.support;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.join.BitSetProducer;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
@@ -45,46 +43,18 @@ public class NestedInnerQueryParseSupport {
protected final QueryParseContext parseContext;
private BytesReference source;
private Query innerQuery;
private Query innerFilter;
protected String path;
private boolean filterParsed = false;
private boolean queryParsed = false;
protected boolean queryFound = false;
protected boolean filterFound = false;
protected BitSetProducer parentFilter;
protected Query childFilter;
protected ObjectMapper nestedObjectMapper;
private ObjectMapper parentObjectMapper;
public NestedInnerQueryParseSupport(XContentParser parser, QueryShardContext context) {
shardContext = context;
parseContext = shardContext.parseContext();
shardContext.reset(parser);
}
public NestedInnerQueryParseSupport(QueryShardContext context) {
this.parseContext = context.parseContext();
this.shardContext = context;
}
public void query() throws IOException {
if (path != null) {
setPathLevel();
try {
innerQuery = parseContext.parseInnerQueryBuilder().toQuery(this.shardContext);
} finally {
resetPathLevel();
}
queryParsed = true;
} else {
source = XContentFactory.smileBuilder().copyCurrentStructure(parseContext.parser()).bytes();
}
queryFound = true;
}
public void filter() throws IOException {
@@ -103,35 +73,6 @@ public class NestedInnerQueryParseSupport {
filterFound = true;
}
public Query getInnerQuery() throws IOException {
if (queryParsed) {
return innerQuery;
} else {
if (path == null) {
throw new QueryShardException(shardContext, "[nested] requires 'path' field");
}
if (!queryFound) {
throw new QueryShardException(shardContext, "[nested] requires either 'query' or 'filter' field");
}
XContentParser old = parseContext.parser();
try {
XContentParser innerParser = XContentHelper.createParser(source);
parseContext.parser(innerParser);
setPathLevel();
try {
innerQuery = parseContext.parseInnerQueryBuilder().toQuery(this.shardContext);
} finally {
resetPathLevel();
}
queryParsed = true;
return innerQuery;
} finally {
parseContext.parser(old);
}
}
}
public Query getInnerFilter() throws IOException {
if (filterParsed) {
return innerFilter;
@@ -178,27 +119,12 @@ public class NestedInnerQueryParseSupport {
return nestedObjectMapper;
}
public boolean queryFound() {
return queryFound;
}
public boolean filterFound() {
return filterFound;
}
public ObjectMapper getParentObjectMapper() {
return parentObjectMapper;
}
private void setPathLevel() {
ObjectMapper objectMapper = shardContext.nestedScope().getObjectMapper();
if (objectMapper == null) {
parentFilter = shardContext.bitsetFilter(Queries.newNonNestedFilter());
} else {
parentFilter = shardContext.bitsetFilter(objectMapper.nestedTypeFilter());
}
childFilter = nestedObjectMapper.nestedTypeFilter();
parentObjectMapper = shardContext.nestedScope().nextLevel(nestedObjectMapper);
shardContext.nestedScope().nextLevel(nestedObjectMapper);
}
private void resetPathLevel() {

View File

@@ -46,12 +46,12 @@ import org.elasticsearch.common.metrics.MeanMetric;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.Callback;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.common.util.concurrent.SuspendableRefContainer;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.NodeServicesProvider;
import org.elasticsearch.index.SearchSlowLog;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.cache.IndexCache;
@@ -193,7 +193,7 @@ public class IndexShard extends AbstractIndexShardComponent {
public IndexShard(ShardId shardId, IndexSettings indexSettings, ShardPath path, Store store, IndexCache indexCache,
MapperService mapperService, SimilarityService similarityService, IndexFieldDataService indexFieldDataService,
@Nullable EngineFactory engineFactory,
IndexEventListener indexEventListener, IndexSearcherWrapper indexSearcherWrapper, NodeServicesProvider provider,
IndexEventListener indexEventListener, IndexSearcherWrapper indexSearcherWrapper, ThreadPool threadPool, BigArrays bigArrays,
SearchSlowLog slowLog, Engine.Warmer warmer, IndexingOperationListener... listeners) {
super(shardId, indexSettings);
final Settings settings = indexSettings.getSettings();
@@ -205,7 +205,7 @@ public class IndexShard extends AbstractIndexShardComponent {
this.engineFactory = engineFactory == null ? new InternalEngineFactory() : engineFactory;
this.store = store;
this.indexEventListener = indexEventListener;
this.threadPool = provider.getThreadPool();
this.threadPool = threadPool;
this.mapperService = mapperService;
this.indexCache = indexCache;
this.internalIndexingStats = new InternalIndexingStats();
@@ -226,7 +226,7 @@ public class IndexShard extends AbstractIndexShardComponent {
this.checkIndexOnStartup = indexSettings.getValue(IndexSettings.INDEX_CHECK_ON_STARTUP);
this.translogConfig = new TranslogConfig(shardId, shardPath().resolveTranslog(), indexSettings,
provider.getBigArrays());
bigArrays);
final QueryCachingPolicy cachingPolicy;
// the query cache is a node-level thing, however we want the most popular filters
// to be computed on a per-shard basis

View File

@@ -20,8 +20,8 @@ package org.elasticsearch.index.shard;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.NodeServicesProvider;
import org.elasticsearch.index.cache.IndexCache;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.engine.EngineConfig;
@@ -33,6 +33,7 @@ import org.elasticsearch.index.SearchSlowLog;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.index.store.Store;
import org.elasticsearch.index.translog.TranslogStats;
import org.elasticsearch.threadpool.ThreadPool;
import java.io.IOException;
@@ -44,9 +45,13 @@ import java.io.IOException;
*/
public final class ShadowIndexShard extends IndexShard {
public ShadowIndexShard(ShardId shardId, IndexSettings indexSettings, ShardPath path, Store store, IndexCache indexCache, MapperService mapperService, SimilarityService similarityService, IndexFieldDataService indexFieldDataService, @Nullable EngineFactory engineFactory,
IndexEventListener indexEventListener, IndexSearcherWrapper wrapper, NodeServicesProvider provider, SearchSlowLog searchSlowLog, Engine.Warmer engineWarmer) throws IOException {
super(shardId, indexSettings, path, store, indexCache, mapperService, similarityService, indexFieldDataService, engineFactory, indexEventListener, wrapper, provider, searchSlowLog, engineWarmer);
public ShadowIndexShard(ShardId shardId, IndexSettings indexSettings, ShardPath path, Store store, IndexCache indexCache,
MapperService mapperService, SimilarityService similarityService, IndexFieldDataService indexFieldDataService,
@Nullable EngineFactory engineFactory, IndexEventListener indexEventListener, IndexSearcherWrapper wrapper,
ThreadPool threadPool, BigArrays bigArrays, SearchSlowLog searchSlowLog, Engine.Warmer engineWarmer)
throws IOException {
super(shardId, indexSettings, path, store, indexCache, mapperService, similarityService, indexFieldDataService, engineFactory,
indexEventListener, wrapper, threadPool, bigArrays, searchSlowLog, engineWarmer);
}
/**

View File

@@ -64,7 +64,6 @@ import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.NodeServicesProvider;
import org.elasticsearch.index.analysis.AnalysisRegistry;
import org.elasticsearch.index.cache.request.ShardRequestCache;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.flush.FlushStats;
import org.elasticsearch.index.get.GetStats;
@@ -177,7 +176,7 @@ public class IndicesService extends AbstractLifecycleComponent<IndicesService> i
this.circuitBreakerService = circuitBreakerService;
this.indicesFieldDataCache = new IndicesFieldDataCache(settings, new IndexFieldDataCache.Listener() {
@Override
public void onRemoval(ShardId shardId, String fieldName, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) {
public void onRemoval(ShardId shardId, String fieldName, boolean wasEvicted, long sizeInBytes) {
assert sizeInBytes >= 0 : "When reducing circuit breaker, it should be adjusted with a number higher or equal to 0 and not [" + sizeInBytes + "]";
circuitBreakerService.getBreaker(CircuitBreaker.FIELDDATA).addWithoutBreaking(-sizeInBytes);
}

View File

@@ -39,7 +39,6 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.AtomicFieldData;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.shard.ShardId;
@@ -75,8 +74,8 @@ public class IndicesFieldDataCache extends AbstractComponent implements RemovalL
cache.invalidateAll();
}
public IndexFieldDataCache buildIndexFieldDataCache(IndexFieldDataCache.Listener listener, Index index, String fieldName, FieldDataType fieldDataType) {
return new IndexFieldCache(logger, cache, index, fieldName, fieldDataType, indicesFieldDataCacheListener, listener);
public IndexFieldDataCache buildIndexFieldDataCache(IndexFieldDataCache.Listener listener, Index index, String fieldName) {
return new IndexFieldCache(logger, cache, index, fieldName, indicesFieldDataCacheListener, listener);
}
public Cache<Key, Accountable> getCache() {
@@ -91,7 +90,7 @@ public class IndicesFieldDataCache extends AbstractComponent implements RemovalL
final Accountable value = notification.getValue();
for (IndexFieldDataCache.Listener listener : key.listeners) {
try {
listener.onRemoval(key.shardId, indexCache.fieldName, indexCache.fieldDataType, notification.getRemovalReason() == RemovalNotification.RemovalReason.EVICTED, value.ramBytesUsed());
listener.onRemoval(key.shardId, indexCache.fieldName, notification.getRemovalReason() == RemovalNotification.RemovalReason.EVICTED, value.ramBytesUsed());
} catch (Throwable e) {
// load anyway since listeners should not throw exceptions
logger.error("Failed to call listener on field data cache unloading", e);
@@ -114,16 +113,14 @@ public class IndicesFieldDataCache extends AbstractComponent implements RemovalL
private final ESLogger logger;
final Index index;
final String fieldName;
final FieldDataType fieldDataType;
private final Cache<Key, Accountable> cache;
private final Listener[] listeners;
IndexFieldCache(ESLogger logger,final Cache<Key, Accountable> cache, Index index, String fieldName, FieldDataType fieldDataType, Listener... listeners) {
IndexFieldCache(ESLogger logger,final Cache<Key, Accountable> cache, Index index, String fieldName, Listener... listeners) {
this.logger = logger;
this.listeners = listeners;
this.index = index;
this.fieldName = fieldName;
this.fieldDataType = fieldDataType;
this.cache = cache;
}
@@ -140,7 +137,7 @@ public class IndicesFieldDataCache extends AbstractComponent implements RemovalL
final AtomicFieldData fieldData = indexFieldData.loadDirect(context);
for (Listener listener : k.listeners) {
try {
listener.onCache(shardId, fieldName, fieldDataType, fieldData);
listener.onCache(shardId, fieldName, fieldData);
} catch (Throwable e) {
// load anyway since listeners should not throw exceptions
logger.error("Failed to call listener on atomic field data loading", e);
@@ -164,7 +161,7 @@ public class IndicesFieldDataCache extends AbstractComponent implements RemovalL
final Accountable ifd = (Accountable) indexFieldData.localGlobalDirect(indexReader);
for (Listener listener : k.listeners) {
try {
listener.onCache(shardId, fieldName, fieldDataType, ifd);
listener.onCache(shardId, fieldName, ifd);
} catch (Throwable e) {
// load anyway since listeners should not throw exceptions
logger.error("Failed to call listener on global ordinals loading", e);

View File

@@ -22,7 +22,6 @@ package org.elasticsearch.indices.fielddata.cache;
import org.apache.lucene.util.Accountable;
import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
@@ -43,11 +42,11 @@ public class IndicesFieldDataCacheListener implements IndexFieldDataCache.Listen
}
@Override
public void onCache(ShardId shardId, String fieldName, FieldDataType fieldDataType, Accountable fieldData) {
public void onCache(ShardId shardId, String fieldName, Accountable fieldData) {
}
@Override
public void onRemoval(ShardId shardId, String fieldName, FieldDataType fieldDataType, boolean wasEvicted, long sizeInBytes) {
public void onRemoval(ShardId shardId, String fieldName, boolean wasEvicted, long sizeInBytes) {
assert sizeInBytes >= 0 : "When reducing circuit breaker, it should be adjusted with a number higher or equal to 0 and not [" + sizeInBytes + "]";
circuitBreakerService.getBreaker(CircuitBreaker.FIELDDATA).addWithoutBreaking(-sizeInBytes);
}

View File

@@ -90,8 +90,7 @@ public class RecoveryTargetService extends AbstractComponent implements IndexEve
@Inject
public RecoveryTargetService(Settings settings, ThreadPool threadPool, TransportService transportService, RecoverySettings
recoverySettings,
ClusterService clusterService) {
recoverySettings, ClusterService clusterService) {
super(settings);
this.threadPool = threadPool;
this.transportService = transportService;
@@ -329,9 +328,13 @@ public class RecoveryTargetService extends AbstractComponent implements IndexEve
throw exception;
}
// in very rare cases a translog replay from primary is processed before a mapping update on this node
// which causes local mapping changes. we want to wait until these mappings are processed.
// which causes local mapping changes since the mapping (clusterstate) might not have arrived on this node.
// we want to wait until these mappings are processed but also need to do some maintenance and roll back the
// number of processed (completed) operations in this batch to ensure accounting is correct.
logger.trace("delaying recovery due to missing mapping changes (rolling back stats for [{}] ops)", exception, exception
.completedOperations());
final RecoveryState.Translog translog = recoveryTarget.state().getTranslog();
translog.decrementRecoveredOperations(exception.completedOperations()); // do the maintenance and roll back completed ops
// we do not need to use a timeout here since the entire recovery mechanism has an inactivity protection (it will be
// canceled)
observer.waitForNextChange(new ClusterStateObserver.Listener() {
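The comment above describes rolling back the recovered-operations counter by the number of completed operations before waiting for the mapping update, so the retried batch is not counted twice. A minimal sketch of that accounting, with an AtomicInteger standing in for RecoveryState.Translog:

    import java.util.concurrent.atomic.AtomicInteger;

    public class RecoveryStatsRollback {
        private final AtomicInteger recoveredOperations = new AtomicInteger();

        void incrementRecoveredOperations(int ops) {
            recoveredOperations.addAndGet(ops);
        }

        // Called when a batch fails part-way through (e.g. while waiting for a mapping update):
        // undo the partially counted operations so the later replay is not double counted.
        void decrementRecoveredOperations(int completedOps) {
            int updated = recoveredOperations.addAndGet(-completedOps);
            assert updated >= 0 : "recovered operations must not go negative";
        }

        public static void main(String[] args) {
            RecoveryStatsRollback stats = new RecoveryStatsRollback();
            stats.incrementRecoveredOperations(7);   // 7 ops processed before the failure
            stats.decrementRecoveredOperations(7);   // rolled back, the batch will be replayed
            System.out.println(stats.recoveredOperations.get()); // 0
        }
    }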

View File

@@ -135,22 +135,22 @@ public class RestIndicesAction extends AbstractCatAction {
table.addCell("fielddata.evictions", "sibling:pri;alias:fe,fielddataEvictions;default:false;text-align:right;desc:fielddata evictions");
table.addCell("pri.fielddata.evictions", "default:false;text-align:right;desc:fielddata evictions");
table.addCell("query_cache.memory_size", "sibling:pri;alias:fcm,queryCacheMemory;default:false;text-align:right;desc:used query cache");
table.addCell("query_cache.memory_size", "sibling:pri;alias:qcm,queryCacheMemory;default:false;text-align:right;desc:used query cache");
table.addCell("pri.query_cache.memory_size", "default:false;text-align:right;desc:used query cache");
table.addCell("query_cache.evictions", "sibling:pri;alias:fce,queryCacheEvictions;default:false;text-align:right;desc:query cache evictions");
table.addCell("query_cache.evictions", "sibling:pri;alias:qce,queryCacheEvictions;default:false;text-align:right;desc:query cache evictions");
table.addCell("pri.query_cache.evictions", "default:false;text-align:right;desc:query cache evictions");
table.addCell("request_cache.memory_size", "sibling:pri;alias:qcm,queryCacheMemory;default:false;text-align:right;desc:used request cache");
table.addCell("request_cache.memory_size", "sibling:pri;alias:rcm,requestCacheMemory;default:false;text-align:right;desc:used request cache");
table.addCell("pri.request_cache.memory_size", "default:false;text-align:right;desc:used request cache");
table.addCell("request_cache.evictions", "sibling:pri;alias:qce,queryCacheEvictions;default:false;text-align:right;desc:request cache evictions");
table.addCell("request_cache.evictions", "sibling:pri;alias:rce,requestCacheEvictions;default:false;text-align:right;desc:request cache evictions");
table.addCell("pri.request_cache.evictions", "default:false;text-align:right;desc:request cache evictions");
table.addCell("request_cache.hit_count", "sibling:pri;alias:qchc,queryCacheHitCount;default:false;text-align:right;desc:request cache hit count");
table.addCell("request_cache.hit_count", "sibling:pri;alias:rchc,requestCacheHitCount;default:false;text-align:right;desc:request cache hit count");
table.addCell("pri.request_cache.hit_count", "default:false;text-align:right;desc:request cache hit count");
table.addCell("request_cache.miss_count", "sibling:pri;alias:qcmc,queryCacheMissCount;default:false;text-align:right;desc:request cache miss count");
table.addCell("request_cache.miss_count", "sibling:pri;alias:rcmc,requestCacheMissCount;default:false;text-align:right;desc:request cache miss count");
table.addCell("pri.request_cache.miss_count", "default:false;text-align:right;desc:request cache miss count");
table.addCell("flush.total", "sibling:pri;alias:ft,flushTotal;default:false;text-align:right;desc:number of flushes");

View File

@@ -151,13 +151,13 @@ public class RestNodesAction extends AbstractCatAction {
table.addCell("fielddata.memory_size", "alias:fm,fielddataMemory;default:false;text-align:right;desc:used fielddata cache");
table.addCell("fielddata.evictions", "alias:fe,fielddataEvictions;default:false;text-align:right;desc:fielddata evictions");
table.addCell("query_cache.memory_size", "alias:fcm,queryCacheMemory;default:false;text-align:right;desc:used query cache");
table.addCell("query_cache.evictions", "alias:fce,queryCacheEvictions;default:false;text-align:right;desc:query cache evictions");
table.addCell("query_cache.memory_size", "alias:qcm,queryCacheMemory;default:false;text-align:right;desc:used query cache");
table.addCell("query_cache.evictions", "alias:qce,queryCacheEvictions;default:false;text-align:right;desc:query cache evictions");
table.addCell("request_cache.memory_size", "alias:qcm,requestCacheMemory;default:false;text-align:right;desc:used request cache");
table.addCell("request_cache.evictions", "alias:qce,requestCacheEvictions;default:false;text-align:right;desc:request cache evictions");
table.addCell("request_cache.hit_count", "alias:qchc,requestCacheHitCount;default:false;text-align:right;desc:request cache hit counts");
table.addCell("request_cache.miss_count", "alias:qcmc,requestCacheMissCount;default:false;text-align:right;desc:request cache miss counts");
table.addCell("request_cache.memory_size", "alias:rcm,requestCacheMemory;default:false;text-align:right;desc:used request cache");
table.addCell("request_cache.evictions", "alias:rce,requestCacheEvictions;default:false;text-align:right;desc:request cache evictions");
table.addCell("request_cache.hit_count", "alias:rchc,requestCacheHitCount;default:false;text-align:right;desc:request cache hit counts");
table.addCell("request_cache.miss_count", "alias:rcmc,requestCacheMissCount;default:false;text-align:right;desc:request cache miss counts");
table.addCell("flush.total", "alias:ft,flushTotal;default:false;text-align:right;desc:number of flushes");
table.addCell("flush.total_time", "alias:ftt,flushTotalTime;default:false;text-align:right;desc:time spent in flush");

View File

@@ -109,8 +109,8 @@ public class RestShardsAction extends AbstractCatAction {
table.addCell("fielddata.memory_size", "alias:fm,fielddataMemory;default:false;text-align:right;desc:used fielddata cache");
table.addCell("fielddata.evictions", "alias:fe,fielddataEvictions;default:false;text-align:right;desc:fielddata evictions");
table.addCell("query_cache.memory_size", "alias:fcm,queryCacheMemory;default:false;text-align:right;desc:used query cache");
table.addCell("query_cache.evictions", "alias:fce,queryCacheEvictions;default:false;text-align:right;desc:query cache evictions");
table.addCell("query_cache.memory_size", "alias:qcm,queryCacheMemory;default:false;text-align:right;desc:used query cache");
table.addCell("query_cache.evictions", "alias:qce,queryCacheEvictions;default:false;text-align:right;desc:query cache evictions");
table.addCell("flush.total", "alias:ft,flushTotal;default:false;text-align:right;desc:number of flushes");
table.addCell("flush.total_time", "alias:ftt,flushTotalTime;default:false;text-align:right;desc:time spent in flush");

View File

@@ -549,8 +549,14 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
indexShard, scriptService, pageCacheRecycler, bigArrays, threadPool.estimatedTimeInMillisCounter(), parseFieldMatcher,
defaultSearchTimeout, fetchPhase);
context.getQueryShardContext().setFieldStatsProvider(new FieldStatsProvider(engineSearcher, indexService.mapperService()));
request.rewrite(context.getQueryShardContext());
SearchContext.setCurrent(context);
request.rewrite(context.getQueryShardContext());
// reset that we have used nowInMillis from the context since it may
// have been rewritten so it's no longer in the query and the request can
// be cached. If it is still present in the request (e.g. in a range
// aggregation) it will still be caught when the aggregation is
// evaluated.
context.resetNowInMillisUsed();
try {
if (request.scroll() != null) {
context.scrollContext(new ScrollContext());
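The reordering above installs the thread-local SearchContext before the request is rewritten and then clears the now-in-millis-used flag, so a rewrite that resolved "now" does not by itself make the request uncacheable. A self-contained sketch of that flag handling (class and method names are stand-ins, not the Elasticsearch ones):

    public class NowUsageTracking {
        private static final ThreadLocal<NowUsageTracking> CURRENT = new ThreadLocal<>();

        private boolean nowInMillisUsed;

        long nowInMillis() {
            nowInMillisUsed = true;          // any caller that reads "now" marks the context
            return System.currentTimeMillis();
        }

        boolean isCacheable() {
            return nowInMillisUsed == false; // time-dependent requests are not cached
        }

        void resetNowInMillisUsed() {
            nowInMillisUsed = false;
        }

        public static void main(String[] args) {
            NowUsageTracking context = new NowUsageTracking();
            CURRENT.set(context);            // install before rewriting, as in the diff
            context.nowInMillis();           // a rewrite resolves "now" into a fixed value
            context.resetNowInMillisUsed();  // so the rewritten request can still be cached
            System.out.println(context.isCacheable()); // true
            CURRENT.remove();
        }
    }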

View File

@@ -194,6 +194,12 @@ public class FiltersAggregator extends BucketsAggregator {
InternalFilters.InternalBucket bucket = new InternalFilters.InternalBucket(keys[i], 0, subAggs, keyed);
buckets.add(bucket);
}
if (showOtherBucket) {
InternalFilters.InternalBucket bucket = new InternalFilters.InternalBucket(otherBucketKey, 0, subAggs, keyed);
buckets.add(bucket);
}
return new InternalFilters(name, buckets, keyed, pipelineAggregators(), metaData());
}
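The added block above makes the empty (unmapped-shard) result include the "other" bucket whenever it is requested, so every shard reports the same bucket keys and the reduce phase lines up. A trivial sketch of the key layout (plain strings stand in for InternalFilters buckets):

    import java.util.ArrayList;
    import java.util.List;

    public class EmptyFiltersBuckets {
        // Empty per-shard result: one zero-count bucket per filter key, plus the
        // optional "other" bucket so every shard returns the same set of keys.
        static List<String> emptyBucketKeys(String[] keys, boolean showOtherBucket, String otherBucketKey) {
            List<String> buckets = new ArrayList<>();
            for (String key : keys) {
                buckets.add(key);
            }
            if (showOtherBucket) {
                buckets.add(otherBucketKey);
            }
            return buckets;
        }

        public static void main(String[] args) {
            System.out.println(emptyBucketKeys(new String[] {"errors", "warnings"}, true, "_other_"));
        }
    }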

View File

@@ -149,6 +149,10 @@ public abstract class SearchContext implements Releasable {
return nowInMillisUsed;
}
public final void resetNowInMillisUsed() {
this.nowInMillisUsed = false;
}
protected abstract long nowInMillisImpl();
public abstract ScrollContext scrollContext();

View File

@@ -19,6 +19,7 @@
package org.elasticsearch.search.sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
@@ -27,8 +28,14 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.search.MultiValueMode;
import java.io.IOException;
import java.util.Objects;
@@ -47,6 +54,13 @@ public class FieldSortBuilder extends SortBuilder<FieldSortBuilder> implements S
public static final ParseField SORT_MODE = new ParseField("mode");
public static final ParseField UNMAPPED_TYPE = new ParseField("unmapped_type");
/**
* special field name to sort by index order
*/
public static final String DOC_FIELD_NAME = "_doc";
private static final SortField SORT_DOC = new SortField(null, SortField.Type.DOC);
private static final SortField SORT_DOC_REVERSE = new SortField(null, SortField.Type.DOC, true);
private final String fieldName;
private Object missing;
@@ -161,7 +175,7 @@ public class FieldSortBuilder extends SortBuilder<FieldSortBuilder> implements S
* TODO should the above getters and setters be deprecated/ changed in
* favour of real getters and setters?
*/
public FieldSortBuilder setNestedFilter(QueryBuilder nestedFilter) {
public FieldSortBuilder setNestedFilter(QueryBuilder<?> nestedFilter) {
this.nestedFilter = nestedFilter;
return this;
}
@@ -170,7 +184,7 @@ public class FieldSortBuilder extends SortBuilder<FieldSortBuilder> implements S
* Returns the nested filter that the nested objects should match with in
* order to be taken into account for sorting.
*/
public QueryBuilder getNestedFilter() {
public QueryBuilder<?> getNestedFilter() {
return this.nestedFilter;
}
@@ -219,6 +233,49 @@ public class FieldSortBuilder extends SortBuilder<FieldSortBuilder> implements S
return builder;
}
@Override
public SortField build(QueryShardContext context) throws IOException {
if (DOC_FIELD_NAME.equals(fieldName)) {
if (order == SortOrder.DESC) {
return SORT_DOC_REVERSE;
} else {
return SORT_DOC;
}
} else {
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType == null) {
if (unmappedType != null) {
fieldType = context.getMapperService().unmappedFieldType(unmappedType);
} else {
throw new QueryShardException(context, "No mapping found for [" + fieldName + "] in order to sort on");
}
}
if (!fieldType.isSortable()) {
throw new QueryShardException(context, "Sorting not supported for field[" + fieldName + "]");
}
MultiValueMode localSortMode = null;
if (sortMode != null) {
localSortMode = MultiValueMode.fromString(sortMode.toString());
}
if (fieldType.isNumeric() == false && (sortMode == SortMode.SUM || sortMode == SortMode.AVG || sortMode == SortMode.MEDIAN)) {
throw new QueryShardException(context, "we only support AVG, MEDIAN and SUM on number based fields");
}
boolean reverse = (order == SortOrder.DESC);
if (localSortMode == null) {
localSortMode = reverse ? MultiValueMode.MAX : MultiValueMode.MIN;
}
final Nested nested = resolveNested(context, nestedPath, nestedFilter);
IndexFieldData.XFieldComparatorSource fieldComparatorSource = context.getForField(fieldType)
.comparatorSource(missing, localSortMode, nested);
return new SortField(fieldType.name(), fieldComparatorSource, reverse);
}
}
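build() above special-cases _doc, which needs no field data, and defaults the multi-value mode to MIN for ascending and MAX for descending sorts. A small sketch of both decisions, assuming Lucene's SortField is available (the enum here is a local stand-in for Elasticsearch's MultiValueMode):

    import org.apache.lucene.search.SortField;

    public class FieldSortDefaults {
        enum MultiValueMode { MIN, MAX }

        // Ascending sorts compare the smallest value per document, descending the largest.
        static MultiValueMode defaultMode(boolean reverse) {
            return reverse ? MultiValueMode.MAX : MultiValueMode.MIN;
        }

        // "_doc" needs no field data: it maps directly onto Lucene's index-order sort.
        static SortField docSort(boolean reverse) {
            return new SortField(null, SortField.Type.DOC, reverse);
        }

        public static void main(String[] args) {
            System.out.println(defaultMode(false));          // MIN
            System.out.println(docSort(true).getReverse());  // true
        }
    }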
@Override
public boolean equals(Object other) {
if (this == other) {

View File

@@ -19,8 +19,18 @@
package org.elasticsearch.search.sort;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.FieldComparator;
import org.apache.lucene.search.SortField;
import org.apache.lucene.util.BitSet;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoDistance.FixedSourceDistance;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.io.stream.StreamInput;
@@ -28,8 +38,17 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.fielddata.MultiGeoPointValues;
import org.elasticsearch.index.fielddata.NumericDoubleValues;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.MultiValueMode;
import java.io.IOException;
import java.util.ArrayList;
@@ -45,6 +64,14 @@ public class GeoDistanceSortBuilder extends SortBuilder<GeoDistanceSortBuilder>
public static final String NAME = "_geo_distance";
public static final boolean DEFAULT_COERCE = false;
public static final boolean DEFAULT_IGNORE_MALFORMED = false;
public static final ParseField UNIT_FIELD = new ParseField("unit");
public static final ParseField REVERSE_FIELD = new ParseField("reverse");
public static final ParseField DISTANCE_TYPE_FIELD = new ParseField("distance_type");
public static final ParseField COERCE_FIELD = new ParseField("coerce", "normalize");
public static final ParseField IGNORE_MALFORMED_FIELD = new ParseField("ignore_malformed");
public static final ParseField SORTMODE_FIELD = new ParseField("mode", "sort_mode");
public static final ParseField NESTED_PATH_FIELD = new ParseField("nested_path");
public static final ParseField NESTED_FILTER_FIELD = new ParseField("nested_filter");
static final GeoDistanceSortBuilder PROTOTYPE = new GeoDistanceSortBuilder("", -1, -1);
@@ -280,22 +307,22 @@ public class GeoDistanceSortBuilder extends SortBuilder<GeoDistanceSortBuilder>
}
builder.endArray();
builder.field("unit", unit);
builder.field("distance_type", geoDistance.name().toLowerCase(Locale.ROOT));
builder.field(UNIT_FIELD.getPreferredName(), unit);
builder.field(DISTANCE_TYPE_FIELD.getPreferredName(), geoDistance.name().toLowerCase(Locale.ROOT));
builder.field(ORDER_FIELD.getPreferredName(), order);
if (sortMode != null) {
builder.field("mode", sortMode);
builder.field(SORTMODE_FIELD.getPreferredName(), sortMode);
}
if (nestedPath != null) {
builder.field("nested_path", nestedPath);
builder.field(NESTED_PATH_FIELD.getPreferredName(), nestedPath);
}
if (nestedFilter != null) {
builder.field("nested_filter", nestedFilter, params);
builder.field(NESTED_FILTER_FIELD.getPreferredName(), nestedFilter, params);
}
builder.field("coerce", coerce);
builder.field("ignore_malformed", ignoreMalformed);
builder.field(COERCE_FIELD.getPreferredName(), coerce);
builder.field(IGNORE_MALFORMED_FIELD.getPreferredName(), ignoreMalformed);
builder.endObject();
return builder;
@@ -383,6 +410,7 @@ public class GeoDistanceSortBuilder extends SortBuilder<GeoDistanceSortBuilder>
@Override
public GeoDistanceSortBuilder fromXContent(QueryParseContext context, String elementName) throws IOException {
XContentParser parser = context.parser();
ParseFieldMatcher parseFieldMatcher = context.parseFieldMatcher();
String fieldName = null;
List<GeoPoint> geoPoints = new ArrayList<>();
DistanceUnit unit = DistanceUnit.DEFAULT;
@@ -405,40 +433,37 @@ public class GeoDistanceSortBuilder extends SortBuilder<GeoDistanceSortBuilder>
fieldName = currentName;
} else if (token == XContentParser.Token.START_OBJECT) {
// the json in the format of -> field : { lat : 30, lon : 12 }
if ("nested_filter".equals(currentName) || "nestedFilter".equals(currentName)) {
// TODO Note to remember: while this is kept as a QueryBuilder internally,
// we need to make sure to call toFilter() on it once on the shard
// (e.g. in the new build() method)
if (parseFieldMatcher.match(currentName, NESTED_FILTER_FIELD)) {
nestedFilter = context.parseInnerQueryBuilder();
} else {
// the json in the format of -> field : { lat : 30, lon : 12 }
fieldName = currentName;
GeoPoint point = new GeoPoint();
GeoUtils.parseGeoPoint(parser, point);
geoPoints.add(point);
}
} else if (token.isValue()) {
if ("reverse".equals(currentName)) {
if (parseFieldMatcher.match(currentName, REVERSE_FIELD)) {
order = parser.booleanValue() ? SortOrder.DESC : SortOrder.ASC;
} else if ("order".equals(currentName)) {
} else if (parseFieldMatcher.match(currentName, ORDER_FIELD)) {
order = SortOrder.fromString(parser.text());
} else if ("unit".equals(currentName)) {
} else if (parseFieldMatcher.match(currentName, UNIT_FIELD)) {
unit = DistanceUnit.fromString(parser.text());
} else if ("distance_type".equals(currentName) || "distanceType".equals(currentName)) {
} else if (parseFieldMatcher.match(currentName, DISTANCE_TYPE_FIELD)) {
geoDistance = GeoDistance.fromString(parser.text());
} else if ("coerce".equals(currentName) || "normalize".equals(currentName)) {
} else if (parseFieldMatcher.match(currentName, COERCE_FIELD)) {
coerce = parser.booleanValue();
if (coerce == true) {
ignoreMalformed = true;
}
} else if ("ignore_malformed".equals(currentName)) {
} else if (parseFieldMatcher.match(currentName, IGNORE_MALFORMED_FIELD)) {
boolean ignore_malformed_value = parser.booleanValue();
if (coerce == false) {
ignoreMalformed = ignore_malformed_value;
}
} else if ("sort_mode".equals(currentName) || "sortMode".equals(currentName) || "mode".equals(currentName)) {
} else if (parseFieldMatcher.match(currentName, SORTMODE_FIELD)) {
sortMode = SortMode.fromString(parser.text());
} else if ("nested_path".equals(currentName) || "nestedPath".equals(currentName)) {
} else if (parseFieldMatcher.match(currentName, NESTED_PATH_FIELD)) {
nestedPath = parser.text();
} else {
GeoPoint point = new GeoPoint();
@@ -461,7 +486,85 @@ public class GeoDistanceSortBuilder extends SortBuilder<GeoDistanceSortBuilder>
result.coerce(coerce);
result.ignoreMalformed(ignoreMalformed);
return result;
}
@Override
public SortField build(QueryShardContext context) throws IOException {
final boolean indexCreatedBeforeV2_0 = context.indexVersionCreated().before(Version.V_2_0_0);
// validation was not available prior to 2.x, so to support bwc percolation queries we only apply ignore_malformed on indexes created on or after 2.x
List<GeoPoint> localPoints = new ArrayList<GeoPoint>();
for (GeoPoint geoPoint : this.points) {
localPoints.add(new GeoPoint(geoPoint));
}
if (!indexCreatedBeforeV2_0 && !ignoreMalformed) {
for (GeoPoint point : localPoints) {
if (GeoUtils.isValidLatitude(point.lat()) == false) {
throw new ElasticsearchParseException("illegal latitude value [{}] for [GeoDistanceSort]", point.lat());
}
if (GeoUtils.isValidLongitude(point.lon()) == false) {
throw new ElasticsearchParseException("illegal longitude value [{}] for [GeoDistanceSort]", point.lon());
}
}
}
if (coerce) {
for (GeoPoint point : localPoints) {
GeoUtils.normalizePoint(point, coerce, coerce);
}
}
boolean reverse = (order == SortOrder.DESC);
final MultiValueMode finalSortMode;
if (sortMode == null) {
finalSortMode = reverse ? MultiValueMode.MAX : MultiValueMode.MIN;
} else {
finalSortMode = MultiValueMode.fromString(sortMode.toString());
}
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType == null) {
throw new IllegalArgumentException("failed to find mapper for [" + fieldName + "] for geo distance based sort");
}
final IndexGeoPointFieldData geoIndexFieldData = context.getForField(fieldType);
final FixedSourceDistance[] distances = new FixedSourceDistance[localPoints.size()];
for (int i = 0; i< localPoints.size(); i++) {
distances[i] = geoDistance.fixedSourceDistance(localPoints.get(i).lat(), localPoints.get(i).lon(), unit);
}
final Nested nested = resolveNested(context, nestedPath, nestedFilter);
IndexFieldData.XFieldComparatorSource geoDistanceComparatorSource = new IndexFieldData.XFieldComparatorSource() {
@Override
public SortField.Type reducedType() {
return SortField.Type.DOUBLE;
}
@Override
public FieldComparator<?> newComparator(String fieldname, int numHits, int sortPos, boolean reversed) throws IOException {
return new FieldComparator.DoubleComparator(numHits, null, null) {
@Override
protected NumericDocValues getNumericDocValues(LeafReaderContext context, String field) throws IOException {
final MultiGeoPointValues geoPointValues = geoIndexFieldData.load(context).getGeoPointValues();
final SortedNumericDoubleValues distanceValues = GeoDistance.distanceValues(geoPointValues, distances);
final NumericDoubleValues selectedValues;
if (nested == null) {
selectedValues = finalSortMode.select(distanceValues, Double.MAX_VALUE);
} else {
final BitSet rootDocs = nested.rootDocs(context);
final DocIdSetIterator innerDocs = nested.innerDocs(context);
selectedValues = finalSortMode.select(distanceValues, Double.MAX_VALUE, rootDocs, innerDocs,
context.reader().maxDoc());
}
return selectedValues.getRawDoubleValues();
}
};
}
};
return new SortField(fieldName, geoDistanceComparatorSource, reverse);
}
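The comparator above reduces each document to a single double: the distance from every configured source point to every point stored in the document, combined with the sort mode (MIN by default, Double.MAX_VALUE for missing values). A self-contained sketch of that reduction using a plain haversine distance (the real implementation goes through GeoDistance and fielddata):

    public class GeoDistanceSortValue {
        // Haversine distance in meters; good enough to illustrate the reduction.
        static double distanceMeters(double lat1, double lon1, double lat2, double lon2) {
            double radius = 6_371_000.0;
            double dLat = Math.toRadians(lat2 - lat1);
            double dLon = Math.toRadians(lon2 - lon1);
            double a = Math.sin(dLat / 2) * Math.sin(dLat / 2)
                    + Math.cos(Math.toRadians(lat1)) * Math.cos(Math.toRadians(lat2))
                      * Math.sin(dLon / 2) * Math.sin(dLon / 2);
            return 2 * radius * Math.asin(Math.sqrt(a));
        }

        // One sort value per document: the minimum distance over all (source point, doc point) pairs.
        static double sortValue(double[][] sourcePoints, double[][] docPoints) {
            double best = Double.MAX_VALUE; // missing values sort last, as in the comparator above
            for (double[] source : sourcePoints) {
                for (double[] doc : docPoints) {
                    best = Math.min(best, distanceMeters(source[0], source[1], doc[0], doc[1]));
                }
            }
            return best;
        }

        public static void main(String[] args) {
            double[][] sources = { {52.52, 13.405} };                          // Berlin
            double[][] docValues = { {48.8566, 2.3522}, {50.1109, 8.6821} };   // Paris, Frankfurt
            System.out.printf("%.0f m%n", sortValue(sources, docValues));
        }
    }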
static void parseGeoPoints(XContentParser parser, List<GeoPoint> geoPoints) throws IOException {

View File

@@ -62,7 +62,7 @@ public class GeoDistanceSortParser implements SortParser {
}
@Override
public SortField parse(XContentParser parser, QueryShardContext context) throws Exception {
public SortField parse(XContentParser parser, QueryShardContext context) throws IOException {
String fieldName = null;
List<GeoPoint> geoPoints = new ArrayList<>();
DistanceUnit unit = DistanceUnit.DEFAULT;

View File

@@ -19,6 +19,7 @@
package org.elasticsearch.search.sort;
import org.apache.lucene.search.SortField;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
@@ -27,6 +28,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import java.io.IOException;
import java.util.Objects;
@@ -40,6 +42,8 @@ public class ScoreSortBuilder extends SortBuilder<ScoreSortBuilder> implements S
static final ScoreSortBuilder PROTOTYPE = new ScoreSortBuilder();
public static final ParseField REVERSE_FIELD = new ParseField("reverse");
public static final ParseField ORDER_FIELD = new ParseField("order");
private static final SortField SORT_SCORE = new SortField(null, SortField.Type.SCORE);
private static final SortField SORT_SCORE_REVERSE = new SortField(null, SortField.Type.SCORE, true);
public ScoreSortBuilder() {
// order defaults to desc when sorting on the _score
@@ -84,6 +88,14 @@ public class ScoreSortBuilder extends SortBuilder<ScoreSortBuilder> implements S
return result;
}
public SortField build(QueryShardContext context) {
if (order == SortOrder.DESC) {
return SORT_SCORE;
} else {
return SORT_SCORE_REVERSE;
}
}
@Override
public boolean equals(Object object) {
if (this == object) {

View File

@@ -19,6 +19,12 @@
package org.elasticsearch.search.sort;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.SortField;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
@@ -27,14 +33,29 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.FieldData;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.NumericDoubleValues;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource;
import org.elasticsearch.index.fielddata.fieldcomparator.DoubleValuesComparatorSource;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.script.LeafSearchScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.Script.ScriptField;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptParameterParser;
import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.MultiValueMode;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
@ -56,7 +77,7 @@ public class ScriptSortBuilder extends SortBuilder<ScriptSortBuilder> implements
private final Script script;
private ScriptSortType type;
private final ScriptSortType type;
private SortMode sortMode;
@ -104,11 +125,15 @@ public class ScriptSortBuilder extends SortBuilder<ScriptSortBuilder> implements
}
/**
* Defines which distance to use for sorting in the case a document contains multiple geo points.
* Possible values: min and max
* Defines which value to use for sorting in case a document contains multiple values.<br>
* For {@link ScriptSortType#STRING}, the set of possible values is restricted to {@link SortMode#MIN} and {@link SortMode#MAX}.
*/
public ScriptSortBuilder sortMode(SortMode sortMode) {
Objects.requireNonNull(sortMode, "sort mode cannot be null.");
if (ScriptSortType.STRING.equals(type) && (sortMode == SortMode.SUM || sortMode == SortMode.AVG ||
sortMode == SortMode.MEDIAN)) {
throw new IllegalArgumentException("script sort of type [string] doesn't support mode [" + sortMode + "]");
}
this.sortMode = sortMode;
return this;
}
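Since the setter now rejects numeric-only modes for string script sorts, a quick illustration of the behaviour added above (the constructor signature and Script usage are assumptions, illustration only):

// Hypothetical: a string-typed script sort only accepts MIN or MAX as its sort mode.
ScriptSortBuilder stringSort = new ScriptSortBuilder(new Script("doc['name'].value"), ScriptSortType.STRING);
stringSort.sortMode(SortMode.MAX);   // accepted
stringSort.sortMode(SortMode.AVG);   // throws IllegalArgumentException per the check above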
@ -244,6 +269,75 @@ public class ScriptSortBuilder extends SortBuilder<ScriptSortBuilder> implements
return result;
}
@Override
public SortField build(QueryShardContext context) throws IOException {
final SearchScript searchScript = context.getScriptService().search(
context.lookup(), script, ScriptContext.Standard.SEARCH, Collections.emptyMap());
MultiValueMode valueMode = null;
if (sortMode != null) {
valueMode = MultiValueMode.fromString(sortMode.toString());
}
boolean reverse = (order == SortOrder.DESC);
if (valueMode == null) {
valueMode = reverse ? MultiValueMode.MAX : MultiValueMode.MIN;
}
final Nested nested = resolveNested(context, nestedPath, nestedFilter);
final IndexFieldData.XFieldComparatorSource fieldComparatorSource;
switch (type) {
case STRING:
fieldComparatorSource = new BytesRefFieldComparatorSource(null, null, valueMode, nested) {
LeafSearchScript leafScript;
@Override
protected SortedBinaryDocValues getValues(LeafReaderContext context) throws IOException {
leafScript = searchScript.getLeafSearchScript(context);
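// per-segment view: runs the sort script for each document and exposes its string output as a BytesRef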
final BinaryDocValues values = new BinaryDocValues() {
final BytesRefBuilder spare = new BytesRefBuilder();
@Override
public BytesRef get(int docID) {
leafScript.setDocument(docID);
spare.copyChars(leafScript.run().toString());
return spare.get();
}
};
return FieldData.singleton(values, null);
}
@Override
protected void setScorer(Scorer scorer) {
leafScript.setScorer(scorer);
}
};
break;
case NUMBER:
fieldComparatorSource = new DoubleValuesComparatorSource(null, Double.MAX_VALUE, valueMode, nested) {
LeafSearchScript leafScript;
@Override
protected SortedNumericDoubleValues getValues(LeafReaderContext context) throws IOException {
leafScript = searchScript.getLeafSearchScript(context);
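// per-segment view: runs the sort script for each document and exposes its result via runAsDouble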
final NumericDoubleValues values = new NumericDoubleValues() {
@Override
public double get(int docID) {
leafScript.setDocument(docID);
return leafScript.runAsDouble();
}
};
return FieldData.singleton(values, null);
}
@Override
protected void setScorer(Scorer scorer) {
leafScript.setScorer(scorer);
}
};
break;
default:
throw new QueryShardException(context, "custom script sort type [" + type + "] not supported");
}
return new SortField("_script", fieldComparatorSource, reverse);
}
@Override
public boolean equals(Object object) {
if (this == object) {
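Taken together, the new build method compiles the script once per shard request and wires it into a per-segment comparator source. A hedged caller-side sketch (builder constructor and order setter are assumptions, illustration only):

// Hypothetical: numeric script sort, descending; build returns SortField("_script", <comparator source>, true).
static SortField buildScriptSort(QueryShardContext shardContext) throws IOException {
    ScriptSortBuilder scriptSort = new ScriptSortBuilder(new Script("doc['price'].value * 0.9"), ScriptSortType.NUMBER);
    scriptSort.order(SortOrder.DESC);
    return scriptSort.build(shardContext);
}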

View File

@ -66,7 +66,7 @@ public class ScriptSortParser implements SortParser {
}
@Override
public SortField parse(XContentParser parser, QueryShardContext context) throws Exception {
public SortField parse(XContentParser parser, QueryShardContext context) throws IOException {
ScriptParameterParser scriptParameterParser = new ScriptParameterParser();
Script script = null;
ScriptSortType type = null;
@ -140,7 +140,6 @@ public class ScriptSortParser implements SortParser {
sortMode = reverse ? MultiValueMode.MAX : MultiValueMode.MIN;
}
// If nested_path is specified, then wrap the `fieldComparatorSource` in a `NestedFieldComparatorSource`
final Nested nested;
if (nestedHelper != null && nestedHelper.getPath() != null) {
BitSetProducer rootDocumentsFilter = context.bitsetFilter(Queries.newNonNestedFilter());
@ -182,7 +181,6 @@ public class ScriptSortParser implements SortParser {
};
break;
case NUMBER:
// TODO: should we rather sort missing values last?
fieldComparatorSource = new DoubleValuesComparatorSource(null, Double.MAX_VALUE, sortMode, nested) {
LeafSearchScript leafScript;
@Override

View File

@ -19,12 +19,21 @@
package org.elasticsearch.search.sort;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.join.BitSetProducer;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import java.io.IOException;
import java.util.Objects;
/**
@ -32,6 +41,30 @@ import java.util.Objects;
*/
public abstract class SortBuilder<T extends SortBuilder<?>> implements ToXContent {
protected static Nested resolveNested(QueryShardContext context, String nestedPath, QueryBuilder<?> nestedFilter) throws IOException {
Nested nested = null;
if (nestedPath != null) {
BitSetProducer rootDocumentsFilter = context.bitsetFilter(Queries.newNonNestedFilter());
ObjectMapper nestedObjectMapper = context.getObjectMapper(nestedPath);
if (nestedObjectMapper == null) {
throw new QueryShardException(context, "[nested] failed to find nested object under path [" + nestedPath + "]");
}
if (!nestedObjectMapper.nested().isNested()) {
throw new QueryShardException(context, "[nested] nested object under path [" + nestedPath + "] is not of nested type");
}
Query innerDocumentsQuery;
if (nestedFilter != null) {
context.nestedScope().nextLevel(nestedObjectMapper);
innerDocumentsQuery = QueryBuilder.rewriteQuery(nestedFilter, context).toFilter(context);
context.nestedScope().previousLevel();
} else {
innerDocumentsQuery = nestedObjectMapper.nestedTypeFilter();
}
nested = new Nested(rootDocumentsFilter, innerDocumentsQuery);
}
return nested;
}
protected SortOrder order = SortOrder.ASC;
public static final ParseField ORDER_FIELD = new ParseField("order");
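The shared resolveNested helper gives every concrete builder the same nested-document handling; the ScriptSortBuilder hunk above already calls it from its build method. A short sketch of the expected call site, assuming nestedPath and nestedFilter fields on the concrete builder (illustration only):

// Inside a concrete SortBuilder#build(QueryShardContext context):
final Nested nested = resolveNested(context, nestedPath, nestedFilter);  // null when no nested_path was given
// nested (root documents filter + inner documents query) is then passed on to the field comparator source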

Some files were not shown because too many files have changed in this diff.