merged from master

Boaz Leskes 2016-03-22 19:21:28 +01:00
commit 7c8cdf4a71
125 changed files with 2713 additions and 1106 deletions

View File

@@ -441,7 +441,6 @@
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]MergeSchedulerConfig.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]NodeServicesProvider.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]SearchSlowLog.java" checks="LineLength" />
-  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]Analysis.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]AnalysisRegistry.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]AnalysisService.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]analysis[/\\]CommonGramsTokenFilterFactory.java" checks="LineLength" />
@@ -1117,7 +1116,6 @@
   <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]fieldstats[/\\]FieldStatsIntegrationIT.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]fieldstats[/\\]FieldStatsTests.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]AsyncShardFetchTests.java" checks="LineLength" />
-  <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]GatewayIndexStateIT.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]GatewayMetaStateTests.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]GatewayModuleTests.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]gateway[/\\]GatewayServiceTests.java" checks="LineLength" />
@@ -1138,7 +1136,6 @@
   <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]http[/\\]netty[/\\]NettyPipeliningEnabledIT.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]IndexModuleTests.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]IndexServiceTests.java" checks="LineLength" />
-  <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]IndexSettingsTests.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]IndexWithShadowReplicasIT.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]IndexingSlowLogTests.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]MergePolicySettingsTests.java" checks="LineLength" />

View File

@@ -135,6 +135,8 @@ final class Bootstrap {
         JNANatives.trySetMaxNumberOfThreads();
+        JNANatives.trySetMaxSizeVirtualMemory();
         // init lucene random seed. it will use /dev/urandom where available:
         StringHelper.randomId();
     }

View File

@@ -123,6 +123,7 @@ final class BootstrapCheck {
         if (Constants.LINUX) {
             checks.add(new MaxNumberOfThreadsCheck());
         }
+        checks.add(new MaxSizeVirtualMemoryCheck());
         return Collections.unmodifiableList(checks);
     }
@@ -249,4 +250,27 @@
     }

+    static class MaxSizeVirtualMemoryCheck implements Check {
+
+        @Override
+        public boolean check() {
+            return getMaxSizeVirtualMemory() != Long.MIN_VALUE && getMaxSizeVirtualMemory() != JNACLibrary.RLIM_INFINITY;
+        }
+
+        @Override
+        public String errorMessage() {
+            return String.format(
+                Locale.ROOT,
+                "max size virtual memory [%d] for user [%s] likely too low, increase to [unlimited]",
+                getMaxSizeVirtualMemory(),
+                BootstrapInfo.getSystemProperties().get("user.name"));
+        }
+
+        // visible for testing
+        long getMaxSizeVirtualMemory() {
+            return JNANatives.MAX_SIZE_VIRTUAL_MEMORY;
+        }
+
+    }
 }
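
The check reads the rlimit through a hook that is overridable from the same package ("visible for testing" above), so it can be exercised without JNA. A minimal sketch, assuming a test class in the same package; the anonymous subclass and the value 100 are invented for illustration:

    // Fake the rlimit to verify the check logic in isolation.
    BootstrapCheck.MaxSizeVirtualMemoryCheck check = new BootstrapCheck.MaxSizeVirtualMemoryCheck() {
        @Override
        long getMaxSizeVirtualMemory() {
            return 100; // any finite, non-infinite rlimit should trip the check
        }
    };
    assert check.check(); // true -> startup would fail with errorMessage()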

View File

@@ -39,6 +39,7 @@ final class JNACLibrary {
     public static final int MCL_CURRENT = 1;
     public static final int ENOMEM = 12;
     public static final int RLIMIT_MEMLOCK = Constants.MAC_OS_X ? 6 : 8;
+    public static final int RLIMIT_AS = Constants.MAC_OS_X ? 5 : 9;
     public static final long RLIM_INFINITY = Constants.MAC_OS_X ? 9223372036854775807L : -1L;

     static {

View File

@@ -52,6 +52,8 @@ class JNANatives {
     // the user ID that owns the running Elasticsearch process
     static long MAX_NUMBER_OF_THREADS = -1;

+    static long MAX_SIZE_VIRTUAL_MEMORY = Long.MIN_VALUE;
+
     static void tryMlockall() {
         int errno = Integer.MIN_VALUE;
         String errMsg = null;
@@ -124,6 +126,17 @@
         }
     }

+    static void trySetMaxSizeVirtualMemory() {
+        if (Constants.LINUX || Constants.MAC_OS_X) {
+            final JNACLibrary.Rlimit rlimit = new JNACLibrary.Rlimit();
+            if (JNACLibrary.getrlimit(JNACLibrary.RLIMIT_AS, rlimit) == 0) {
+                MAX_SIZE_VIRTUAL_MEMORY = rlimit.rlim_cur.longValue();
+            } else {
+                logger.warn("unable to retrieve max size virtual memory [" + JNACLibrary.strerror(Native.getLastError()) + "]");
+            }
+        }
+    }
+
     static String rlimitToString(long value) {
         assert Constants.LINUX || Constants.MAC_OS_X;
         if (value == JNACLibrary.RLIM_INFINITY) {
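
Despite the trySet prefix, the new method only records the current soft limit; MAX_SIZE_VIRTUAL_MEMORY stays at the Long.MIN_VALUE sentinel if the platform is unsupported or getrlimit fails. A hedged sketch of how a same-package consumer can interpret the field (illustrative, not from the commit):

    if (JNANatives.MAX_SIZE_VIRTUAL_MEMORY != Long.MIN_VALUE) {
        // rlimitToString renders RLIM_INFINITY as "unlimited", otherwise the raw number
        String human = JNANatives.rlimitToString(JNANatives.MAX_SIZE_VIRTUAL_MEMORY);
    }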

View File

@@ -24,7 +24,6 @@ import org.elasticsearch.Version;
 import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.IndexScopedSettings;
-import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.analysis.AnalysisService;
@@ -34,7 +33,8 @@ import org.elasticsearch.index.similarity.SimilarityService;
 import org.elasticsearch.indices.mapper.MapperRegistry;

 import java.util.Collections;
-import java.util.Map;
+
+import static org.elasticsearch.common.util.set.Sets.newHashSet;

 /**
  * This service is responsible for upgrading legacy index metadata to the current version
@@ -47,13 +47,13 @@
 public class MetaDataIndexUpgradeService extends AbstractComponent {

     private final MapperRegistry mapperRegistry;
-    private final IndexScopedSettings indexScopedSettigns;
+    private final IndexScopedSettings indexScopedSettings;

     @Inject
     public MetaDataIndexUpgradeService(Settings settings, MapperRegistry mapperRegistry, IndexScopedSettings indexScopedSettings) {
         super(settings);
         this.mapperRegistry = mapperRegistry;
-        this.indexScopedSettigns = indexScopedSettings;
+        this.indexScopedSettings = indexScopedSettings;
     }

     /**
@@ -94,8 +94,7 @@
      */
     private void checkSupportedVersion(IndexMetaData indexMetaData) {
         if (indexMetaData.getState() == IndexMetaData.State.OPEN && isSupportedVersion(indexMetaData) == false) {
-            throw new IllegalStateException("The index [" + indexMetaData.getIndex() + "] was created before v2.0.0.beta1 and wasn't " +
-                "upgraded."
+            throw new IllegalStateException("The index [" + indexMetaData.getIndex() + "] was created before v2.0.0.beta1 and wasn't upgraded."
                 + " This index should be open using a version before " + Version.CURRENT.minimumCompatibilityVersion()
                 + " and upgraded using the upgrade API.");
         }
@@ -128,12 +127,10 @@
         SimilarityService similarityService = new SimilarityService(indexSettings, Collections.emptyMap());
         try (AnalysisService analysisService = new FakeAnalysisService(indexSettings)) {
-            try (MapperService mapperService = new MapperService(indexSettings, analysisService, similarityService, mapperRegistry,
-                () -> null)) {
+            try (MapperService mapperService = new MapperService(indexSettings, analysisService, similarityService, mapperRegistry, () -> null)) {
                 for (ObjectCursor<MappingMetaData> cursor : indexMetaData.getMappings().values()) {
                     MappingMetaData mappingMetaData = cursor.value;
-                    mapperService.merge(mappingMetaData.type(), mappingMetaData.source(), MapperService.MergeReason.MAPPING_RECOVERY,
-                        false);
+                    mapperService.merge(mappingMetaData.type(), mappingMetaData.source(), MapperService.MergeReason.MAPPING_RECOVERY, false);
                 }
             }
         }
@@ -147,8 +144,7 @@
      * Marks index as upgraded so we don't have to test it again
      */
     private IndexMetaData markAsUpgraded(IndexMetaData indexMetaData) {
-        Settings settings = Settings.builder().put(indexMetaData.getSettings()).put(IndexMetaData.SETTING_VERSION_UPGRADED, Version
-            .CURRENT).build();
+        Settings settings = Settings.builder().put(indexMetaData.getSettings()).put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.CURRENT).build();
         return IndexMetaData.builder(indexMetaData).settings(settings).build();
     }
@@ -180,45 +176,13 @@
         }
     }

-    private static final String ARCHIVED_SETTINGS_PREFIX = "archived.";
-
     IndexMetaData archiveBrokenIndexSettings(IndexMetaData indexMetaData) {
-        Settings settings = indexMetaData.getSettings();
-        Settings.Builder builder = Settings.builder();
-        boolean changed = false;
-        for (Map.Entry<String, String> entry : settings.getAsMap().entrySet()) {
-            try {
-                Setting<?> setting = indexScopedSettigns.get(entry.getKey());
-                if (setting != null) {
-                    setting.get(settings);
-                    builder.put(entry.getKey(), entry.getValue());
-                } else {
-                    if (indexScopedSettigns.isPrivateSetting(entry.getKey()) || entry.getKey().startsWith(ARCHIVED_SETTINGS_PREFIX)) {
-                        builder.put(entry.getKey(), entry.getValue());
-                    } else {
-                        changed = true;
-                        logger.warn("[{}] found unknown index setting: {} value: {} - archiving", indexMetaData.getIndex(), entry.getKey(), entry.getValue());
-                        // we put them back in here such that tools can check from the outside if there are any indices with broken
-                        // settings. The setting can remain there but we want users to be aware that some of their settings are
-                        // broken and they can research why and what they need to do to replace them.
-                        builder.put(ARCHIVED_SETTINGS_PREFIX + entry.getKey(), entry.getValue());
-                    }
-                }
-            } catch (IllegalArgumentException ex) {
-                changed = true;
-                logger.warn("[{}] found invalid index setting: {} value: {} - archiving", ex, indexMetaData.getIndex(), entry.getKey(), entry.getValue());
-                // we put them back in here such that tools can check from the outside if there are any indices with broken settings.
-                // The setting can remain there but we want users to be aware that some of their settings are broken and they can
-                // research why and what they need to do to replace them.
-                builder.put(ARCHIVED_SETTINGS_PREFIX + entry.getKey(), entry.getValue());
-            }
-        }
-        return changed ? IndexMetaData.builder(indexMetaData).settings(builder.build()).build() : indexMetaData;
+        final Settings settings = indexMetaData.getSettings();
+        final Settings upgrade = indexScopedSettings.archiveUnknownOrBrokenSettings(settings);
+        if (upgrade != settings) {
+            return IndexMetaData.builder(indexMetaData).settings(upgrade).build();
+        } else {
+            return indexMetaData;
+        }
     }
 }

View File

@@ -1002,4 +1002,8 @@ public class ClusterService extends AbstractLifecycleComponent<ClusterService> {
             }
         }
     }

+    public ClusterSettings getClusterSettings() {
+        return clusterSettings;
+    }
 }

View File

@@ -48,6 +48,7 @@ import java.util.stream.Collectors;
  * This service offers transactional application of updates settings.
  */
 public abstract class AbstractScopedSettings extends AbstractComponent {
+    public static final String ARCHIVED_SETTINGS_PREFIX = "archived.";
     private Settings lastSettingsApplied = Settings.EMPTY;
     private final List<SettingUpdater<?>> settingUpdaters = new CopyOnWriteArrayList<>();
     private final Map<String, Setting<?>> complexMatchers;
@@ -478,4 +479,53 @@
         }
         return null;
     }

+    /**
+     * Archives broken or unknown settings. Any setting that is not recognized or fails
+     * validation will be archived. This means the setting is prefixed with {@value ARCHIVED_SETTINGS_PREFIX}
+     * and remains in the settings object. This can be used to detect broken settings via APIs.
+     */
+    public Settings archiveUnknownOrBrokenSettings(Settings settings) {
+        Settings.Builder builder = Settings.builder();
+        boolean changed = false;
+        for (Map.Entry<String, String> entry : settings.getAsMap().entrySet()) {
+            try {
+                Setting<?> setting = get(entry.getKey());
+                if (setting != null) {
+                    setting.get(settings);
+                    builder.put(entry.getKey(), entry.getValue());
+                } else {
+                    if (entry.getKey().startsWith(ARCHIVED_SETTINGS_PREFIX) || isPrivateSetting(entry.getKey())) {
+                        builder.put(entry.getKey(), entry.getValue());
+                    } else {
+                        changed = true;
+                        logger.warn("found unknown setting: {} value: {} - archiving", entry.getKey(), entry.getValue());
+                        // we put them back in here such that tools can check from the outside if there are any broken settings.
+                        // The setting can remain there but we want users to be aware that some of their settings are broken and
+                        // they can research why and what they need to do to replace them.
+                        builder.put(ARCHIVED_SETTINGS_PREFIX + entry.getKey(), entry.getValue());
+                    }
+                }
+            } catch (IllegalArgumentException ex) {
+                changed = true;
+                logger.warn("found invalid setting: {} value: {} - archiving", ex, entry.getKey(), entry.getValue());
+                // we put them back in here such that tools can check from the outside if there are any broken settings.
+                // The setting can remain there but we want users to be aware that some of their settings are broken and
+                // they can research why and what they need to do to replace them.
+                builder.put(ARCHIVED_SETTINGS_PREFIX + entry.getKey(), entry.getValue());
+            }
+        }
+        if (changed) {
+            return builder.build();
+        } else {
+            return settings;
+        }
+    }
+
+    /**
+     * Returns <code>true</code> iff the setting is a private setting ie. it should be treated as valid even though it has no internal
+     * representation. Otherwise <code>false</code>
+     */
+    // TODO this should be replaced by Setting.Property.HIDDEN or something like this.
+    protected boolean isPrivateSetting(String key) {
+        return false;
+    }
 }
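
When every setting validates, the method hands back the very same Settings instance, so callers can detect changes by reference equality, which is exactly what MetaDataIndexUpgradeService.archiveBrokenIndexSettings does above. A hedged usage sketch (the setting names are invented, and an IndexScopedSettings instance is assumed to be at hand):

    Settings input = Settings.builder()
        .put("index.number_of_replicas", 1)        // known and valid -> kept as-is
        .put("index.some.unknown.setting", "foo")  // unknown -> archived
        .build();
    Settings archived = indexScopedSettings.archiveUnknownOrBrokenSettings(input);
    // archived now holds "archived.index.some.unknown.setting" = "foo";
    // if nothing needed archiving, archived == input (same instance)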

View File

@@ -171,7 +171,8 @@ public final class IndexScopedSettings extends AbstractScopedSettings {
         super.validateSettingKey(setting);
     }

-    public boolean isPrivateSetting(String key) {
+    @Override
+    protected final boolean isPrivateSetting(String key) {
         switch (key) {
             case IndexMetaData.SETTING_CREATION_DATE:
             case IndexMetaData.SETTING_INDEX_UUID:

View File

@@ -24,12 +24,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.env.NodeEnvironment;
-import org.elasticsearch.gateway.MetaDataStateFormat;
-import org.elasticsearch.gateway.MetaStateService;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.IndexSettings;

View File

@@ -961,6 +961,10 @@ public final class XContentBuilder implements BytesStream, Releasable {
         return this;
     }

+    public XContentBuilder timeValueField(String rawFieldName, String readableFieldName, TimeValue timeValue) throws IOException {
+        return timeValueField(rawFieldName, readableFieldName, timeValue.millis(), TimeUnit.MILLISECONDS);
+    }
+
     public XContentBuilder timeValueField(String rawFieldName, String readableFieldName, long rawTime, TimeUnit timeUnit) throws
             IOException {
         if (humanReadable) {
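
The new overload simply delegates to the millisecond variant. A hedged usage sketch (field names invented):

    XContentBuilder builder = XContentFactory.jsonBuilder().humanReadable(true);
    builder.startObject();
    builder.timeValueField("took_in_millis", "took", TimeValue.timeValueMillis(1500));
    builder.endObject();
    // with humanReadable(true), roughly: {"took":"1.5s","took_in_millis":1500}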

View File

@@ -19,6 +19,8 @@

 package org.elasticsearch.gateway;

+import com.carrotsearch.hppc.ObjectFloatHashMap;
+import com.carrotsearch.hppc.cursors.ObjectCursor;
 import org.apache.lucene.util.IOUtils;
 import org.elasticsearch.action.FailedNodeException;
 import org.elasticsearch.cluster.ClusterChangedEvent;
@@ -28,9 +30,11 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.component.AbstractComponent;
+import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.discovery.Discovery;
 import org.elasticsearch.env.NodeEnvironment;
+import org.elasticsearch.index.Index;
 import org.elasticsearch.index.NodeServicesProvider;
 import org.elasticsearch.indices.IndicesService;
@@ -84,6 +88,7 @@ public class Gateway extends AbstractComponent implements ClusterStateListener {
             }
         }

+        ObjectFloatHashMap<Index> indices = new ObjectFloatHashMap<>();
         MetaData electedGlobalState = null;
         int found = 0;
         for (TransportNodesListGatewayMetaState.NodeGatewayMetaState nodeState : nodesState) {
@@ -96,30 +101,64 @@ public class Gateway extends AbstractComponent implements ClusterStateListener {
             } else if (nodeState.metaData().version() > electedGlobalState.version()) {
                 electedGlobalState = nodeState.metaData();
             }
+            for (ObjectCursor<IndexMetaData> cursor : nodeState.metaData().indices().values()) {
+                indices.addTo(cursor.value.getIndex(), 1);
+            }
         }
         if (found < requiredAllocation) {
             listener.onFailure("found [" + found + "] metadata states, required [" + requiredAllocation + "]");
             return;
         }
-        // verify index metadata
-        MetaData.Builder metaDataBuilder = MetaData.builder(electedGlobalState);
-        for (IndexMetaData indexMetaData : electedGlobalState) {
-            try {
-                if (indexMetaData.getState() == IndexMetaData.State.OPEN) {
-                    // verify that we can actually create this index - if not we recover it as closed with lots of warn logs
-                    indicesService.verifyIndexMetadata(nodeServicesProvider, indexMetaData);
-                }
-            } catch (Exception e) {
-                logger.warn("recovering index {} failed - recovering as closed", e, indexMetaData.getIndex());
-                indexMetaData = IndexMetaData.builder(indexMetaData).state(IndexMetaData.State.CLOSE).build();
-                metaDataBuilder.put(indexMetaData, true);
-            }
-        }
+        // update the global state, and clean the indices, we elect them in the next phase
+        MetaData.Builder metaDataBuilder = MetaData.builder(electedGlobalState).removeAllIndices();
+        assert !indices.containsKey(null);
+        final Object[] keys = indices.keys;
+        for (int i = 0; i < keys.length; i++) {
+            if (keys[i] != null) {
+                Index index = (Index) keys[i];
+                IndexMetaData electedIndexMetaData = null;
+                int indexMetaDataCount = 0;
+                for (TransportNodesListGatewayMetaState.NodeGatewayMetaState nodeState : nodesState) {
+                    if (nodeState.metaData() == null) {
+                        continue;
+                    }
+                    IndexMetaData indexMetaData = nodeState.metaData().index(index);
+                    if (indexMetaData == null) {
+                        continue;
+                    }
+                    if (electedIndexMetaData == null) {
+                        electedIndexMetaData = indexMetaData;
+                    } else if (indexMetaData.getVersion() > electedIndexMetaData.getVersion()) {
+                        electedIndexMetaData = indexMetaData;
+                    }
+                    indexMetaDataCount++;
+                }
+                if (electedIndexMetaData != null) {
+                    if (indexMetaDataCount < requiredAllocation) {
+                        logger.debug("[{}] found [{}], required [{}], not adding", index, indexMetaDataCount, requiredAllocation);
+                    } // TODO if this logging statement is correct then we are missing an else here
+                    try {
+                        if (electedIndexMetaData.getState() == IndexMetaData.State.OPEN) {
+                            // verify that we can actually create this index - if not we recover it as closed with lots of warn logs
+                            indicesService.verifyIndexMetadata(nodeServicesProvider, electedIndexMetaData);
+                        }
+                    } catch (Exception e) {
+                        logger.warn("recovering index {} failed - recovering as closed", e, electedIndexMetaData.getIndex());
+                        electedIndexMetaData = IndexMetaData.builder(electedIndexMetaData).state(IndexMetaData.State.CLOSE).build();
+                    }
+                    metaDataBuilder.put(electedIndexMetaData, false);
+                }
+            }
+        }
+        final ClusterSettings clusterSettings = clusterService.getClusterSettings();
+        metaDataBuilder.persistentSettings(clusterSettings.archiveUnknownOrBrokenSettings(metaDataBuilder.persistentSettings()));
+        metaDataBuilder.transientSettings(clusterSettings.archiveUnknownOrBrokenSettings(metaDataBuilder.transientSettings()));
         ClusterState.Builder builder = ClusterState.builder(clusterService.state().getClusterName());
         builder.metaData(metaDataBuilder);
         listener.onSuccess(builder.build());
     }

     public void reset() throws Exception {
         try {
             Path[] dataPaths = nodeEnv.nodeDataPaths();
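
The per-index election mirrors the global-state election: among all copies of an index's metadata reported by the nodes, the highest metadata version wins, and an index seen on fewer than requiredAllocation nodes is only logged (see the TODO above). A stripped-down sketch of the election rule, as a hypothetical helper that is not part of the commit:

    static IndexMetaData electIndexMetaData(List<IndexMetaData> copies) {
        IndexMetaData elected = null;
        for (IndexMetaData candidate : copies) {
            if (elected == null || candidate.getVersion() > elected.getVersion()) {
                elected = candidate; // newer metadata version wins
            }
        }
        return elected;
    }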

View File

@@ -67,8 +67,10 @@ import org.elasticsearch.env.Environment;
 import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.Reader;
+import java.nio.charset.CharacterCodingException;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Path;
+import java.nio.file.Paths;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -163,7 +165,8 @@
         NAMED_STOP_WORDS = unmodifiableMap(namedStopWords);
     }

-    public static CharArraySet parseWords(Environment env, Settings settings, String name, CharArraySet defaultWords, Map<String, Set<?>> namedWords, boolean ignoreCase) {
+    public static CharArraySet parseWords(Environment env, Settings settings, String name, CharArraySet defaultWords,
+                                          Map<String, Set<?>> namedWords, boolean ignoreCase) {
         String value = settings.get(name);
         if (value != null) {
             if ("_none_".equals(value)) {
@@ -237,12 +240,17 @@
             }
         }

-        final Path wordListFile = env.configFile().resolve(wordListPath);
-        try (BufferedReader reader = FileSystemUtils.newBufferedReader(wordListFile.toUri().toURL(), StandardCharsets.UTF_8)) {
+        final Path path = env.configFile().resolve(wordListPath);
+        try (BufferedReader reader = FileSystemUtils.newBufferedReader(path.toUri().toURL(), StandardCharsets.UTF_8)) {
             return loadWordList(reader, "#");
+        } catch (CharacterCodingException ex) {
+            String message = String.format(Locale.ROOT,
+                "Unsupported character encoding detected while reading %s_path: %s - files must be UTF-8 encoded",
+                settingPrefix, path.toString());
+            throw new IllegalArgumentException(message, ex);
         } catch (IOException ioe) {
-            String message = String.format(Locale.ROOT, "IOException while reading %s_path: %s", settingPrefix);
+            String message = String.format(Locale.ROOT, "IOException while reading %s_path: %s", settingPrefix, path.toString());
             throw new IllegalArgumentException(message, ioe);
         }
     }
@@ -256,7 +264,7 @@
         } else {
             br = new BufferedReader(reader);
         }
-        String word = null;
+        String word;
         while ((word = br.readLine()) != null) {
             if (!Strings.hasText(word)) {
                 continue;
@@ -283,13 +291,16 @@
         if (filePath == null) {
             return null;
         }

         final Path path = env.configFile().resolve(filePath);

         try {
             return FileSystemUtils.newBufferedReader(path.toUri().toURL(), StandardCharsets.UTF_8);
+        } catch (CharacterCodingException ex) {
+            String message = String.format(Locale.ROOT,
+                "Unsupported character encoding detected while reading %s_path: %s files must be UTF-8 encoded",
+                settingPrefix, path.toString());
+            throw new IllegalArgumentException(message, ex);
         } catch (IOException ioe) {
-            String message = String.format(Locale.ROOT, "IOException while reading %s_path: %s", settingPrefix);
+            String message = String.format(Locale.ROOT, "IOException while reading %s_path: %s", settingPrefix, path.toString());
             throw new IllegalArgumentException(message, ioe);
         }
     }
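
Since FileSystemUtils.newBufferedReader decodes strictly, a word list that is not valid UTF-8 now fails with a targeted message instead of the generic IOException one (which previously even lacked the path argument for its %s placeholder). A hedged sketch of the happy path, assuming loadWordList keeps the Reader-based signature used above (the input literal is invented):

    // loadWordList skips blank lines and lines starting with the comment marker:
    try (Reader reader = new StringReader("foo\n# a comment\n\nbar")) {
        List<String> words = Analysis.loadWordList(reader, "#"); // expected: ["foo", "bar"]
    }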

View File

@@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper.internal;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.IndexableField;
+import org.apache.lucene.search.Query;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.settings.Settings;
@@ -32,6 +33,7 @@ import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.mapper.MetadataFieldMapper;
 import org.elasticsearch.index.mapper.ParseContext;
+import org.elasticsearch.index.query.QueryShardContext;

 import java.io.IOException;
 import java.util.ArrayList;
@@ -192,6 +194,14 @@
         public boolean useTermQueryWithQueryString() {
             return true;
         }
+
+        @Override
+        public Query termQuery(Object value, QueryShardContext context) {
+            if (isEnabled() == false) {
+                throw new IllegalStateException("Cannot run [exists] queries if the [_field_names] field is disabled");
+            }
+            return super.termQuery(value, context);
+        }
     }

     private FieldNamesFieldMapper(Settings indexSettings, MappedFieldType existing) {
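
With _field_names enabled (the default), the inherited implementation produces a plain Lucene term query against the meta field; the override only adds the guard for the disabled case. Roughly the query shape an exists query on an invented field "user" resolves to:

    Query existsQuery = new TermQuery(new Term("_field_names", "user"));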

View File

@@ -19,11 +19,15 @@

 package org.elasticsearch.index.mapper.object;

+import org.elasticsearch.Version;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.regex.Regex;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.mapper.ContentPath;
 import org.elasticsearch.index.mapper.MapperParsingException;

+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
@@ -33,30 +37,41 @@ import java.util.TreeMap;
 /**
  *
  */
-public class DynamicTemplate {
+public class DynamicTemplate implements ToXContent {

     public static enum MatchType {
-        SIMPLE,
-        REGEX;
+        SIMPLE {
+            @Override
+            public String toString() {
+                return "simple";
+            }
+        },
+        REGEX {
+            @Override
+            public String toString() {
+                return "regex";
+            }
+        };

         public static MatchType fromString(String value) {
-            if ("simple".equals(value)) {
-                return SIMPLE;
-            } else if ("regex".equals(value)) {
-                return REGEX;
+            for (MatchType v : values()) {
+                if (v.toString().equals(value)) {
+                    return v;
+                }
             }
             throw new IllegalArgumentException("No matching pattern matched on [" + value + "]");
         }
     }

-    public static DynamicTemplate parse(String name, Map<String, Object> conf) throws MapperParsingException {
+    public static DynamicTemplate parse(String name, Map<String, Object> conf,
+            Version indexVersionCreated) throws MapperParsingException {
         String match = null;
         String pathMatch = null;
         String unmatch = null;
         String pathUnmatch = null;
         Map<String, Object> mapping = null;
         String matchMappingType = null;
-        String matchPattern = "simple";
+        String matchPattern = MatchType.SIMPLE.toString();

         for (Map.Entry<String, Object> entry : conf.entrySet()) {
             String propName = Strings.toUnderscoreCase(entry.getKey());
@@ -74,22 +89,18 @@ public class DynamicTemplate {
                 matchPattern = entry.getValue().toString();
             } else if ("mapping".equals(propName)) {
                 mapping = (Map<String, Object>) entry.getValue();
+            } else if (indexVersionCreated.onOrAfter(Version.V_5_0_0)) {
+                // unknown parameters were ignored before but still carried through serialization
+                // so we need to ignore them at parsing time for old indices
+                throw new IllegalArgumentException("Illegal dynamic template parameter: [" + propName + "]");
             }
         }

-        if (match == null && pathMatch == null && matchMappingType == null) {
-            throw new MapperParsingException("template must have match, path_match or match_mapping_type set");
-        }
-        if (mapping == null) {
-            throw new MapperParsingException("template must have mapping set");
-        }
-        return new DynamicTemplate(name, conf, pathMatch, pathUnmatch, match, unmatch, matchMappingType, MatchType.fromString(matchPattern), mapping);
+        return new DynamicTemplate(name, pathMatch, pathUnmatch, match, unmatch, matchMappingType, MatchType.fromString(matchPattern), mapping);
     }

     private final String name;

-    private final Map<String, Object> conf;
-
     private final String pathMatch;

     private final String pathUnmatch;
@@ -104,9 +115,14 @@
     private final Map<String, Object> mapping;

-    public DynamicTemplate(String name, Map<String, Object> conf, String pathMatch, String pathUnmatch, String match, String unmatch, String matchMappingType, MatchType matchType, Map<String, Object> mapping) {
+    public DynamicTemplate(String name, String pathMatch, String pathUnmatch, String match, String unmatch, String matchMappingType, MatchType matchType, Map<String, Object> mapping) {
+        if (match == null && pathMatch == null && matchMappingType == null) {
+            throw new MapperParsingException("template must have match, path_match or match_mapping_type set");
+        }
+        if (mapping == null) {
+            throw new MapperParsingException("template must have mapping set");
+        }
         this.name = name;
-        this.conf = new TreeMap<>(conf);
         this.pathMatch = pathMatch;
         this.pathUnmatch = pathUnmatch;
         this.match = match;
@@ -120,10 +136,6 @@
         return this.name;
     }

-    public Map<String, Object> conf() {
-        return this.conf;
-    }
-
     public boolean match(ContentPath path, String name, String dynamicType) {
         if (pathMatch != null && !patternMatch(pathMatch, path.pathAsText(name))) {
             return false;
@@ -148,10 +160,6 @@
         return true;
     }

-    public boolean hasType() {
-        return mapping.containsKey("type");
-    }
-
     public String mappingType(String dynamicType) {
         return mapping.containsKey("type") ? mapping.get("type").toString().replace("{dynamic_type}", dynamicType).replace("{dynamicType}", dynamicType) : dynamicType;
     }
@@ -200,40 +208,29 @@
     }

     @Override
-    public boolean equals(Object o) {
-        if (this == o) {
-            return true;
-        }
-        if (o == null || getClass() != o.getClass()) {
-            return false;
-        }
-
-        DynamicTemplate that = (DynamicTemplate) o;
-
-        // check if same matching, if so, replace the mapping
-        if (match != null ? !match.equals(that.match) : that.match != null) {
-            return false;
-        }
-        if (matchMappingType != null ? !matchMappingType.equals(that.matchMappingType) : that.matchMappingType != null) {
-            return false;
-        }
-        if (matchType != that.matchType) {
-            return false;
-        }
-        if (unmatch != null ? !unmatch.equals(that.unmatch) : that.unmatch != null) {
-            return false;
-        }
-
-        return true;
-    }
-
-    @Override
-    public int hashCode() {
-        // check if same matching, if so, replace the mapping
-        int result = match != null ? match.hashCode() : 0;
-        result = 31 * result + (unmatch != null ? unmatch.hashCode() : 0);
-        result = 31 * result + (matchType != null ? matchType.hashCode() : 0);
-        result = 31 * result + (matchMappingType != null ? matchMappingType.hashCode() : 0);
-        return result;
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        if (match != null) {
+            builder.field("match", match);
+        }
+        if (pathMatch != null) {
+            builder.field("path_match", pathMatch);
+        }
+        if (unmatch != null) {
+            builder.field("unmatch", unmatch);
+        }
+        if (pathUnmatch != null) {
+            builder.field("path_unmatch", pathUnmatch);
+        }
+        if (matchMappingType != null) {
+            builder.field("match_mapping_type", matchMappingType);
+        }
+        if (matchType != MatchType.SIMPLE) {
+            builder.field("match_pattern", matchType);
+        }
+        // use a sorted map for consistent serialization
+        builder.field("mapping", new TreeMap<>(mapping));
+        builder.endObject();
+        return builder;
     }
 }
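
Serialization now reflects the parsed fields rather than echoing the raw conf map, so unknown keys no longer survive a round-trip. A hedged sketch of the output (constructor arguments invented):

    Map<String, Object> mapping = new HashMap<>();
    mapping.put("type", "{dynamic_type}");
    DynamicTemplate template = new DynamicTemplate("my_template", null, null, "*_text", null, null,
            DynamicTemplate.MatchType.SIMPLE, mapping);
    XContentBuilder builder = XContentFactory.jsonBuilder();
    template.toXContent(builder, ToXContent.EMPTY_PARAMS);
    // roughly: {"match":"*_text","mapping":{"type":"{dynamic_type}"}}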

View File

@@ -19,6 +19,7 @@

 package org.elasticsearch.index.mapper.object;

+import org.elasticsearch.Version;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.joda.FormatDateTimeFormatter;
@@ -140,14 +141,15 @@
             String fieldName = Strings.toUnderscoreCase(entry.getKey());
             Object fieldNode = entry.getValue();
             if (parseObjectOrDocumentTypeProperties(fieldName, fieldNode, parserContext, builder)
-                || processField(builder, fieldName, fieldNode)) {
+                || processField(builder, fieldName, fieldNode, parserContext.indexVersionCreated())) {
                 iterator.remove();
             }
         }
         return builder;
     }

-    protected boolean processField(ObjectMapper.Builder builder, String fieldName, Object fieldNode) {
+    protected boolean processField(ObjectMapper.Builder builder, String fieldName, Object fieldNode,
+            Version indexVersionCreated) {
         if (fieldName.equals("date_formats") || fieldName.equals("dynamic_date_formats")) {
             List<FormatDateTimeFormatter> dateTimeFormatters = new ArrayList<>();
             if (fieldNode instanceof List) {
@@ -185,7 +187,10 @@
                     throw new MapperParsingException("A dynamic template must be defined with a name");
                 }
                 Map.Entry<String, Object> entry = tmpl.entrySet().iterator().next();
-                ((Builder) builder).add(DynamicTemplate.parse(entry.getKey(), (Map<String, Object>) entry.getValue()));
+                String templateName = entry.getKey();
+                Map<String, Object> templateParams = (Map<String, Object>) entry.getValue();
+                DynamicTemplate template = DynamicTemplate.parse(templateName, templateParams, indexVersionCreated);
+                ((Builder) builder).add(template);
             }
             return true;
         } else if (fieldName.equals("date_detection")) {
@@ -329,8 +334,7 @@
             builder.startArray("dynamic_templates");
             for (DynamicTemplate dynamicTemplate : dynamicTemplates) {
                 builder.startObject();
-                builder.field(dynamicTemplate.name());
-                builder.map(dynamicTemplate.conf());
+                builder.field(dynamicTemplate.name(), dynamicTemplate);
                 builder.endObject();
             }
             builder.endArray();

View File

@@ -23,18 +23,16 @@ import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.ConstantScoreQuery;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.search.TermRangeQuery;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.internal.FieldNamesFieldMapper;
-import org.elasticsearch.index.mapper.object.ObjectMapper;

 import java.io.IOException;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.Objects;

 /**
@@ -82,38 +80,18 @@ public class ExistsQueryBuilder extends AbstractQueryBuilder<ExistsQueryBuilder>
             return Queries.newMatchNoDocsQuery();
         }

-        ObjectMapper objectMapper = context.getObjectMapper(fieldPattern);
-        if (objectMapper != null) {
-            // automatic make the object mapper pattern
-            fieldPattern = fieldPattern + ".*";
-        }
-
-        Collection<String> fields = context.simpleMatchToIndexNames(fieldPattern);
-        if (fields.isEmpty()) {
-            // no fields exists, so we should not match anything
-            return Queries.newMatchNoDocsQuery();
-        }
+        final Collection<String> fields;
+        if (context.getObjectMapper(fieldPattern) != null) {
+            // the _field_names field also indexes objects, so we don't have to
+            // do any more work to support exists queries on whole objects
+            fields = Collections.singleton(fieldPattern);
+        } else {
+            fields = context.simpleMatchToIndexNames(fieldPattern);
+        }

         BooleanQuery.Builder boolFilterBuilder = new BooleanQuery.Builder();
         for (String field : fields) {
-            MappedFieldType fieldType = context.fieldMapper(field);
-            Query filter = null;
-            if (fieldNamesFieldType.isEnabled()) {
-                final String f;
-                if (fieldType != null) {
-                    f = fieldType.name();
-                } else {
-                    f = field;
-                }
-                filter = fieldNamesFieldType.termQuery(f, context);
-            }
-            // if _field_names are not indexed, we need to go the slow way
-            if (filter == null && fieldType != null) {
-                filter = fieldType.rangeQuery(null, null, true, true);
-            }
-            if (filter == null) {
-                filter = new TermRangeQuery(field, null, null, true, true);
-            }
+            Query filter = fieldNamesFieldType.termQuery(field, context);
             boolFilterBuilder.add(filter, BooleanClause.Occur.SHOULD);
         }
         return new ConstantScoreQuery(boolFilterBuilder.build());
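
Together with the FieldNamesFieldMapper change above, the TermRangeQuery fallback is gone: exists queries always resolve through _field_names, and disabling that field now fails fast at shard level instead of silently taking a slow path. Client-side construction is unchanged (field name invented):

    QueryBuilder query = QueryBuilders.existsQuery("user");
    // executing it against an index with "_field_names": {"enabled": false} now throws:
    // "Cannot run [exists] queries if the [_field_names] field is disabled"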

View File

@@ -105,6 +105,7 @@ public class QueryShardContext extends QueryRewriteContext {
         this.allowUnmappedFields = indexSettings.isDefaultAllowUnmappedFields();
         this.indicesQueriesRegistry = indicesQueriesRegistry;
         this.percolatorQueryCache = percolatorQueryCache;
+        this.nestedScope = new NestedScope();
     }

     public QueryShardContext(QueryShardContext source) {
@@ -113,6 +114,7 @@
     }

+    @Override
     public QueryShardContext clone() {
         return new QueryShardContext(indexSettings, bitsetFilterCache, indexFieldDataService, mapperService, similarityService, scriptService, indicesQueriesRegistry, percolatorQueryCache);
     }

View File

@@ -22,10 +22,6 @@ package org.elasticsearch.index.query;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermRangeQuery;
 import org.apache.lucene.util.BytesRef;
-import org.elasticsearch.action.fieldstats.FieldStats;
-import org.elasticsearch.action.fieldstats.IndexConstraint;
-import org.elasticsearch.action.fieldstats.IndexConstraint.Comparison;
-import org.elasticsearch.action.fieldstats.IndexConstraint.Property;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -259,8 +255,8 @@ public class RangeQueryBuilder extends AbstractQueryBuilder<RangeQueryBuilder> i
     }

     @Override
-    protected QueryBuilder<?> doRewrite(QueryRewriteContext queryShardContext) throws IOException {
-        FieldStatsProvider fieldStatsProvider = queryShardContext.getFieldStatsProvider();
+    protected QueryBuilder<?> doRewrite(QueryRewriteContext queryRewriteContext) throws IOException {
+        FieldStatsProvider fieldStatsProvider = queryRewriteContext.getFieldStatsProvider();
         // If the fieldStatsProvider is null we are not on the shard and cannot
         // rewrite so just return without rewriting
         if (fieldStatsProvider != null) {
@@ -271,17 +267,10 @@
                 case DISJOINT:
                     return new MatchNoneQueryBuilder();
                 case WITHIN:
-                    FieldStats<?> fieldStats = fieldStatsProvider.get(fieldName);
-                    if (!(fieldStats.getMinValue().equals(from) && fieldStats.getMaxValue().equals(to) && includeUpper && includeLower)) {
-                        // Rebuild the range query with the bounds for this shard.
-                        // The includeLower/Upper values are preserved only if the
-                        // bound has not been changed by the rewrite
+                    if (from != null || to != null) {
                         RangeQueryBuilder newRangeQuery = new RangeQueryBuilder(fieldName);
-                        String dateFormatString = format == null ? null : format.format();
-                        newRangeQuery.from(fieldStats.getMinValue(), includeLower || fieldStats.match(
-                                new IndexConstraint(fieldName, Property.MIN, Comparison.GT, fieldStats.stringValueOf(from, dateFormatString))));
-                        newRangeQuery.to(fieldStats.getMaxValue(), includeUpper || fieldStats.match(
-                                new IndexConstraint(fieldName, Property.MAX, Comparison.LT, fieldStats.stringValueOf(to, dateFormatString))));
+                        newRangeQuery.from(null);
+                        newRangeQuery.to(null);
                         newRangeQuery.format = format;
                         newRangeQuery.timeZone = timeZone;
                         return newRangeQuery;
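
The WITHIN branch no longer tightens the bounds from field stats: when the shard's values provably all fall inside the range, the query degenerates to an unbounded range on the same field, which still requires the field to be present, while keeping format and time zone. A sketch of the equivalent rewritten builder (field name invented):

    RangeQueryBuilder rewritten = new RangeQueryBuilder("timestamp");
    rewritten.from(null); // no lower bound
    rewritten.to(null);   // no upper bound -> matches every document that has the field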

View File

@@ -20,9 +20,7 @@

 package org.elasticsearch.index.query.support;

 import org.apache.lucene.search.Query;
-import org.apache.lucene.search.join.BitSetProducer;
 import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.XContentParser;
@@ -45,46 +43,18 @@ public class NestedInnerQueryParseSupport {
     protected final QueryParseContext parseContext;

     private BytesReference source;
-    private Query innerQuery;
     private Query innerFilter;
     protected String path;

     private boolean filterParsed = false;
-    private boolean queryParsed = false;
-    protected boolean queryFound = false;
     protected boolean filterFound = false;

-    protected BitSetProducer parentFilter;
-    protected Query childFilter;
-
     protected ObjectMapper nestedObjectMapper;
-    private ObjectMapper parentObjectMapper;

     public NestedInnerQueryParseSupport(XContentParser parser, QueryShardContext context) {
         shardContext = context;
         parseContext = shardContext.parseContext();
         shardContext.reset(parser);
     }

-    public NestedInnerQueryParseSupport(QueryShardContext context) {
-        this.parseContext = context.parseContext();
-        this.shardContext = context;
-    }
-
-    public void query() throws IOException {
-        if (path != null) {
-            setPathLevel();
-            try {
-                innerQuery = parseContext.parseInnerQueryBuilder().toQuery(this.shardContext);
-            } finally {
-                resetPathLevel();
-            }
-            queryParsed = true;
-        } else {
-            source = XContentFactory.smileBuilder().copyCurrentStructure(parseContext.parser()).bytes();
-        }
-        queryFound = true;
-    }

     public void filter() throws IOException {
@@ -103,35 +73,6 @@
         filterFound = true;
     }

-    public Query getInnerQuery() throws IOException {
-        if (queryParsed) {
-            return innerQuery;
-        } else {
-            if (path == null) {
-                throw new QueryShardException(shardContext, "[nested] requires 'path' field");
-            }
-            if (!queryFound) {
-                throw new QueryShardException(shardContext, "[nested] requires either 'query' or 'filter' field");
-            }
-            XContentParser old = parseContext.parser();
-            try {
-                XContentParser innerParser = XContentHelper.createParser(source);
-                parseContext.parser(innerParser);
-                setPathLevel();
-                try {
-                    innerQuery = parseContext.parseInnerQueryBuilder().toQuery(this.shardContext);
-                } finally {
-                    resetPathLevel();
-                }
-                queryParsed = true;
-                return innerQuery;
-            } finally {
-                parseContext.parser(old);
-            }
-        }
-    }
-
     public Query getInnerFilter() throws IOException {
         if (filterParsed) {
             return innerFilter;
@@ -178,27 +119,12 @@
         return nestedObjectMapper;
     }

-    public boolean queryFound() {
-        return queryFound;
-    }
-
     public boolean filterFound() {
         return filterFound;
     }

-    public ObjectMapper getParentObjectMapper() {
-        return parentObjectMapper;
-    }
-
     private void setPathLevel() {
-        ObjectMapper objectMapper = shardContext.nestedScope().getObjectMapper();
-        if (objectMapper == null) {
-            parentFilter = shardContext.bitsetFilter(Queries.newNonNestedFilter());
-        } else {
-            parentFilter = shardContext.bitsetFilter(objectMapper.nestedTypeFilter());
-        }
-        childFilter = nestedObjectMapper.nestedTypeFilter();
-        parentObjectMapper = shardContext.nestedScope().nextLevel(nestedObjectMapper);
+        shardContext.nestedScope().nextLevel(nestedObjectMapper);
     }

     private void resetPathLevel() {

View File

@@ -135,22 +135,22 @@ public class RestIndicesAction extends AbstractCatAction {
         table.addCell("fielddata.evictions", "sibling:pri;alias:fe,fielddataEvictions;default:false;text-align:right;desc:fielddata evictions");
         table.addCell("pri.fielddata.evictions", "default:false;text-align:right;desc:fielddata evictions");

-        table.addCell("query_cache.memory_size", "sibling:pri;alias:fcm,queryCacheMemory;default:false;text-align:right;desc:used query cache");
+        table.addCell("query_cache.memory_size", "sibling:pri;alias:qcm,queryCacheMemory;default:false;text-align:right;desc:used query cache");
         table.addCell("pri.query_cache.memory_size", "default:false;text-align:right;desc:used query cache");

-        table.addCell("query_cache.evictions", "sibling:pri;alias:fce,queryCacheEvictions;default:false;text-align:right;desc:query cache evictions");
+        table.addCell("query_cache.evictions", "sibling:pri;alias:qce,queryCacheEvictions;default:false;text-align:right;desc:query cache evictions");
         table.addCell("pri.query_cache.evictions", "default:false;text-align:right;desc:query cache evictions");

-        table.addCell("request_cache.memory_size", "sibling:pri;alias:qcm,queryCacheMemory;default:false;text-align:right;desc:used request cache");
+        table.addCell("request_cache.memory_size", "sibling:pri;alias:rcm,requestCacheMemory;default:false;text-align:right;desc:used request cache");
         table.addCell("pri.request_cache.memory_size", "default:false;text-align:right;desc:used request cache");

-        table.addCell("request_cache.evictions", "sibling:pri;alias:qce,queryCacheEvictions;default:false;text-align:right;desc:request cache evictions");
+        table.addCell("request_cache.evictions", "sibling:pri;alias:rce,requestCacheEvictions;default:false;text-align:right;desc:request cache evictions");
         table.addCell("pri.request_cache.evictions", "default:false;text-align:right;desc:request cache evictions");

-        table.addCell("request_cache.hit_count", "sibling:pri;alias:qchc,queryCacheHitCount;default:false;text-align:right;desc:request cache hit count");
+        table.addCell("request_cache.hit_count", "sibling:pri;alias:rchc,requestCacheHitCount;default:false;text-align:right;desc:request cache hit count");
         table.addCell("pri.request_cache.hit_count", "default:false;text-align:right;desc:request cache hit count");

-        table.addCell("request_cache.miss_count", "sibling:pri;alias:qcmc,queryCacheMissCount;default:false;text-align:right;desc:request cache miss count");
+        table.addCell("request_cache.miss_count", "sibling:pri;alias:rcmc,requestCacheMissCount;default:false;text-align:right;desc:request cache miss count");
         table.addCell("pri.request_cache.miss_count", "default:false;text-align:right;desc:request cache miss count");

         table.addCell("flush.total", "sibling:pri;alias:ft,flushTotal;default:false;text-align:right;desc:number of flushes");
View File
@@ -150,13 +150,13 @@ public class RestNodesAction extends AbstractCatAction {
table.addCell("fielddata.memory_size", "alias:fm,fielddataMemory;default:false;text-align:right;desc:used fielddata cache");
table.addCell("fielddata.evictions", "alias:fe,fielddataEvictions;default:false;text-align:right;desc:fielddata evictions");
table.addCell("query_cache.memory_size", "alias:fcm,queryCacheMemory;default:false;text-align:right;desc:used query cache");
table.addCell("query_cache.memory_size", "alias:qcm,queryCacheMemory;default:false;text-align:right;desc:used query cache");
table.addCell("query_cache.evictions", "alias:fce,queryCacheEvictions;default:false;text-align:right;desc:query cache evictions");
table.addCell("query_cache.evictions", "alias:qce,queryCacheEvictions;default:false;text-align:right;desc:query cache evictions");
table.addCell("request_cache.memory_size", "alias:qcm,requestCacheMemory;default:false;text-align:right;desc:used request cache");
table.addCell("request_cache.memory_size", "alias:rcm,requestCacheMemory;default:false;text-align:right;desc:used request cache");
table.addCell("request_cache.evictions", "alias:qce,requestCacheEvictions;default:false;text-align:right;desc:request cache evictions");
table.addCell("request_cache.evictions", "alias:rce,requestCacheEvictions;default:false;text-align:right;desc:request cache evictions");
table.addCell("request_cache.hit_count", "alias:qchc,requestCacheHitCount;default:false;text-align:right;desc:request cache hit counts");
table.addCell("request_cache.hit_count", "alias:rchc,requestCacheHitCount;default:false;text-align:right;desc:request cache hit counts");
table.addCell("request_cache.miss_count", "alias:qcmc,requestCacheMissCount;default:false;text-align:right;desc:request cache miss counts");
table.addCell("request_cache.miss_count", "alias:rcmc,requestCacheMissCount;default:false;text-align:right;desc:request cache miss counts");
table.addCell("flush.total", "alias:ft,flushTotal;default:false;text-align:right;desc:number of flushes");
table.addCell("flush.total_time", "alias:ftt,flushTotalTime;default:false;text-align:right;desc:time spent in flush");
View File
@@ -109,8 +109,8 @@ public class RestShardsAction extends AbstractCatAction {
table.addCell("fielddata.memory_size", "alias:fm,fielddataMemory;default:false;text-align:right;desc:used fielddata cache");
table.addCell("fielddata.evictions", "alias:fe,fielddataEvictions;default:false;text-align:right;desc:fielddata evictions");
table.addCell("query_cache.memory_size", "alias:fcm,queryCacheMemory;default:false;text-align:right;desc:used query cache");
table.addCell("query_cache.memory_size", "alias:qcm,queryCacheMemory;default:false;text-align:right;desc:used query cache");
table.addCell("query_cache.evictions", "alias:fce,queryCacheEvictions;default:false;text-align:right;desc:query cache evictions");
table.addCell("query_cache.evictions", "alias:qce,queryCacheEvictions;default:false;text-align:right;desc:query cache evictions");
table.addCell("flush.total", "alias:ft,flushTotal;default:false;text-align:right;desc:number of flushes");
table.addCell("flush.total_time", "alias:ftt,flushTotalTime;default:false;text-align:right;desc:time spent in flush");
View File
@@ -549,8 +549,14 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
indexShard, scriptService, pageCacheRecycler, bigArrays, threadPool.estimatedTimeInMillisCounter(), parseFieldMatcher,
defaultSearchTimeout, fetchPhase);
context.getQueryShardContext().setFieldStatsProvider(new FieldStatsProvider(engineSearcher, indexService.mapperService()));
request.rewrite(context.getQueryShardContext());
SearchContext.setCurrent(context);
request.rewrite(context.getQueryShardContext());
// reset that we have used nowInMillis from the context since it may
// have been rewritten so it's no longer in the query and the request can
// be cached. If it is still present in the request (e.g. in a range
// aggregation) it will still be caught when the aggregation is
// evaluated.
context.resetNowInMillisUsed();
try {
if (request.scroll() != null) {
context.scrollContext(new ScrollContext());
View File
@@ -149,6 +149,10 @@ public abstract class SearchContext implements Releasable {
return nowInMillisUsed;
}
public final void resetNowInMillisUsed() {
this.nowInMillisUsed = false;
}
protected abstract long nowInMillisImpl();
public abstract ScrollContext scrollContext();
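Note on the two hunks above: reading the clock through nowInMillis() marks the context as time-dependent, which disqualifies the request from the request cache; the new resetNowInMillisUsed() exists because rewriting may have folded "now" into constant values, as the comment in SearchService explains. A minimal self-contained sketch of the flag's life cycle, with illustrative names rather than the real SearchContext internals:

// Illustrative sketch only; the actual flag lives in SearchContext.
class TimeTrackingContext {
    private boolean nowInMillisUsed = false;

    // Reading the clock taints the context as time-dependent.
    public final long nowInMillis() {
        nowInMillisUsed = true;
        return System.currentTimeMillis();
    }

    public final boolean nowInMillisUsed() {
        return nowInMillisUsed;
    }

    // Cleared once rewriting has replaced "now" with concrete values,
    // so the rewritten request can be cached after all.
    public final void resetNowInMillisUsed() {
        nowInMillisUsed = false;
    }
}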
View File
@@ -19,6 +19,7 @@
package org.elasticsearch.search.sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
@@ -27,8 +28,14 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.search.MultiValueMode;
import java.io.IOException;
import java.util.Objects;
@@ -47,6 +54,13 @@ public class FieldSortBuilder extends SortBuilder<FieldSortBuilder> implements S
public static final ParseField SORT_MODE = new ParseField("mode");
public static final ParseField UNMAPPED_TYPE = new ParseField("unmapped_type");
/**
* special field name to sort by index order
*/
public static final String DOC_FIELD_NAME = "_doc";
private static final SortField SORT_DOC = new SortField(null, SortField.Type.DOC);
private static final SortField SORT_DOC_REVERSE = new SortField(null, SortField.Type.DOC, true);
private final String fieldName;
private Object missing;
@@ -161,7 +175,7 @@ public class FieldSortBuilder extends SortBuilder<FieldSortBuilder> implements S
* TODO should the above getters and setters be deprecated/ changed in
* favour of real getters and setters?
*/
public FieldSortBuilder setNestedFilter(QueryBuilder nestedFilter) {
public FieldSortBuilder setNestedFilter(QueryBuilder<?> nestedFilter) {
this.nestedFilter = nestedFilter;
return this;
}
@@ -170,7 +184,7 @@ public class FieldSortBuilder extends SortBuilder<FieldSortBuilder> implements S
* Returns the nested filter that the nested objects should match with in
* order to be taken into account for sorting.
*/
public QueryBuilder getNestedFilter() {
public QueryBuilder<?> getNestedFilter() {
return this.nestedFilter;
}
@@ -219,6 +233,49 @@ public class FieldSortBuilder extends SortBuilder<FieldSortBuilder> implements S
return builder;
}
@Override
public SortField build(QueryShardContext context) throws IOException {
if (DOC_FIELD_NAME.equals(fieldName)) {
if (order == SortOrder.DESC) {
return SORT_DOC_REVERSE;
} else {
return SORT_DOC;
}
} else {
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType == null) {
if (unmappedType != null) {
fieldType = context.getMapperService().unmappedFieldType(unmappedType);
} else {
throw new QueryShardException(context, "No mapping found for [" + fieldName + "] in order to sort on");
}
}
if (!fieldType.isSortable()) {
throw new QueryShardException(context, "Sorting not supported for field[" + fieldName + "]");
}
MultiValueMode localSortMode = null;
if (sortMode != null) {
localSortMode = MultiValueMode.fromString(sortMode.toString());
}
if (fieldType.isNumeric() == false && (sortMode == SortMode.SUM || sortMode == SortMode.AVG || sortMode == SortMode.MEDIAN)) {
throw new QueryShardException(context, "we only support AVG, MEDIAN and SUM on number based fields");
}
boolean reverse = (order == SortOrder.DESC);
if (localSortMode == null) {
localSortMode = reverse ? MultiValueMode.MAX : MultiValueMode.MIN;
}
final Nested nested = resolveNested(context, nestedPath, nestedFilter);
IndexFieldData.XFieldComparatorSource fieldComparatorSource = context.getForField(fieldType)
.comparatorSource(missing, localSortMode, nested);
return new SortField(fieldType.name(), fieldComparatorSource, reverse);
}
}
@Override
public boolean equals(Object other) {
if (this == other) {
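The new build(QueryShardContext) turns the parsed builder into a Lucene SortField on the shard, where mappings and field data are available. A hedged usage sketch; the single-argument FieldSortBuilder constructor assumed here is not part of this diff:

import java.io.IOException;
import org.apache.lucene.search.SortField;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.sort.FieldSortBuilder;

final class SortFieldResolver {
    // Resolves a field sort against shard-local mappings; per the code above,
    // an unmapped field without unmapped_type raises QueryShardException.
    static SortField resolve(String fieldName, QueryShardContext context) throws IOException {
        FieldSortBuilder builder = new FieldSortBuilder(fieldName); // assumed constructor
        return builder.build(context);
    }
}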
View File
@@ -19,8 +19,18 @@
package org.elasticsearch.search.sort;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.FieldComparator;
import org.apache.lucene.search.SortField;
import org.apache.lucene.util.BitSet;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoDistance.FixedSourceDistance;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.io.stream.StreamInput;
@@ -28,8 +38,17 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.fielddata.MultiGeoPointValues;
import org.elasticsearch.index.fielddata.NumericDoubleValues;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.MultiValueMode;
import java.io.IOException;
import java.util.ArrayList;
@@ -45,6 +64,14 @@ public class GeoDistanceSortBuilder extends SortBuilder<GeoDistanceSortBuilder>
public static final String NAME = "_geo_distance";
public static final boolean DEFAULT_COERCE = false;
public static final boolean DEFAULT_IGNORE_MALFORMED = false;
public static final ParseField UNIT_FIELD = new ParseField("unit");
public static final ParseField REVERSE_FIELD = new ParseField("reverse");
public static final ParseField DISTANCE_TYPE_FIELD = new ParseField("distance_type");
public static final ParseField COERCE_FIELD = new ParseField("coerce", "normalize");
public static final ParseField IGNORE_MALFORMED_FIELD = new ParseField("ignore_malformed");
public static final ParseField SORTMODE_FIELD = new ParseField("mode", "sort_mode");
public static final ParseField NESTED_PATH_FIELD = new ParseField("nested_path");
public static final ParseField NESTED_FILTER_FIELD = new ParseField("nested_filter");
static final GeoDistanceSortBuilder PROTOTYPE = new GeoDistanceSortBuilder("", -1, -1);
@@ -280,22 +307,22 @@ public class GeoDistanceSortBuilder extends SortBuilder<GeoDistanceSortBuilder>
}
builder.endArray();
builder.field("unit", unit);
builder.field(UNIT_FIELD.getPreferredName(), unit);
builder.field("distance_type", geoDistance.name().toLowerCase(Locale.ROOT));
builder.field(DISTANCE_TYPE_FIELD.getPreferredName(), geoDistance.name().toLowerCase(Locale.ROOT));
builder.field(ORDER_FIELD.getPreferredName(), order);
if (sortMode != null) {
builder.field("mode", sortMode);
builder.field(SORTMODE_FIELD.getPreferredName(), sortMode);
}
if (nestedPath != null) {
builder.field("nested_path", nestedPath);
builder.field(NESTED_PATH_FIELD.getPreferredName(), nestedPath);
}
if (nestedFilter != null) {
builder.field("nested_filter", nestedFilter, params);
builder.field(NESTED_FILTER_FIELD.getPreferredName(), nestedFilter, params);
}
builder.field("coerce", coerce);
builder.field(COERCE_FIELD.getPreferredName(), coerce);
builder.field("ignore_malformed", ignoreMalformed);
builder.field(IGNORE_MALFORMED_FIELD.getPreferredName(), ignoreMalformed);
builder.endObject();
return builder;
@@ -383,6 +410,7 @@ public class GeoDistanceSortBuilder extends SortBuilder<GeoDistanceSortBuilder>
@Override
public GeoDistanceSortBuilder fromXContent(QueryParseContext context, String elementName) throws IOException {
XContentParser parser = context.parser();
ParseFieldMatcher parseFieldMatcher = context.parseFieldMatcher();
String fieldName = null;
List<GeoPoint> geoPoints = new ArrayList<>();
DistanceUnit unit = DistanceUnit.DEFAULT;
@@ -405,40 +433,37 @@ public class GeoDistanceSortBuilder extends SortBuilder<GeoDistanceSortBuilder>
fieldName = currentName;
} else if (token == XContentParser.Token.START_OBJECT) {
// the json in the format of -> field : { lat : 30, lon : 12 }
if (parseFieldMatcher.match(currentName, NESTED_FILTER_FIELD)) {
if ("nested_filter".equals(currentName) || "nestedFilter".equals(currentName)) {
// TODO Note to remember: while this is kept as a QueryBuilder internally,
// we need to make sure to call toFilter() on it once on the shard
// (e.g. in the new build() method)
nestedFilter = context.parseInnerQueryBuilder();
} else {
// the json in the format of -> field : { lat : 30, lon : 12 }
fieldName = currentName;
GeoPoint point = new GeoPoint();
GeoUtils.parseGeoPoint(parser, point);
geoPoints.add(point);
}
} else if (token.isValue()) {
if ("reverse".equals(currentName)) {
if (parseFieldMatcher.match(currentName, REVERSE_FIELD)) {
order = parser.booleanValue() ? SortOrder.DESC : SortOrder.ASC;
} else if ("order".equals(currentName)) {
} else if (parseFieldMatcher.match(currentName, ORDER_FIELD)) {
order = SortOrder.fromString(parser.text());
} else if ("unit".equals(currentName)) {
} else if (parseFieldMatcher.match(currentName, UNIT_FIELD)) {
unit = DistanceUnit.fromString(parser.text());
} else if ("distance_type".equals(currentName) || "distanceType".equals(currentName)) {
} else if (parseFieldMatcher.match(currentName, DISTANCE_TYPE_FIELD)) {
geoDistance = GeoDistance.fromString(parser.text());
} else if ("coerce".equals(currentName) || "normalize".equals(currentName)) {
} else if (parseFieldMatcher.match(currentName, COERCE_FIELD)) {
coerce = parser.booleanValue();
if (coerce == true) {
ignoreMalformed = true;
}
} else if ("ignore_malformed".equals(currentName)) {
} else if (parseFieldMatcher.match(currentName, IGNORE_MALFORMED_FIELD)) {
boolean ignore_malformed_value = parser.booleanValue();
if (coerce == false) {
ignoreMalformed = ignore_malformed_value;
}
} else if ("sort_mode".equals(currentName) || "sortMode".equals(currentName) || "mode".equals(currentName)) {
} else if (parseFieldMatcher.match(currentName, SORTMODE_FIELD)) {
sortMode = SortMode.fromString(parser.text());
} else if ("nested_path".equals(currentName) || "nestedPath".equals(currentName)) {
} else if (parseFieldMatcher.match(currentName, NESTED_PATH_FIELD)) {
nestedPath = parser.text();
} else {
GeoPoint point = new GeoPoint();
@@ -461,7 +486,85 @@ public class GeoDistanceSortBuilder extends SortBuilder<GeoDistanceSortBuilder>
result.coerce(coerce);
result.ignoreMalformed(ignoreMalformed);
return result;
}
@Override
public SortField build(QueryShardContext context) throws IOException {
final boolean indexCreatedBeforeV2_0 = context.indexVersionCreated().before(Version.V_2_0_0);
// validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed on 2.x created indexes
List<GeoPoint> localPoints = new ArrayList<GeoPoint>();
for (GeoPoint geoPoint : this.points) {
localPoints.add(new GeoPoint(geoPoint));
}
if (!indexCreatedBeforeV2_0 && !ignoreMalformed) {
for (GeoPoint point : localPoints) {
if (GeoUtils.isValidLatitude(point.lat()) == false) {
throw new ElasticsearchParseException("illegal latitude value [{}] for [GeoDistanceSort]", point.lat());
}
if (GeoUtils.isValidLongitude(point.lon()) == false) {
throw new ElasticsearchParseException("illegal longitude value [{}] for [GeoDistanceSort]", point.lon());
}
}
}
if (coerce) {
for (GeoPoint point : localPoints) {
GeoUtils.normalizePoint(point, coerce, coerce);
}
}
boolean reverse = (order == SortOrder.DESC);
final MultiValueMode finalSortMode;
if (sortMode == null) {
finalSortMode = reverse ? MultiValueMode.MAX : MultiValueMode.MIN;
} else {
finalSortMode = MultiValueMode.fromString(sortMode.toString());
}
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType == null) {
throw new IllegalArgumentException("failed to find mapper for [" + fieldName + "] for geo distance based sort");
}
final IndexGeoPointFieldData geoIndexFieldData = context.getForField(fieldType);
final FixedSourceDistance[] distances = new FixedSourceDistance[localPoints.size()];
for (int i = 0; i < localPoints.size(); i++) {
distances[i] = geoDistance.fixedSourceDistance(localPoints.get(i).lat(), localPoints.get(i).lon(), unit);
}
final Nested nested = resolveNested(context, nestedPath, nestedFilter);
IndexFieldData.XFieldComparatorSource geoDistanceComparatorSource = new IndexFieldData.XFieldComparatorSource() {
@Override
public SortField.Type reducedType() {
return SortField.Type.DOUBLE;
}
@Override
public FieldComparator<?> newComparator(String fieldname, int numHits, int sortPos, boolean reversed) throws IOException {
return new FieldComparator.DoubleComparator(numHits, null, null) {
@Override
protected NumericDocValues getNumericDocValues(LeafReaderContext context, String field) throws IOException {
final MultiGeoPointValues geoPointValues = geoIndexFieldData.load(context).getGeoPointValues();
final SortedNumericDoubleValues distanceValues = GeoDistance.distanceValues(geoPointValues, distances);
final NumericDoubleValues selectedValues;
if (nested == null) {
selectedValues = finalSortMode.select(distanceValues, Double.MAX_VALUE);
} else {
final BitSet rootDocs = nested.rootDocs(context);
final DocIdSetIterator innerDocs = nested.innerDocs(context);
selectedValues = finalSortMode.select(distanceValues, Double.MAX_VALUE, rootDocs, innerDocs,
context.reader().maxDoc());
}
return selectedValues.getRawDoubleValues();
}
};
}
};
return new SortField(fieldName, geoDistanceComparatorSource, reverse);
}
static void parseGeoPoints(XContentParser parser, List<GeoPoint> geoPoints) throws IOException {
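Two details of the build() above are worth calling out: each incoming GeoPoint is copied into localPoints so coerce-normalization never mutates the builder's own state, and the bounds check is skipped for pre-2.0 indices to keep bwc percolation queries working. A standalone sketch of that bounds check, assuming standard WGS84 limits (the real checks are GeoUtils.isValidLatitude/isValidLongitude):

// Assumed bounds for illustration; not the GeoUtils source.
final class GeoBoundsSketch {
    static boolean isValidLatitude(double lat) {
        return lat >= -90.0 && lat <= 90.0;
    }

    static boolean isValidLongitude(double lon) {
        return lon >= -180.0 && lon <= 180.0;
    }

    public static void main(String[] args) {
        System.out.println(isValidLatitude(91.0));    // false -> "illegal latitude value"
        System.out.println(isValidLongitude(-179.9)); // true
    }
}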
View File
@@ -62,7 +62,7 @@ public class GeoDistanceSortParser implements SortParser {
}
@Override
public SortField parse(XContentParser parser, QueryShardContext context) throws Exception {
public SortField parse(XContentParser parser, QueryShardContext context) throws IOException {
String fieldName = null;
List<GeoPoint> geoPoints = new ArrayList<>();
DistanceUnit unit = DistanceUnit.DEFAULT;
View File
@@ -19,6 +19,7 @@
package org.elasticsearch.search.sort;
import org.apache.lucene.search.SortField;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
@@ -27,6 +28,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import java.io.IOException;
import java.util.Objects;
@@ -40,6 +42,8 @@ public class ScoreSortBuilder extends SortBuilder<ScoreSortBuilder> implements S
static final ScoreSortBuilder PROTOTYPE = new ScoreSortBuilder();
public static final ParseField REVERSE_FIELD = new ParseField("reverse");
public static final ParseField ORDER_FIELD = new ParseField("order");
private static final SortField SORT_SCORE = new SortField(null, SortField.Type.SCORE);
private static final SortField SORT_SCORE_REVERSE = new SortField(null, SortField.Type.SCORE, true);
public ScoreSortBuilder() {
// order defaults to desc when sorting on the _score
@@ -84,6 +88,14 @@ public class ScoreSortBuilder extends SortBuilder<ScoreSortBuilder> implements S
return result;
}
public SortField build(QueryShardContext context) {
if (order == SortOrder.DESC) {
return SORT_SCORE;
} else {
return SORT_SCORE_REVERSE;
}
}
@Override
public boolean equals(Object object) {
if (this == object) {
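The branch in build() looks inverted at first glance but is correct: Lucene's SortField.Type.SCORE orders documents best-score-first by default, so descending order maps to the plain constant and only ascending needs the reversed one. A minimal Lucene-only illustration:

import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;

final class ScoreSortSketch {
    // reverse=true flips Lucene's default best-first score ordering to ascending.
    static Sort byScore(boolean ascending) {
        return new Sort(new SortField(null, SortField.Type.SCORE, ascending));
    }
}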
View File
@@ -19,6 +19,12 @@
package org.elasticsearch.search.sort;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.SortField;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
@@ -27,14 +33,29 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.FieldData;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.NumericDoubleValues;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource;
import org.elasticsearch.index.fielddata.fieldcomparator.DoubleValuesComparatorSource;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.script.LeafSearchScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.Script.ScriptField;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptParameterParser;
import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.MultiValueMode;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
@@ -56,7 +77,7 @@ public class ScriptSortBuilder extends SortBuilder<ScriptSortBuilder> implements
private final Script script;
private ScriptSortType type;
private final ScriptSortType type;
private SortMode sortMode;
@@ -104,11 +125,15 @@ public class ScriptSortBuilder extends SortBuilder<ScriptSortBuilder> implements
}
/**
* Defines which distance to use for sorting in the case a document contains multiple geo points.
* Defines which distance to use for sorting in the case a document contains multiple values.<br>
* Possible values: min and max
* For {@link ScriptSortType#STRING}, the set of possible values is restricted to {@link SortMode#MIN} and {@link SortMode#MAX}
*/
public ScriptSortBuilder sortMode(SortMode sortMode) {
Objects.requireNonNull(sortMode, "sort mode cannot be null.");
if (ScriptSortType.STRING.equals(type) && (sortMode == SortMode.SUM || sortMode == SortMode.AVG ||
sortMode == SortMode.MEDIAN)) {
throw new IllegalArgumentException("script sort of type [string] doesn't support mode [" + sortMode + "]");
}
this.sortMode = sortMode;
return this;
}
@@ -244,6 +269,75 @@ public class ScriptSortBuilder extends SortBuilder<ScriptSortBuilder> implements
return result;
}
@Override
public SortField build(QueryShardContext context) throws IOException {
final SearchScript searchScript = context.getScriptService().search(
context.lookup(), script, ScriptContext.Standard.SEARCH, Collections.emptyMap());
MultiValueMode valueMode = null;
if (sortMode != null) {
valueMode = MultiValueMode.fromString(sortMode.toString());
}
boolean reverse = (order == SortOrder.DESC);
if (valueMode == null) {
valueMode = reverse ? MultiValueMode.MAX : MultiValueMode.MIN;
}
final Nested nested = resolveNested(context, nestedPath, nestedFilter);
final IndexFieldData.XFieldComparatorSource fieldComparatorSource;
switch (type) {
case STRING:
fieldComparatorSource = new BytesRefFieldComparatorSource(null, null, valueMode, nested) {
LeafSearchScript leafScript;
@Override
protected SortedBinaryDocValues getValues(LeafReaderContext context) throws IOException {
leafScript = searchScript.getLeafSearchScript(context);
final BinaryDocValues values = new BinaryDocValues() {
final BytesRefBuilder spare = new BytesRefBuilder();
@Override
public BytesRef get(int docID) {
leafScript.setDocument(docID);
spare.copyChars(leafScript.run().toString());
return spare.get();
}
};
return FieldData.singleton(values, null);
}
@Override
protected void setScorer(Scorer scorer) {
leafScript.setScorer(scorer);
}
};
break;
case NUMBER:
fieldComparatorSource = new DoubleValuesComparatorSource(null, Double.MAX_VALUE, valueMode, nested) {
LeafSearchScript leafScript;
@Override
protected SortedNumericDoubleValues getValues(LeafReaderContext context) throws IOException {
leafScript = searchScript.getLeafSearchScript(context);
final NumericDoubleValues values = new NumericDoubleValues() {
@Override
public double get(int docID) {
leafScript.setDocument(docID);
return leafScript.runAsDouble();
}
};
return FieldData.singleton(values, null);
}
@Override
protected void setScorer(Scorer scorer) {
leafScript.setScorer(scorer);
}
};
break;
default:
throw new QueryShardException(context, "custom script sort type [" + type + "] not supported");
}
return new SortField("_script", fieldComparatorSource, reverse);
}
@Override
public boolean equals(Object object) {
if (this == object) {
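Both switch branches of build() follow the same adapter pattern: obtain a per-segment LeafSearchScript, then expose its per-document output through a doc-values-style view the comparator machinery can consume. The same idea reduced to self-contained, illustrative types (not the ES API):

// Stand-ins for LeafSearchScript and NumericDoubleValues from the code above.
interface PerDocScript {
    void setDocument(int docId);
    double runAsDouble();
}

final class ScriptDoubleValues {
    private final PerDocScript script;

    ScriptDoubleValues(PerDocScript script) {
        this.script = script;
    }

    // Mirrors NumericDoubleValues.get(docID) in the NUMBER branch.
    double get(int docId) {
        script.setDocument(docId);
        return script.runAsDouble();
    }
}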
View File
@@ -66,7 +66,7 @@ public class ScriptSortParser implements SortParser {
}
@Override
public SortField parse(XContentParser parser, QueryShardContext context) throws Exception {
public SortField parse(XContentParser parser, QueryShardContext context) throws IOException {
ScriptParameterParser scriptParameterParser = new ScriptParameterParser();
Script script = null;
ScriptSortType type = null;
@@ -140,7 +140,6 @@ public class ScriptSortParser implements SortParser {
sortMode = reverse ? MultiValueMode.MAX : MultiValueMode.MIN;
}
// If nested_path is specified, then wrap the `fieldComparatorSource` in a `NestedFieldComparatorSource`
final Nested nested;
if (nestedHelper != null && nestedHelper.getPath() != null) {
BitSetProducer rootDocumentsFilter = context.bitsetFilter(Queries.newNonNestedFilter());
@@ -182,7 +181,6 @@ public class ScriptSortParser implements SortParser {
};
break;
case NUMBER:
// TODO: should we rather sort missing values last?
fieldComparatorSource = new DoubleValuesComparatorSource(null, Double.MAX_VALUE, sortMode, nested) {
LeafSearchScript leafScript;
@Override
View File
@@ -19,12 +19,21 @@
package org.elasticsearch.search.sort;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.join.BitSetProducer;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import java.io.IOException;
import java.util.Objects;
/**
@@ -32,6 +41,30 @@ import java.util.Objects;
*/
public abstract class SortBuilder<T extends SortBuilder<?>> implements ToXContent {
protected static Nested resolveNested(QueryShardContext context, String nestedPath, QueryBuilder<?> nestedFilter) throws IOException {
Nested nested = null;
if (nestedPath != null) {
BitSetProducer rootDocumentsFilter = context.bitsetFilter(Queries.newNonNestedFilter());
ObjectMapper nestedObjectMapper = context.getObjectMapper(nestedPath);
if (nestedObjectMapper == null) {
throw new QueryShardException(context, "[nested] failed to find nested object under path [" + nestedPath + "]");
}
if (!nestedObjectMapper.nested().isNested()) {
throw new QueryShardException(context, "[nested] nested object under path [" + nestedPath + "] is not of nested type");
}
Query innerDocumentsQuery;
if (nestedFilter != null) {
context.nestedScope().nextLevel(nestedObjectMapper);
innerDocumentsQuery = QueryBuilder.rewriteQuery(nestedFilter, context).toFilter(context);
context.nestedScope().previousLevel();
} else {
innerDocumentsQuery = nestedObjectMapper.nestedTypeFilter();
}
nested = new Nested(rootDocumentsFilter, innerDocumentsQuery);
}
return nested;
}
protected SortOrder order = SortOrder.ASC;
public static final ParseField ORDER_FIELD = new ParseField("order");
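resolveNested centralizes the nested-sort plumbing the individual parsers used to duplicate. The non-obvious step is the scope handling: nextLevel(...) is called before the inner filter is rewritten, so nested queries resolve relative to the right object mapper, and previousLevel() restores the scope afterwards. The same discipline in schematic form (illustrative types; the try/finally is an extra safeguard the code above does not need, since a failed rewrite aborts the whole build):

import java.util.function.Supplier;

interface NestedScopeSketch {
    void nextLevel();     // stands in for nestedScope().nextLevel(mapper)
    void previousLevel(); // stands in for nestedScope().previousLevel()
}

final class ScopedRewrite {
    static <T> T withNestedScope(NestedScopeSketch scope, Supplier<T> rewrite) {
        scope.nextLevel();
        try {
            return rewrite.get();
        } finally {
            scope.previousLevel();
        }
    }
}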
View File
@@ -19,9 +19,11 @@
package org.elasticsearch.search.sort;
import org.apache.lucene.search.SortField;
import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext;
import java.io.IOException;
@@ -36,5 +38,10 @@ public interface SortBuilderParser<T extends ToXContent> extends NamedWriteable<
* call
* @return the new item
*/
SortBuilder fromXContent(QueryParseContext context, String elementName) throws IOException;
T fromXContent(QueryParseContext context, String elementName) throws IOException;
/**
* Create a {@link SortField} from this builder.
*/
SortField build(QueryShardContext context) throws IOException;
}
View File
@@ -30,10 +30,11 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.index.query.support.NestedInnerQueryParseSupport;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@@ -49,7 +50,7 @@ import static java.util.Collections.unmodifiableMap;
*/
public class SortParseElement implements SearchParseElement {
public static final SortField SORT_SCORE = new SortField(null, SortField.Type.SCORE);
private static final SortField SORT_SCORE = new SortField(null, SortField.Type.SCORE);
private static final SortField SORT_SCORE_REVERSE = new SortField(null, SortField.Type.SCORE, true);
private static final SortField SORT_DOC = new SortField(null, SortField.Type.DOC);
private static final SortField SORT_DOC_REVERSE = new SortField(null, SortField.Type.DOC, true);
@@ -75,26 +76,8 @@ public class SortParseElement implements SearchParseElement {
}
@Override
public void parse(XContentParser parser, SearchContext context) throws Exception {
public void parse(XContentParser parser, SearchContext context) throws IOException {
XContentParser.Token token = parser.currentToken();
List<SortField> sortFields = parse(parser, context.getQueryShardContext());
List<SortField> sortFields = new ArrayList<>(2);
if (token == XContentParser.Token.START_ARRAY) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if (token == XContentParser.Token.START_OBJECT) {
addCompoundSortField(parser, context, sortFields);
} else if (token == XContentParser.Token.VALUE_STRING) {
addSortField(context, sortFields, parser.text(), false, null, null, null, null);
} else {
throw new IllegalArgumentException("malformed sort format, within the sort array, an object, or an actual string are allowed");
}
}
} else if (token == XContentParser.Token.VALUE_STRING) {
addSortField(context, sortFields, parser.text(), false, null, null, null, null);
} else if (token == XContentParser.Token.START_OBJECT) {
addCompoundSortField(parser, context, sortFields);
} else {
throw new IllegalArgumentException("malformed sort format, either start with array, object, or an actual string");
}
if (!sortFields.isEmpty()) {
// optimize if we just sort on score non reversed, we don't really need sorting
boolean sort;
@@ -114,7 +97,30 @@ public class SortParseElement implements SearchParseElement {
}
}
private void addCompoundSortField(XContentParser parser, SearchContext context, List<SortField> sortFields) throws Exception {
List<SortField> parse(XContentParser parser, QueryShardContext context) throws IOException {
XContentParser.Token token = parser.currentToken();
List<SortField> sortFields = new ArrayList<>(2);
if (token == XContentParser.Token.START_ARRAY) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if (token == XContentParser.Token.START_OBJECT) {
addCompoundSortField(parser, context, sortFields);
} else if (token == XContentParser.Token.VALUE_STRING) {
addSortField(context, sortFields, parser.text(), false, null, null, null, null);
} else {
throw new IllegalArgumentException("malformed sort format, within the sort array, an object, or an actual string are allowed");
}
}
} else if (token == XContentParser.Token.VALUE_STRING) {
addSortField(context, sortFields, parser.text(), false, null, null, null, null);
} else if (token == XContentParser.Token.START_OBJECT) {
addCompoundSortField(parser, context, sortFields);
} else {
throw new IllegalArgumentException("malformed sort format, either start with array, object, or an actual string");
}
return sortFields;
}
private void addCompoundSortField(XContentParser parser, QueryShardContext context, List<SortField> sortFields) throws IOException {
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
@@ -138,7 +144,7 @@ public class SortParseElement implements SearchParseElement {
addSortField(context, sortFields, fieldName, reverse, unmappedType, missing, sortMode, nestedFilterParseHelper);
} else {
if (PARSERS.containsKey(fieldName)) {
sortFields.add(PARSERS.get(fieldName).parse(parser, context.getQueryShardContext()));
sortFields.add(PARSERS.get(fieldName).parse(parser, context));
} else {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
@@ -160,7 +166,7 @@ public class SortParseElement implements SearchParseElement {
sortMode = MultiValueMode.fromString(parser.text());
} else if ("nested_path".equals(innerJsonName) || "nestedPath".equals(innerJsonName)) {
if (nestedFilterParseHelper == null) {
nestedFilterParseHelper = new NestedInnerQueryParseSupport(parser, context.getQueryShardContext());
nestedFilterParseHelper = new NestedInnerQueryParseSupport(parser, context);
}
nestedFilterParseHelper.setPath(parser.text());
} else {
@@ -169,7 +175,7 @@ public class SortParseElement implements SearchParseElement {
} else if (token == XContentParser.Token.START_OBJECT) {
if ("nested_filter".equals(innerJsonName) || "nestedFilter".equals(innerJsonName)) {
if (nestedFilterParseHelper == null) {
nestedFilterParseHelper = new NestedInnerQueryParseSupport(parser, context.getQueryShardContext());
nestedFilterParseHelper = new NestedInnerQueryParseSupport(parser, context);
}
nestedFilterParseHelper.filter();
} else {
@@ -184,7 +190,7 @@ public class SortParseElement implements SearchParseElement {
}
}
private void addSortField(SearchContext context, List<SortField> sortFields, String fieldName, boolean reverse, String unmappedType, @Nullable final String missing, MultiValueMode sortMode, NestedInnerQueryParseSupport nestedHelper) throws IOException {
private void addSortField(QueryShardContext context, List<SortField> sortFields, String fieldName, boolean reverse, String unmappedType, @Nullable final String missing, MultiValueMode sortMode, NestedInnerQueryParseSupport nestedHelper) throws IOException {
if (SCORE_FIELD_NAME.equals(fieldName)) {
if (reverse) {
sortFields.add(SORT_SCORE_REVERSE);
@@ -198,28 +204,19 @@ public class SortParseElement implements SearchParseElement {
sortFields.add(SORT_DOC);
}
} else {
MappedFieldType fieldType = context.smartNameFieldType(fieldName);
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType == null) {
if (unmappedType != null) {
fieldType = context.mapperService().unmappedFieldType(unmappedType);
fieldType = context.getMapperService().unmappedFieldType(unmappedType);
} else {
throw new SearchParseException(context, "No mapping found for [" + fieldName + "] in order to sort on", null);
throw new QueryShardException(context, "No mapping found for [" + fieldName + "] in order to sort on");
}
}
if (!fieldType.isSortable()) {
throw new SearchParseException(context, "Sorting not supported for field[" + fieldName + "]", null);
throw new QueryShardException(context, "Sorting not supported for field[" + fieldName + "]");
}
// Enable when we also know how to detect fields that do tokenize, but only emit one token
/*if (fieldMapper instanceof StringFieldMapper) {
StringFieldMapper stringFieldMapper = (StringFieldMapper) fieldMapper;
if (stringFieldMapper.fieldType().tokenized()) {
// Fail early
throw new SearchParseException(context, "Can't sort on tokenized string field[" + fieldName + "]");
}
}*/
// We only support AVG and SUM on number based fields
if (fieldType.isNumeric() == false && (sortMode == MultiValueMode.SUM || sortMode == MultiValueMode.AVG)) {
sortMode = null;
@@ -230,7 +227,7 @@ public class SortParseElement implements SearchParseElement {
final Nested nested;
if (nestedHelper != null && nestedHelper.getPath() != null) {
BitSetProducer rootDocumentsFilter = context.bitsetFilterCache().getBitSetProducer(Queries.newNonNestedFilter());
BitSetProducer rootDocumentsFilter = context.bitsetFilter(Queries.newNonNestedFilter());
Query innerDocumentsQuery;
if (nestedHelper.filterFound()) {
innerDocumentsQuery = nestedHelper.getInnerFilter();
@@ -242,7 +239,7 @@ public class SortParseElement implements SearchParseElement {
nested = null;
}
IndexFieldData.XFieldComparatorSource fieldComparatorSource = context.fieldData().getForField(fieldType)
IndexFieldData.XFieldComparatorSource fieldComparatorSource = context.getForField(fieldType)
.comparatorSource(missing, sortMode, nested);
sortFields.add(new SortField(fieldType.name(), fieldComparatorSource, reverse));
}
View File
@@ -23,6 +23,8 @@ import org.apache.lucene.search.SortField;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryShardContext;
import java.io.IOException;
/**
*
*/
@@ -30,5 +32,5 @@ public interface SortParser {
String[] names();
SortField parse(XContentParser parser, QueryShardContext context) throws Exception;
SortField parse(XContentParser parser, QueryShardContext context) throws IOException;
}
View File
@@ -157,6 +157,33 @@ public class BootstrapCheckTests extends ESTestCase {
BootstrapCheck.check(true, Collections.singletonList(check));
}
public void testMaxSizeVirtualMemory() {
final long limit = JNACLibrary.RLIM_INFINITY;
final AtomicLong maxSizeVirtualMemory = new AtomicLong(randomInt());
final BootstrapCheck.MaxSizeVirtualMemoryCheck check = new BootstrapCheck.MaxSizeVirtualMemoryCheck() {
@Override
long getMaxSizeVirtualMemory() {
return maxSizeVirtualMemory.get();
}
};
try {
BootstrapCheck.check(true, Collections.singletonList(check));
fail("should have failed due to max size virtual memory too low");
} catch (final RuntimeException e) {
assertThat(e.getMessage(), containsString("max size virtual memory"));
}
maxSizeVirtualMemory.set(limit);
BootstrapCheck.check(true, Collections.singletonList(check));
// nothing should happen if max size virtual memory is not
// available
maxSizeVirtualMemory.set(Long.MIN_VALUE);
BootstrapCheck.check(true, Collections.singletonList(check));
}
public void testEnforceLimits() {
final Set<Setting> enforceSettings = BootstrapCheck.enforceSettings();
final Setting setting = randomFrom(Arrays.asList(enforceSettings.toArray(new Setting[enforceSettings.size()])));
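testMaxSizeVirtualMemory leans on a common seam: the production check reads the rlimit through an overridable getter, and the test subclasses the check to feed it synthetic values, with Long.MIN_VALUE standing in for "the limit could not be read". The shape of that seam, with illustrative names rather than the real BootstrapCheck API:

class VirtualMemoryCheckSketch {
    static final long UNLIMITED = -1L; // stands in for JNACLibrary.RLIM_INFINITY

    // Production would ask the OS via JNA; tests override this method.
    long getMaxSizeVirtualMemory() {
        return UNLIMITED;
    }

    boolean fails() {
        final long limit = getMaxSizeVirtualMemory();
        // Long.MIN_VALUE means the value was unavailable: never fail then.
        return limit != Long.MIN_VALUE && limit != UNLIMITED;
    }
}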
View File
@@ -28,6 +28,7 @@ import org.elasticsearch.client.Requests;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.logging.ESLogger;
@@ -35,6 +36,7 @@ import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.discovery.zen.elect.ElectMasterService;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.MapperParsingException;
@@ -46,6 +48,8 @@ import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;
import org.elasticsearch.test.InternalTestCluster.RestartCallback;
import java.io.IOException;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
@@ -65,14 +69,16 @@ public class GatewayIndexStateIT extends ESIntegTestCase {
logger.info("--> creating test index, with meta routing");
client().admin().indices().prepareCreate("test")
.addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("_routing").field("required", true).endObject().endObject().endObject())
.addMapping("type1", XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("_routing")
.field("required", true).endObject().endObject().endObject())
.execute().actionGet(); .execute().actionGet();
logger.info("--> waiting for yellow status"); logger.info("--> waiting for yellow status");
ensureYellow(); ensureYellow();
logger.info("--> verify meta _routing required exists"); logger.info("--> verify meta _routing required exists");
MappingMetaData mappingMd = client().admin().cluster().prepareState().execute().actionGet().getState().metaData().index("test").mapping("type1"); MappingMetaData mappingMd = client().admin().cluster().prepareState().execute().actionGet().getState().metaData()
.index("test").mapping("type1");
assertThat(mappingMd.routing().required(), equalTo(true)); assertThat(mappingMd.routing().required(), equalTo(true));
logger.info("--> restarting nodes..."); logger.info("--> restarting nodes...");
@@ -101,7 +107,8 @@ public class GatewayIndexStateIT extends ESIntegTestCase {
ClusterStateResponse stateResponse = client().admin().cluster().prepareState().execute().actionGet();
assertThat(stateResponse.getState().metaData().index("test").getState(), equalTo(IndexMetaData.State.OPEN));
assertThat(stateResponse.getState().routingTable().index("test").shards().size(), equalTo(test.numPrimaries));
assertThat(stateResponse.getState().routingTable().index("test").shardsWithState(ShardRoutingState.STARTED).size(),
equalTo(test.totalNumShards));
logger.info("--> indexing a simple document");
client().prepareIndex("test", "type1", "1").setSource("field1", "value1").get();
@@ -138,7 +145,8 @@ public class GatewayIndexStateIT extends ESIntegTestCase {
stateResponse = client().admin().cluster().prepareState().execute().actionGet();
assertThat(stateResponse.getState().metaData().index("test").getState(), equalTo(IndexMetaData.State.OPEN));
assertThat(stateResponse.getState().routingTable().index("test").shards().size(), equalTo(test.numPrimaries));
assertThat(stateResponse.getState().routingTable().index("test").shardsWithState(ShardRoutingState.STARTED).size(),
equalTo(test.totalNumShards));
logger.info("--> trying to get the indexed document on the first index");
GetResponse getResponse = client().prepareGet("test", "type1", "1").execute().actionGet();
@@ -176,7 +184,8 @@ public class GatewayIndexStateIT extends ESIntegTestCase {
stateResponse = client().admin().cluster().prepareState().execute().actionGet();
assertThat(stateResponse.getState().metaData().index("test").getState(), equalTo(IndexMetaData.State.OPEN));
assertThat(stateResponse.getState().routingTable().index("test").shards().size(), equalTo(test.numPrimaries));
assertThat(stateResponse.getState().routingTable().index("test").shardsWithState(ShardRoutingState.STARTED).size(),
equalTo(test.totalNumShards));
logger.info("--> trying to get the indexed document on the first round (before close and shutdown)");
getResponse = client().prepareGet("test", "type1", "1").execute().actionGet();
@@ -202,7 +211,8 @@ public class GatewayIndexStateIT extends ESIntegTestCase {
internalCluster().startNode(settingsBuilder().put(Node.NODE_DATA_SETTING.getKey(), false).build());
logger.info("--> waiting for test index to be created");
ClusterHealthResponse health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setIndices("test")
.execute().actionGet();
assertThat(health.isTimedOut(), equalTo(false));
logger.info("--> verify we have an index");
@@ -236,7 +246,8 @@ public class GatewayIndexStateIT extends ESIntegTestCase {
client().prepareIndex("test", "type1", "1").setSource("field1", "value1").setRefresh(true).execute().actionGet();
logger.info("--> waiting for green status");
ClusterHealthResponse health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus()
.setWaitForNodes("2").execute().actionGet();
assertThat(health.isTimedOut(), equalTo(false));
logger.info("--> verify 1 doc in the index");
@@ -255,7 +266,8 @@ public class GatewayIndexStateIT extends ESIntegTestCase {
client().admin().indices().prepareOpen("test").execute().actionGet();
logger.info("--> waiting for green status");
health = client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().setWaitForNodes("2")
.execute().actionGet();
assertThat(health.isTimedOut(), equalTo(false));
logger.info("--> verify 1 doc in the index");
@@ -300,7 +312,8 @@ public class GatewayIndexStateIT extends ESIntegTestCase {
ensureGreen();
// make sure that any other events were processed
assertFalse(client().admin().cluster().prepareHealth().setWaitForRelocatingShards(0).setWaitForEvents(Priority.LANGUID).get()
.isTimedOut());
logger.info("--> verify we read the right thing through alias");
assertThat(client().prepareGet("test", "type1", "2").execute().actionGet().isExists(), equalTo(true));
@@ -492,4 +505,44 @@ public class GatewayIndexStateIT extends ESIntegTestCase {
logger.info("--> verify 1 doc in the index");
assertHitCount(client().prepareSearch().setQuery(matchQuery("field1", "value one")).get(), 1L);
}
public void testArchiveBrokenClusterSettings() throws Exception {
logger.info("--> starting one node");
internalCluster().startNode();
client().prepareIndex("test", "type1", "1").setSource("field1", "value1").setRefresh(true).execute().actionGet();
logger.info("--> waiting for green status");
if (usually()) {
ensureYellow();
} else {
internalCluster().startNode();
client().admin().cluster()
.health(Requests.clusterHealthRequest()
.waitForGreenStatus()
.waitForEvents(Priority.LANGUID)
.waitForRelocatingShards(0).waitForNodes("2")).actionGet();
}
ClusterState state = client().admin().cluster().prepareState().get().getState();
MetaData metaData = state.getMetaData();
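// deliberately corrupt the persisted global metadata on every node: add an unknown persistent setting
// plus an unparsable value for a known one, so state recovery after the restart has to archive both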
for (NodeEnvironment nodeEnv : internalCluster().getInstances(NodeEnvironment.class)) {
MetaData brokenMeta = MetaData.builder(metaData).persistentSettings(Settings.builder()
.put(metaData.persistentSettings()).put("this.is.unknown", true)
.put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), "broken").build()).build();
MetaData.FORMAT.write(brokenMeta, metaData.version(), nodeEnv.nodeDataPaths());
}
internalCluster().fullRestart();
ensureYellow("test"); // wait for state recovery
state = client().admin().cluster().prepareState().get().getState();
assertEquals("true", state.metaData().persistentSettings().get("archived.this.is.unknown"));
assertEquals("broken", state.metaData().persistentSettings().get("archived."
+ ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey()));
// delete these settings
client().admin().cluster().prepareUpdateSettings().setPersistentSettings(Settings.builder().putNull("archived.*")).get();
state = client().admin().cluster().prepareState().get().getState();
assertNull(state.metaData().persistentSettings().get("archived.this.is.unknown"));
assertNull(state.metaData().persistentSettings().get("archived."
+ ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey()));
assertHitCount(client().prepareSearch().setQuery(matchAllQuery()).get(), 1L);
}
}


@@ -20,6 +20,7 @@ package org.elasticsearch.index;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaDataIndexUpgradeService;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.IndexScopedSettings;
import org.elasticsearch.common.settings.Setting;
@@ -28,10 +29,12 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.indices.mapper.MapperRegistry;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.VersionUtils;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.TimeUnit;
@@ -40,10 +43,10 @@ import java.util.function.Function;
public class IndexSettingsTests extends ESTestCase {
public void testRunListener() {
Version version = VersionUtils.getPreviousVersion();
Settings theSettings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version)
.put(IndexMetaData.SETTING_INDEX_UUID, "0xdeadbeef").build();
final AtomicInteger integer = new AtomicInteger(0);
Setting<Integer> integerSetting = Setting.intSetting("index.test.setting.int", -1,
Property.Dynamic, Property.IndexScope);
@@ -57,7 +60,8 @@ public class IndexSettingsTests extends ESTestCase {
assertFalse(settings.updateIndexMetaData(metaData));
assertEquals(metaData.getSettings().getAsMap(), settings.getSettings().getAsMap());
assertEquals(0, integer.get());
assertTrue(settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(theSettings).put("index.test.setting.int", 42)
.build())));
assertEquals(42, integer.get());
}
@@ -77,13 +81,15 @@ public class IndexSettingsTests extends ESTestCase {
settings.getScopedSettings().addSettingsUpdateConsumer(notUpdated, builder::append);
assertEquals(0, integer.get());
assertEquals("", builder.toString());
IndexMetaData newMetaData = newIndexMeta("index", Settings.builder().put(settings.getIndexMetaData().getSettings())
.put("index.test.setting.int", 42).build());
assertTrue(settings.updateIndexMetaData(newMetaData));
assertSame(settings.getIndexMetaData(), newMetaData);
assertEquals(42, integer.get());
assertEquals("", builder.toString());
integer.set(0);
assertTrue(settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(settings.getIndexMetaData().getSettings())
.put("index.not.updated", "boom").build())));
assertEquals("boom", builder.toString());
assertEquals("not updated - we preserve the old settings", 0, integer.get());
@@ -91,21 +97,25 @@ public class IndexSettingsTests extends ESTestCase {
public void testSettingsConsistency() {
Version version = VersionUtils.getPreviousVersion();
IndexMetaData metaData = newIndexMeta("index", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version)
.build());
IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY);
assertEquals(version, settings.getIndexVersionCreated());
assertEquals("_na_", settings.getUUID());
try {
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED,
Version.CURRENT).put("index.test.setting.int", 42).build()));
fail("version has changed");
} catch (IllegalArgumentException ex) {
assertTrue(ex.getMessage(), ex.getMessage().startsWith("version mismatch on settings update expected: "));
}
metaData = newIndexMeta("index", Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexMetaData.SETTING_INDEX_UUID, "0xdeadbeef").build());
settings = new IndexSettings(metaData, Settings.EMPTY);
try {
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED,
Version.CURRENT).put("index.test.setting.int", 42).build()));
fail("uuid missing/change");
} catch (IllegalArgumentException ex) {
assertEquals("uuid mismatch on settings update expected: 0xdeadbeef but was: _na_", ex.getMessage());
@@ -118,7 +128,8 @@ public class IndexSettingsTests extends ESTestCase {
if (settings.length > 0) {
settingSet.addAll(Arrays.asList(settings));
}
return new IndexSettings(metaData, nodeSettings, (idx) -> Regex.simpleMatch(idx, metaData.getIndex().getName()),
new IndexScopedSettings(Settings.EMPTY, settingSet));
}
@@ -172,7 +183,8 @@ public class IndexSettingsTests extends ESTestCase {
.build());
IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY);
assertEquals(Translog.Durability.ASYNC, settings.getTranslogDurability());
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(),
"request").build()));
assertEquals(Translog.Durability.REQUEST, settings.getTranslogDurability());
metaData = newIndexMeta("index", Settings.settingsBuilder()
@@ -189,7 +201,8 @@ public class IndexSettingsTests extends ESTestCase {
.build());
IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY);
assertFalse(settings.isWarmerEnabled());
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.INDEX_WARMER_ENABLED_SETTING.getKey(),
"true").build()));
assertTrue(settings.isWarmerEnabled());
metaData = newIndexMeta("index", Settings.settingsBuilder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
@@ -205,10 +218,13 @@ public class IndexSettingsTests extends ESTestCase {
.put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(), refreshInterval)
.build());
IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY);
assertEquals(TimeValue.parseTimeValue(refreshInterval, new TimeValue(1, TimeUnit.DAYS),
IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey()), settings.getRefreshInterval());
String newRefreshInterval = getRandomTimeString();
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey(),
newRefreshInterval).build()));
assertEquals(TimeValue.parseTimeValue(newRefreshInterval, new TimeValue(1, TimeUnit.DAYS),
IndexSettings.INDEX_REFRESH_INTERVAL_SETTING.getKey()), settings.getRefreshInterval());
}
private String getRandomTimeString() {
@@ -227,7 +243,8 @@ public class IndexSettingsTests extends ESTestCase {
.build());
IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY);
assertEquals(15, settings.getMaxResultWindow());
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey(),
42).build()));
assertEquals(42, settings.getMaxResultWindow());
settings.updateIndexMetaData(newIndexMeta("index", Settings.EMPTY));
assertEquals(IndexSettings.MAX_RESULT_WINDOW_SETTING.get(Settings.EMPTY).intValue(), settings.getMaxResultWindow());
@@ -246,11 +263,15 @@ public class IndexSettingsTests extends ESTestCase {
.put(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(), gcDeleteSetting.getStringRep())
.build());
IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY);
assertEquals(TimeValue.parseTimeValue(gcDeleteSetting.getStringRep(), new TimeValue(1, TimeUnit.DAYS),
IndexSettings.INDEX_GC_DELETES_SETTING.getKey()).getMillis(), settings.getGcDeletesInMillis());
TimeValue newGCDeleteSetting = new TimeValue(Math.abs(randomInt()), TimeUnit.MILLISECONDS);
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(),
newGCDeleteSetting.getStringRep()).build()));
assertEquals(TimeValue.parseTimeValue(newGCDeleteSetting.getStringRep(), new TimeValue(1, TimeUnit.DAYS),
IndexSettings.INDEX_GC_DELETES_SETTING.getKey()).getMillis(), settings.getGcDeletesInMillis());
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(),
randomBoolean() ? -1 : new TimeValue(-1, TimeUnit.MILLISECONDS)).build()));
assertEquals(-1, settings.getGcDeletesInMillis());
}
@@ -261,7 +282,8 @@ public class IndexSettingsTests extends ESTestCase {
.build());
IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY);
assertFalse(settings.isTTLPurgeDisabled());
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder().put(IndexSettings.INDEX_TTL_DISABLE_PURGE_SETTING.getKey(),
"true").build()));
assertTrue(settings.isTTLPurgeDisabled());
settings.updateIndexMetaData(newIndexMeta("index", Settings.EMPTY));
@@ -276,7 +298,8 @@ public class IndexSettingsTests extends ESTestCase {
public void testTranslogFlushSizeThreshold() {
ByteSizeValue translogFlushThresholdSize = new ByteSizeValue(Math.abs(randomInt()));
ByteSizeValue actualValue = ByteSizeValue.parseBytesSizeValue(translogFlushThresholdSize.toString(),
IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey());
IndexMetaData metaData = newIndexMeta("index", Settings.settingsBuilder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), translogFlushThresholdSize.toString())
@@ -284,8 +307,33 @@ public class IndexSettingsTests extends ESTestCase {
IndexSettings settings = new IndexSettings(metaData, Settings.EMPTY);
assertEquals(actualValue, settings.getFlushThresholdSize());
ByteSizeValue newTranslogFlushThresholdSize = new ByteSizeValue(Math.abs(randomInt()));
ByteSizeValue actualNewTranslogFlushThresholdSize = ByteSizeValue.parseBytesSizeValue(newTranslogFlushThresholdSize.toString(),
IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey());
settings.updateIndexMetaData(newIndexMeta("index", Settings.builder()
.put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), newTranslogFlushThresholdSize.toString()).build()));
assertEquals(actualNewTranslogFlushThresholdSize, settings.getFlushThresholdSize());
}
public void testArchiveBrokenIndexSettings() {
Settings settings = IndexScopedSettings.DEFAULT_SCOPED_SETTINGS.archiveUnknownOrBrokenSettings(Settings.EMPTY);
assertSame(settings, Settings.EMPTY);
settings = IndexScopedSettings.DEFAULT_SCOPED_SETTINGS.archiveUnknownOrBrokenSettings(Settings.builder()
.put("index.refresh_interval", "-200").build());
assertEquals("-200", settings.get("archived.index.refresh_interval"));
assertNull(settings.get("index.refresh_interval"));
Settings prevSettings = settings; // no double archive
settings = IndexScopedSettings.DEFAULT_SCOPED_SETTINGS.archiveUnknownOrBrokenSettings(prevSettings);
assertSame(prevSettings, settings);
settings = IndexScopedSettings.DEFAULT_SCOPED_SETTINGS.archiveUnknownOrBrokenSettings(Settings.builder()
.put("index.version.created", Version.CURRENT.id) // private setting
.put("index.unknown", "foo")
.put("index.refresh_interval", "2s").build());
assertEquals("foo", settings.get("archived.index.unknown"));
assertEquals(Integer.toString(Version.CURRENT.id), settings.get("index.version.created"));
assertEquals("2s", settings.get("index.refresh_interval"));
}
}


@@ -21,8 +21,23 @@ package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.util.CharArraySet;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ESTestCase;
import java.io.BufferedWriter;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.CharacterCodingException;
import java.nio.charset.Charset;
import java.nio.charset.MalformedInputException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.List;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.hamcrest.Matchers.is;
@@ -42,4 +57,55 @@ public class AnalysisTests extends ESTestCase {
assertThat(set.contains("bar"), is(true));
assertThat(set.contains("baz"), is(false));
}
public void testParseNonExistingFile() {
Path tempDir = createTempDir();
Settings nodeSettings = Settings.builder()
.put("foo.bar_path", tempDir.resolve("foo.dict"))
.put(Environment.PATH_HOME_SETTING.getKey(), tempDir).build();
Environment env = new Environment(nodeSettings);
IllegalArgumentException ex = expectThrows(IllegalArgumentException.class,
() -> Analysis.getWordList(env, nodeSettings, "foo.bar"));
assertEquals("IOException while reading foo.bar_path: " + tempDir.resolve("foo.dict").toString(), ex.getMessage());
assertTrue(ex.getCause().toString(), ex.getCause() instanceof FileNotFoundException
|| ex.getCause() instanceof NoSuchFileException);
}
public void testParseFalseEncodedFile() throws IOException {
Path tempDir = createTempDir();
Path dict = tempDir.resolve("foo.dict");
Settings nodeSettings = Settings.builder()
.put("foo.bar_path", dict)
.put(Environment.PATH_HOME_SETTING.getKey(), tempDir).build();
try (OutputStream writer = Files.newOutputStream(dict)) {
writer.write(new byte[]{(byte) 0xff, 0x00, 0x00}); // some invalid UTF-8
writer.write('\n');
}
Environment env = new Environment(nodeSettings);
IllegalArgumentException ex = expectThrows(IllegalArgumentException.class,
() -> Analysis.getWordList(env, nodeSettings, "foo.bar"));
assertEquals("Unsupported character encoding detected while reading foo.bar_path: " + tempDir.resolve("foo.dict").toString()
+ " - files must be UTF-8 encoded" , ex.getMessage());
assertTrue(ex.getCause().toString(), ex.getCause() instanceof MalformedInputException
|| ex.getCause() instanceof CharacterCodingException);
}
public void testParseWordList() throws IOException {
Path tempDir = createTempDir();
Path dict = tempDir.resolve("foo.dict");
Settings nodeSettings = Settings.builder()
.put("foo.bar_path", dict)
.put(Environment.PATH_HOME_SETTING.getKey(), tempDir).build();
try (BufferedWriter writer = Files.newBufferedWriter(dict, StandardCharsets.UTF_8)) {
writer.write("hello");
writer.write('\n');
writer.write("world");
writer.write('\n');
}
Environment env = new Environment(nodeSettings);
List<String> wordList = Analysis.getWordList(env, nodeSettings, "foo.bar");
assertEquals(Arrays.asList("hello", "world"), wordList);
}
}


@@ -0,0 +1,93 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.elasticsearch.Version;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.mapper.object.DynamicTemplate;
import org.elasticsearch.test.ESTestCase;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
public class DynamicTemplateTests extends ESTestCase {
public void testParseUnknownParam() throws Exception {
Map<String, Object> templateDef = new HashMap<>();
templateDef.put("match_mapping_type", "string");
templateDef.put("mapping", Collections.singletonMap("store", true));
templateDef.put("random_param", "random_value");
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0));
assertEquals("Illegal dynamic template parameter: [random_param]", e.getMessage());
// but no issues on 2.x for bw compat
DynamicTemplate template = DynamicTemplate.parse("my_template", templateDef, Version.V_2_3_0);
XContentBuilder builder = JsonXContent.contentBuilder();
template.toXContent(builder, ToXContent.EMPTY_PARAMS);
assertEquals("{\"match_mapping_type\":\"string\",\"mapping\":{\"store\":true}}", builder.string());
}
public void testSerialization() throws Exception {
// type-based template
Map<String, Object> templateDef = new HashMap<>();
templateDef.put("match_mapping_type", "string");
templateDef.put("mapping", Collections.singletonMap("store", true));
DynamicTemplate template = DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0);
XContentBuilder builder = JsonXContent.contentBuilder();
template.toXContent(builder, ToXContent.EMPTY_PARAMS);
assertEquals("{\"match_mapping_type\":\"string\",\"mapping\":{\"store\":true}}", builder.string());
// name-based template
templateDef = new HashMap<>();
templateDef.put("match", "*name");
templateDef.put("unmatch", "first_name");
templateDef.put("mapping", Collections.singletonMap("store", true));
template = DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0);
builder = JsonXContent.contentBuilder();
template.toXContent(builder, ToXContent.EMPTY_PARAMS);
assertEquals("{\"match\":\"*name\",\"unmatch\":\"first_name\",\"mapping\":{\"store\":true}}", builder.string());
// path-based template
templateDef = new HashMap<>();
templateDef.put("path_match", "*name");
templateDef.put("path_unmatch", "first_name");
templateDef.put("mapping", Collections.singletonMap("store", true));
template = DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0);
builder = JsonXContent.contentBuilder();
template.toXContent(builder, ToXContent.EMPTY_PARAMS);
assertEquals("{\"path_match\":\"*name\",\"path_unmatch\":\"first_name\",\"mapping\":{\"store\":true}}",
builder.string());
// regex matching
templateDef = new HashMap<>();
templateDef.put("match", "^a$");
templateDef.put("match_pattern", "regex");
templateDef.put("mapping", Collections.singletonMap("store", true));
template = DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0);
builder = JsonXContent.contentBuilder();
template.toXContent(builder, ToXContent.EMPTY_PARAMS);
assertEquals("{\"match\":\"^a$\",\"match_pattern\":\"regex\",\"mapping\":{\"store\":true}}", builder.string());
}
}


@@ -18,6 +18,9 @@
*/
package org.elasticsearch.index.mapper.internal;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.index.mapper.FieldTypeTestCase;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.junit.Before;
@@ -38,4 +41,15 @@ public class FieldNamesFieldTypeTests extends FieldTypeTestCase {
}
});
}
public void testTermQuery() {
FieldNamesFieldMapper.FieldNamesFieldType type = new FieldNamesFieldMapper.FieldNamesFieldType();
type.setName(FieldNamesFieldMapper.CONTENT_TYPE);
type.setEnabled(true);
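// with _field_names enabled, an exists-style term query becomes a plain TermQuery on the _field_names field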
Query termQuery = type.termQuery("field_name", null);
assertEquals(new TermQuery(new Term(FieldNamesFieldMapper.CONTENT_TYPE, "field_name")), termQuery);
type.setEnabled(false);
IllegalStateException e = expectThrows(IllegalStateException.class, () -> type.termQuery("field_name", null));
assertEquals("Cannot run [exists] queries if the [_field_names] field is disabled", e.getMessage());
}
}


@@ -24,7 +24,6 @@ import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import java.io.IOException;
import java.util.Collection;
@@ -55,13 +54,8 @@ public class ExistsQueryBuilderTests extends AbstractQueryTestCase<ExistsQueryBu
@Override
protected void doAssertLuceneQuery(ExistsQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
String fieldPattern = queryBuilder.fieldName();
ObjectMapper objectMapper = context.getObjectMapper(fieldPattern);
if (objectMapper != null) {
// automatic make the object mapper pattern
fieldPattern = fieldPattern + ".*";
}
Collection<String> fields = context.simpleMatchToIndexNames(fieldPattern);
if (getCurrentTypes().length == 0) {
assertThat(query, instanceOf(BooleanQuery.class));
BooleanQuery booleanQuery = (BooleanQuery) query;
assertThat(booleanQuery.clauses().size(), equalTo(0));


@@ -430,10 +430,8 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
assertThat(rewritten, instanceOf(RangeQueryBuilder.class));
RangeQueryBuilder rewrittenRange = (RangeQueryBuilder) rewritten;
assertThat(rewrittenRange.fieldName(), equalTo(fieldName));
assertThat(rewrittenRange.from(), equalTo(null));
assertThat(rewrittenRange.to(), equalTo(null));
assertThat(rewrittenRange.includeLower(), equalTo(true));
assertThat(rewrittenRange.includeUpper(), equalTo(true));
}
public void testRewriteLongToMatchNone() throws IOException {
@@ -509,10 +507,8 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
assertThat(rewritten, instanceOf(RangeQueryBuilder.class));
RangeQueryBuilder rewrittenRange = (RangeQueryBuilder) rewritten;
assertThat(rewrittenRange.fieldName(), equalTo(fieldName));
assertThat(rewrittenRange.from(), equalTo(null));
assertThat(rewrittenRange.to(), equalTo(null));
assertThat(rewrittenRange.includeLower(), equalTo(true));
assertThat(rewrittenRange.includeUpper(), equalTo(true));
}
public void testRewriteDoubleToMatchNone() throws IOException {
@@ -588,10 +584,8 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
assertThat(rewritten, instanceOf(RangeQueryBuilder.class));
RangeQueryBuilder rewrittenRange = (RangeQueryBuilder) rewritten;
assertThat(rewrittenRange.fieldName(), equalTo(fieldName));
assertThat(rewrittenRange.from(), equalTo(null));
assertThat(rewrittenRange.to(), equalTo(null));
assertThat(rewrittenRange.includeLower(), equalTo(true));
assertThat(rewrittenRange.includeUpper(), equalTo(true));
}
public void testRewriteFloatToMatchNone() throws IOException {
@@ -667,10 +661,8 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
assertThat(rewritten, instanceOf(RangeQueryBuilder.class));
RangeQueryBuilder rewrittenRange = (RangeQueryBuilder) rewritten;
assertThat(rewrittenRange.fieldName(), equalTo(fieldName));
assertThat(rewrittenRange.from(), equalTo(null));
assertThat(rewrittenRange.to(), equalTo(null));
assertThat(rewrittenRange.includeLower(), equalTo(true));
assertThat(rewrittenRange.includeUpper(), equalTo(true));
}
public void testRewriteTextToMatchNone() throws IOException {
@@ -746,10 +738,43 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
assertThat(rewritten, instanceOf(RangeQueryBuilder.class));
RangeQueryBuilder rewrittenRange = (RangeQueryBuilder) rewritten;
assertThat(rewrittenRange.fieldName(), equalTo(fieldName));
assertThat(rewrittenRange.from(), equalTo(null));
assertThat(rewrittenRange.to(), equalTo(null));
assertThat(rewrittenRange.includeLower(), equalTo(true));
assertThat(rewrittenRange.includeUpper(), equalTo(true));
}
public void testRewriteDateWithNowToMatchAll() throws IOException {
String fieldName = randomAsciiOfLengthBetween(1, 20);
RangeQueryBuilder query = new RangeQueryBuilder(fieldName);
String queryFromValue = "now-2d";
String queryToValue = "now";
DateTime shardMinValue = new DateTime().minusHours(12);
DateTime shardMaxValue = new DateTime().minusHours(24);
query.from(queryFromValue);
query.to(queryToValue);
QueryShardContext queryShardContext = queryShardContext();
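// a stub provider reporting the shard's values as lying entirely WITHIN the query range, so the rewrite may drop both bounds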
FieldStatsProvider fieldStatsProvider = new FieldStatsProvider(null, null) {
@Override
public Relation isFieldWithinQuery(String fieldName, Object from, Object to, boolean includeLower, boolean includeUpper,
DateTimeZone timeZone, DateMathParser dateMathParser) throws IOException {
return Relation.WITHIN;
}
@SuppressWarnings("unchecked")
@Override
public <T extends Comparable<T>> FieldStats<T> get(String field) throws IOException {
assertThat(field, equalTo(fieldName));
return (FieldStats<T>) new FieldStats.Date(randomLong(), randomLong(), randomLong(), randomLong(),
shardMinValue.getMillis(), shardMaxValue.getMillis(), null);
}
};
queryShardContext.setFieldStatsProvider(fieldStatsProvider);
QueryBuilder<?> rewritten = query.rewrite(queryShardContext);
assertThat(rewritten, instanceOf(RangeQueryBuilder.class));
RangeQueryBuilder rewrittenRange = (RangeQueryBuilder) rewritten;
assertThat(rewrittenRange.fieldName(), equalTo(fieldName));
assertThat(rewrittenRange.from(), equalTo(null));
assertThat(rewrittenRange.to(), equalTo(null));
}
public void testRewriteDateToMatchNone() throws IOException {
@@ -773,6 +798,27 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
assertThat(rewritten, instanceOf(MatchNoneQueryBuilder.class));
}
public void testRewriteDateWithNowToMatchNone() throws IOException {
String fieldName = randomAsciiOfLengthBetween(1, 20);
RangeQueryBuilder query = new RangeQueryBuilder(fieldName);
String queryFromValue = "now-2d";
String queryToValue = "now";
query.from(queryFromValue);
query.to(queryToValue);
QueryShardContext queryShardContext = queryShardContext();
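// DISJOINT: no value on this shard can match, so the query is expected to rewrite to match_none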
FieldStatsProvider fieldStatsProvider = new FieldStatsProvider(null, null) {
@Override
public Relation isFieldWithinQuery(String fieldName, Object from, Object to, boolean includeLower, boolean includeUpper,
DateTimeZone timeZone, DateMathParser dateMathParser) throws IOException {
return Relation.DISJOINT;
}
};
queryShardContext.setFieldStatsProvider(fieldStatsProvider);
QueryBuilder<?> rewritten = query.rewrite(queryShardContext);
assertThat(rewritten, instanceOf(MatchNoneQueryBuilder.class));
}
public void testRewriteDateToSame() throws IOException {
String fieldName = randomAsciiOfLengthBetween(1, 20);
RangeQueryBuilder query = new RangeQueryBuilder(fieldName);
@@ -793,4 +839,25 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
QueryBuilder<?> rewritten = query.rewrite(queryShardContext);
assertThat(rewritten, sameInstance(query));
}
public void testRewriteDateWithNowToSame() throws IOException {
String fieldName = randomAsciiOfLengthBetween(1, 20);
RangeQueryBuilder query = new RangeQueryBuilder(fieldName);
String queryFromValue = "now-2d";
String queryToValue = "now";
query.from(queryFromValue);
query.to(queryToValue);
QueryShardContext queryShardContext = queryShardContext();
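// INTERSECTS: the shard may contain both matching and non-matching values, so the query must be left as-is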
FieldStatsProvider fieldStatsProvider = new FieldStatsProvider(null, null) {
@Override
public Relation isFieldWithinQuery(String fieldName, Object from, Object to, boolean includeLower, boolean includeUpper,
DateTimeZone timeZone, DateMathParser dateMathParser) throws IOException {
return Relation.INTERSECTS;
}
};
queryShardContext.setFieldStatsProvider(fieldStatsProvider);
QueryBuilder<?> rewritten = query.rewrite(queryShardContext);
assertThat(rewritten, sameInstance(query));
}
}


@@ -27,7 +27,10 @@ import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInter
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket;
import org.elasticsearch.test.ESIntegTestCase;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.chrono.ISOChronology;
import java.util.List;
import static org.elasticsearch.search.aggregations.AggregationBuilders.dateHistogram;
@@ -233,4 +236,122 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
equalTo(1L));
}
public void testQueryRewriteDatesWithNow() throws Exception {
assertAcked(client().admin().indices().prepareCreate("index-1").addMapping("type", "d", "type=date")
.setSettings(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true, IndexMetaData.SETTING_NUMBER_OF_SHARDS,
1, IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
.get());
assertAcked(client().admin().indices().prepareCreate("index-2").addMapping("type", "d", "type=date")
.setSettings(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true, IndexMetaData.SETTING_NUMBER_OF_SHARDS,
1, IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
.get());
assertAcked(client().admin().indices().prepareCreate("index-3").addMapping("type", "d", "type=date")
.setSettings(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true, IndexMetaData.SETTING_NUMBER_OF_SHARDS,
1, IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
.get());
DateTime now = new DateTime(ISOChronology.getInstanceUTC());
indexRandom(true, client().prepareIndex("index-1", "type", "1").setSource("d", now),
client().prepareIndex("index-1", "type", "2").setSource("d", now.minusDays(1)),
client().prepareIndex("index-1", "type", "3").setSource("d", now.minusDays(2)),
client().prepareIndex("index-2", "type", "4").setSource("d", now.minusDays(3)),
client().prepareIndex("index-2", "type", "5").setSource("d", now.minusDays(4)),
client().prepareIndex("index-2", "type", "6").setSource("d", now.minusDays(5)),
client().prepareIndex("index-3", "type", "7").setSource("d", now.minusDays(6)),
client().prepareIndex("index-3", "type", "8").setSource("d", now.minusDays(7)),
client().prepareIndex("index-3", "type", "9").setSource("d", now.minusDays(8)));
ensureSearchable("index-1", "index-2", "index-3");
assertThat(
client().admin().indices().prepareStats("index-1").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
equalTo(0L));
assertThat(
client().admin().indices().prepareStats("index-1").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(),
equalTo(0L));
assertThat(
client().admin().indices().prepareStats("index-2").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
equalTo(0L));
assertThat(
client().admin().indices().prepareStats("index-2").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(),
equalTo(0L));
assertThat(
client().admin().indices().prepareStats("index-3").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
equalTo(0L));
assertThat(
client().admin().indices().prepareStats("index-3").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(),
equalTo(0L));
final SearchResponse r1 = client().prepareSearch("index-*").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0)
.setQuery(QueryBuilders.rangeQuery("d").gte("now-7d/d").lte("now")).get();
assertSearchResponse(r1);
assertThat(r1.getHits().getTotalHits(), equalTo(8L));
assertThat(
client().admin().indices().prepareStats("index-1").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
equalTo(0L));
assertThat(
client().admin().indices().prepareStats("index-1").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(),
equalTo(1L));
assertThat(
client().admin().indices().prepareStats("index-2").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
equalTo(0L));
assertThat(
client().admin().indices().prepareStats("index-2").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(),
equalTo(1L));
// Because the query will INTERSECT with the 3rd index it will not be
// rewritten and will still contain `now` so won't be recorded as a
// cache miss or cache hit since queries containing now can't be cached
assertThat(
client().admin().indices().prepareStats("index-3").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
equalTo(0L));
assertThat(
client().admin().indices().prepareStats("index-3").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(),
equalTo(0L));
final SearchResponse r2 = client().prepareSearch("index-*").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0)
.setQuery(QueryBuilders.rangeQuery("d").gte("now-7d/d").lte("now")).get();
assertSearchResponse(r2);
assertThat(r2.getHits().getTotalHits(), equalTo(8L));
assertThat(
client().admin().indices().prepareStats("index-1").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
equalTo(1L));
assertThat(
client().admin().indices().prepareStats("index-1").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(),
equalTo(1L));
assertThat(
client().admin().indices().prepareStats("index-2").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
equalTo(1L));
assertThat(
client().admin().indices().prepareStats("index-2").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(),
equalTo(1L));
assertThat(
client().admin().indices().prepareStats("index-3").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
equalTo(0L));
assertThat(
client().admin().indices().prepareStats("index-3").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(),
equalTo(0L));
final SearchResponse r3 = client().prepareSearch("index-*").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0)
.setQuery(QueryBuilders.rangeQuery("d").gte("now-7d/d").lte("now")).get();
assertSearchResponse(r3);
assertThat(r3.getHits().getTotalHits(), equalTo(8L));
assertThat(
client().admin().indices().prepareStats("index-1").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
equalTo(2L));
assertThat(
client().admin().indices().prepareStats("index-1").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(),
equalTo(1L));
assertThat(
client().admin().indices().prepareStats("index-2").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
equalTo(2L));
assertThat(
client().admin().indices().prepareStats("index-2").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(),
equalTo(1L));
assertThat(
client().admin().indices().prepareStats("index-3").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(),
equalTo(0L));
assertThat(
client().admin().indices().prepareStats("index-3").setRequestCache(true).get().getTotal().getRequestCache().getMissCount(),
equalTo(0L));
}
}
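
For context on the assertions above: whether a shard-level request is cacheable hinges on whether the rewritten query still depends on the current time. A minimal sketch of that decision, under the simplifying assumption that a date range rewrites to a match-all exactly when it fully covers the index's value range (the class and method names are illustrative, not Elasticsearch API):

[source,java]
--------------------------------------------------
import java.time.Instant;

// index-1 and index-2 hold only values inside [now-7d/d, now], so their
// rewritten query is time-independent and cacheable; index-3 merely
// intersects the range, keeps its `now`-dependent bound, and is skipped
// by the request cache (neither a hit nor a miss is recorded).
public class NowCacheabilitySketch {
    static boolean cacheable(Instant indexMin, Instant indexMax, Instant lower, Instant upper) {
        boolean rewritesToMatchAll = !indexMin.isBefore(lower) && !indexMax.isAfter(upper);
        return rewritesToMatchAll; // otherwise the query still contains "now"
    }
}
--------------------------------------------------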


@@ -22,8 +22,12 @@ package org.elasticsearch.recovery;
 import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
 import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
 import org.elasticsearch.action.admin.indices.stats.ShardStats;
+import org.elasticsearch.action.get.GetResponse;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.cluster.routing.Murmur3HashFunction;
+import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.common.Priority;
 import org.elasticsearch.common.logging.ESLogger;
 import org.elasticsearch.common.logging.Loggers;
@@ -31,6 +35,7 @@ import org.elasticsearch.common.math.MathUtils;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.shard.DocsStats;
+import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.index.translog.Translog;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.sort.SortOrder;
@@ -50,7 +55,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAllS
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout;
 
-@TestLogging("_root:DEBUG")
+@TestLogging("_root:DEBUG,index.shard:TRACE")
 public class RecoveryWhileUnderLoadIT extends ESIntegTestCase {
     private final ESLogger logger = Loggers.getLogger(RecoveryWhileUnderLoadIT.class);
@@ -270,14 +275,12 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase {
     private void iterateAssertCount(final int numberOfShards, final long numberOfDocs, final int iterations) throws Exception {
         SearchResponse[] iterationResults = new SearchResponse[iterations];
         boolean error = false;
-        SearchResponse lastErroneousResponse = null;
         for (int i = 0; i < iterations; i++) {
             SearchResponse searchResponse = client().prepareSearch().setSize((int) numberOfDocs).setQuery(matchAllQuery()).addSort("id", SortOrder.ASC).get();
             logSearchResponse(numberOfShards, numberOfDocs, i, searchResponse);
             iterationResults[i] = searchResponse;
             if (searchResponse.getHits().totalHits() != numberOfDocs) {
                 error = true;
-                lastErroneousResponse = searchResponse;
             }
         }
@@ -289,12 +292,21 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase {
                 logger.info("shard [{}] - count {}, primary {}", shardStats.getShardRouting().id(), docsStats.getCount(), shardStats.getShardRouting().primary());
             }
 
-            for (int doc = 1, hit = 0; hit < lastErroneousResponse.getHits().getHits().length; hit++, doc++) {
-                SearchHit searchHit = lastErroneousResponse.getHits().getAt(hit);
-                while (doc < Integer.parseInt(searchHit.id())) {
-                    logger.info("missing doc [{}], indexed to shard [{}]", doc, MathUtils.mod(Murmur3HashFunction.hash(Integer.toString(doc)), numberOfShards));
-                    doc++;
+            ClusterService clusterService = clusterService();
+            final ClusterState state = clusterService.state();
+            for (int shard = 0; shard < numberOfShards; shard++) {
+                // background indexer starts using ids on 1
+                for (int id = 1; id <= numberOfDocs; id++) {
+                    ShardId docShard = clusterService.operationRouting().shardId(state, "test", Long.toString(id), null);
+                    if (docShard.id() == shard) {
+                        for (ShardRouting shardRouting : state.routingTable().shardRoutingTable("test", shard)) {
+                            GetResponse response = client().prepareGet("test", "type", Long.toString(id))
+                                    .setPreference("_only_node:" + shardRouting.currentNodeId()).get();
+                            if (response.isExists()) {
+                                logger.info("missing id [{}] on shard {}", id, shardRouting);
+                            }
+                        }
+                    }
                 }
             }
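
The replacement loop resolves each id's shard through `OperationRouting` rather than the hand-rolled hash in the removed log line; the two agree, since document routing reduces a murmur3 hash of the id modulo the shard count. A one-method sketch of that equivalence (the helper name is illustrative):

[source,java]
--------------------------------------------------
import org.elasticsearch.cluster.routing.Murmur3HashFunction;

public class RoutingSketch {
    // The shard the removed logger.info line computed, and the one
    // OperationRouting#shardId resolves for an id with no custom routing.
    static int shardFor(String id, int numberOfShards) {
        return Math.floorMod(Murmur3HashFunction.hash(id), numberOfShards);
    }
}
--------------------------------------------------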


@@ -58,9 +58,6 @@ public class ExistsIT extends ESIntegTestCase {
         XContentBuilder mapping = XContentBuilder.builder(JsonXContent.jsonXContent)
                 .startObject()
                     .startObject("type")
-                        .startObject(FieldNamesFieldMapper.NAME)
-                            .field("enabled", randomBoolean())
-                        .endObject()
                         .startObject("properties")
                             .startObject("foo")
                                 .field("type", "text")
@@ -89,10 +86,10 @@ public class ExistsIT extends ESIntegTestCase {
                 .endObject();
 
         assertAcked(client().admin().indices().prepareCreate("idx").addMapping("type", mapping));
-        @SuppressWarnings("unchecked")
         Map<String, Object> barObject = new HashMap<>();
         barObject.put("foo", "bar");
         barObject.put("bar", singletonMap("bar", "foo"));
+        @SuppressWarnings("unchecked")
         final Map<String, Object>[] sources = new Map[] {
                 // simple property
                 singletonMap("foo", "bar"),


@@ -19,6 +19,8 @@
 package org.elasticsearch.search.sort;
 
+import org.apache.lucene.search.SortField;
+import org.apache.lucene.util.Accountable;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
@@ -30,27 +32,75 @@ import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.env.Environment;
+import org.elasticsearch.index.Index;
+import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
+import org.elasticsearch.index.fielddata.IndexFieldDataService;
+import org.elasticsearch.index.mapper.ContentPath;
+import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.mapper.Mapper.BuilderContext;
+import org.elasticsearch.index.mapper.core.DoubleFieldMapper.DoubleFieldType;
+import org.elasticsearch.index.mapper.object.ObjectMapper;
+import org.elasticsearch.index.mapper.object.ObjectMapper.Nested;
 import org.elasticsearch.index.query.QueryParseContext;
+import org.elasticsearch.index.query.QueryShardContext;
+import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
 import org.elasticsearch.indices.query.IndicesQueriesRegistry;
+import org.elasticsearch.script.CompiledScript;
+import org.elasticsearch.script.Script;
+import org.elasticsearch.script.ScriptContext;
+import org.elasticsearch.script.ScriptContextRegistry;
+import org.elasticsearch.script.ScriptEngineRegistry;
+import org.elasticsearch.script.ScriptService;
+import org.elasticsearch.script.ScriptServiceTests.TestEngineService;
+import org.elasticsearch.script.ScriptSettings;
 import org.elasticsearch.search.SearchModule;
 import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.IndexSettingsModule;
+import org.elasticsearch.watcher.ResourceWatcherService;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 
 import java.io.IOException;
+import java.nio.file.Path;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
 
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.not;
 
-public abstract class AbstractSortTestCase<T extends SortBuilder & SortBuilderParser<T>> extends ESTestCase {
+public abstract class AbstractSortTestCase<T extends SortBuilder<T> & SortBuilderParser<T>> extends ESTestCase {
 
     protected static NamedWriteableRegistry namedWriteableRegistry;
 
     private static final int NUMBER_OF_TESTBUILDERS = 20;
     static IndicesQueriesRegistry indicesQueriesRegistry;
+    private static SortParseElement parseElement = new SortParseElement();
+    private static ScriptService scriptService;
 
     @BeforeClass
-    public static void init() {
+    public static void init() throws IOException {
+        Path genericConfigFolder = createTempDir();
+        Settings baseSettings = Settings.builder()
+                .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
+                .put(Environment.PATH_CONF_SETTING.getKey(), genericConfigFolder)
+                .build();
+        Environment environment = new Environment(baseSettings);
+        ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Collections.emptyList());
+        ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry
+                .ScriptEngineRegistration(TestEngineService.class, TestEngineService.TYPES)));
+        ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
+        scriptService = new ScriptService(baseSettings, environment, Collections.singleton(new TestEngineService()),
+                new ResourceWatcherService(baseSettings, null), scriptEngineRegistry, scriptContextRegistry, scriptSettings) {
+            @Override
+            public CompiledScript compile(Script script, ScriptContext scriptContext, Map<String, String> params) {
+                return new CompiledScript(ScriptType.INLINE, "mockName", "test", script);
+            }
+        };
+
         namedWriteableRegistry = new NamedWriteableRegistry();
         namedWriteableRegistry.registerPrototype(SortBuilder.class, GeoDistanceSortBuilder.PROTOTYPE);
         namedWriteableRegistry.registerPrototype(SortBuilder.class, ScoreSortBuilder.PROTOTYPE);
@@ -97,13 +147,40 @@ public abstract class AbstractSortTestCase<T extends SortBuilder & SortBuilderPa
             QueryParseContext context = new QueryParseContext(indicesQueriesRegistry);
             context.reset(itemParser);
-            SortBuilder parsedItem = testItem.fromXContent(context, elementName);
+            T parsedItem = testItem.fromXContent(context, elementName);
             assertNotSame(testItem, parsedItem);
             assertEquals(testItem, parsedItem);
             assertEquals(testItem.hashCode(), parsedItem.hashCode());
         }
     }
 
+    /**
+     * Test that build() outputs a {@link SortField} similar to the one we would
+     * get when parsing the xContent the sort builder renders out.
+     */
+    public void testBuildSortField() throws IOException {
+        QueryShardContext mockShardContext = createMockShardContext();
+        for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) {
+            T sortBuilder = createTestItem();
+            SortField sortField = sortBuilder.build(mockShardContext);
+            XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
+            if (randomBoolean()) {
+                builder.prettyPrint();
+            }
+            builder.startObject();
+            sortBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS);
+            builder.endObject();
+            XContentParser parser = XContentHelper.createParser(builder.bytes());
+            parser.nextToken();
+            List<SortField> sortFields = parseElement.parse(parser, mockShardContext);
+            assertEquals(1, sortFields.size());
+            SortField sortFieldOldStyle = sortFields.get(0);
+            assertEquals(sortFieldOldStyle.getField(), sortField.getField());
+            assertEquals(sortFieldOldStyle.getReverse(), sortField.getReverse());
+            assertEquals(sortFieldOldStyle.getType(), sortField.getType());
+        }
+    }
+
     /**
      * Test serialization and deserialization of the test sort.
      */
@@ -148,8 +225,50 @@ public abstract class AbstractSortTestCase<T extends SortBuilder & SortBuilderPa
         }
     }
 
+    private QueryShardContext createMockShardContext() {
+        Index index = new Index(randomAsciiOfLengthBetween(1, 10), "_na_");
+        IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, Settings.EMPTY);
+        IndicesFieldDataCache cache = new IndicesFieldDataCache(Settings.EMPTY, null);
+        IndexFieldDataService ifds = new IndexFieldDataService(IndexSettingsModule.newIndexSettings("test", Settings.EMPTY),
+                cache, null, null);
+        BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(idxSettings, new BitsetFilterCache.Listener() {
+
+            @Override
+            public void onRemoval(ShardId shardId, Accountable accountable) {
+            }
+
+            @Override
+            public void onCache(ShardId shardId, Accountable accountable) {
+            }
+        });
+        return new QueryShardContext(idxSettings, bitsetFilterCache, ifds, null, null, scriptService,
+                indicesQueriesRegistry, null) {
+            @Override
+            public MappedFieldType fieldMapper(String name) {
+                return provideMappedFieldType(name);
+            }
+
+            @Override
+            public ObjectMapper getObjectMapper(String name) {
+                BuilderContext context = new BuilderContext(Settings.EMPTY, new ContentPath());
+                return new ObjectMapper.Builder<>(name).nested(Nested.newNested(false, false)).build(context);
+            }
+        };
+    }
+
+    /**
+     * Return a field type. We use {@link DoubleFieldType} by default since it is compatible with all sort modes.
+     * Tests that require a field type other than double can override this.
+     */
+    protected MappedFieldType provideMappedFieldType(String name) {
+        DoubleFieldType doubleFieldType = new DoubleFieldType();
+        doubleFieldType.setName(name);
+        doubleFieldType.setHasDocValues(true);
+        return doubleFieldType;
+    }
+
     @SuppressWarnings("unchecked")
-    protected T copyItem(T original) throws IOException {
+    private T copyItem(T original) throws IOException {
         try (BytesStreamOutput output = new BytesStreamOutput()) {
             original.writeTo(output);
             try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) {


@@ -25,7 +25,7 @@ public class FieldSortBuilderTests extends AbstractSortTestCase<FieldSortBuilder
     @Override
     protected FieldSortBuilder createTestItem() {
-        String fieldName = randomAsciiOfLengthBetween(1, 10);
+        String fieldName = rarely() ? SortParseElement.DOC_FIELD_NAME : randomAsciiOfLengthBetween(1, 10);
         FieldSortBuilder builder = new FieldSortBuilder(fieldName);
         if (randomBoolean()) {
             builder.order(RandomSortDataGenerator.order(builder.order()));


@@ -999,6 +999,42 @@ public class FieldSortIT extends ESIntegTestCase {
         assertThat(searchResponse.getHits().getAt(2).id(), equalTo(Integer.toString(3)));
         assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).longValue(), equalTo(2L));
 
+        searchResponse = client().prepareSearch()
+                .setQuery(matchAllQuery())
+                .setSize(10)
+                .addSort(SortBuilders.fieldSort("long_values").order(SortOrder.DESC).sortMode(SortMode.AVG))
+                .execute().actionGet();
+
+        assertThat(searchResponse.getHits().getTotalHits(), equalTo(3L));
+        assertThat(searchResponse.getHits().hits().length, equalTo(3));
+
+        assertThat(searchResponse.getHits().getAt(0).id(), equalTo(Integer.toString(2)));
+        assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).longValue(), equalTo(13L));
+
+        assertThat(searchResponse.getHits().getAt(1).id(), equalTo(Integer.toString(1)));
+        assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).longValue(), equalTo(6L));
+
+        assertThat(searchResponse.getHits().getAt(2).id(), equalTo(Integer.toString(3)));
+        assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).longValue(), equalTo(1L));
+
+        searchResponse = client().prepareSearch()
+                .setQuery(matchAllQuery())
+                .setSize(10)
+                .addSort(SortBuilders.fieldSort("long_values").order(SortOrder.DESC).sortMode(SortMode.MEDIAN))
+                .execute().actionGet();
+
+        assertThat(searchResponse.getHits().getTotalHits(), equalTo(3L));
+        assertThat(searchResponse.getHits().hits().length, equalTo(3));
+
+        assertThat(searchResponse.getHits().getAt(0).id(), equalTo(Integer.toString(2)));
+        assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).longValue(), equalTo(13L));
+
+        assertThat(searchResponse.getHits().getAt(1).id(), equalTo(Integer.toString(1)));
+        assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).longValue(), equalTo(7L));
+
+        assertThat(searchResponse.getHits().getAt(2).id(), equalTo(Integer.toString(3)));
+        assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).longValue(), equalTo(2L));
+
         searchResponse = client().prepareSearch()
                 .setQuery(matchAllQuery())
                 .setSize(10)

View File

@@ -50,6 +50,8 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSort
 import static org.hamcrest.Matchers.closeTo;
 
 public class GeoDistanceSortBuilderIT extends ESIntegTestCase {
+
+    private static final String LOCATION_FIELD = "location";
+
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
         return pluginList(InternalSettingsPlugin.class);
@@ -69,7 +71,7 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase {
          */
         Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
         Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        assertAcked(prepareCreate("index").setSettings(settings).addMapping("type", "location", "type=geo_point"));
+        assertAcked(prepareCreate("index").setSettings(settings).addMapping("type", LOCATION_FIELD, "type=geo_point"));
         XContentBuilder d1Builder = jsonBuilder();
         GeoPoint[] d1Points = {new GeoPoint(3, 2), new GeoPoint(4, 1)};
         createShuffeldJSONArray(d1Builder, d1Points);
@@ -95,7 +97,7 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase {
         SearchResponse searchResponse = client().prepareSearch()
                 .setQuery(matchAllQuery())
-                .addSort(new GeoDistanceSortBuilder("location", q).sortMode(SortMode.MIN).order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
+                .addSort(new GeoDistanceSortBuilder(LOCATION_FIELD, q).sortMode(SortMode.MIN).order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
                 .execute().actionGet();
         assertOrderedSearchHits(searchResponse, "d1", "d2");
         assertThat((Double)searchResponse.getHits().getAt(0).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2, 2, 3, 2, DistanceUnit.KILOMETERS), 0.01d));
@@ -103,7 +105,7 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase {
         searchResponse = client().prepareSearch()
                 .setQuery(matchAllQuery())
-                .addSort(new GeoDistanceSortBuilder("location", q).sortMode(SortMode.MIN).order(SortOrder.DESC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
+                .addSort(new GeoDistanceSortBuilder(LOCATION_FIELD, q).sortMode(SortMode.MIN).order(SortOrder.DESC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
                 .execute().actionGet();
         assertOrderedSearchHits(searchResponse, "d2", "d1");
         assertThat((Double)searchResponse.getHits().getAt(0).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2, 1, 5, 1, DistanceUnit.KILOMETERS), 0.01d));
@@ -111,7 +113,7 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase {
         searchResponse = client().prepareSearch()
                 .setQuery(matchAllQuery())
-                .addSort(new GeoDistanceSortBuilder("location", q).sortMode(SortMode.MAX).order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
+                .addSort(new GeoDistanceSortBuilder(LOCATION_FIELD, q).sortMode(SortMode.MAX).order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
                 .execute().actionGet();
         assertOrderedSearchHits(searchResponse, "d1", "d2");
         assertThat((Double)searchResponse.getHits().getAt(0).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2, 2, 4, 1, DistanceUnit.KILOMETERS), 0.01d));
@@ -119,18 +121,61 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase {
         searchResponse = client().prepareSearch()
                 .setQuery(matchAllQuery())
-                .addSort(new GeoDistanceSortBuilder("location", q).sortMode(SortMode.MAX).order(SortOrder.DESC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
+                .addSort(new GeoDistanceSortBuilder(LOCATION_FIELD, q).sortMode(SortMode.MAX).order(SortOrder.DESC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
                 .execute().actionGet();
         assertOrderedSearchHits(searchResponse, "d2", "d1");
         assertThat((Double)searchResponse.getHits().getAt(0).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2, 1, 6, 2, DistanceUnit.KILOMETERS), 0.01d));
         assertThat((Double)searchResponse.getHits().getAt(1).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2, 2, 4, 1, DistanceUnit.KILOMETERS), 0.01d));
     }
 
+    public void testSingeToManyAvgMedian() throws ExecutionException, InterruptedException, IOException {
+        /**
+         * q = (0, 0)
+         *
+         * d1 = (0, 1), (0, 4), (0, 10); so avg. distance is 5, median distance is 4
+         * d2 = (0, 1), (0, 5), (0, 6); so avg. distance is 4, median distance is 5
+         */
+        Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
+        Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
+        assertAcked(prepareCreate("index").setSettings(settings).addMapping("type", LOCATION_FIELD, "type=geo_point"));
+        XContentBuilder d1Builder = jsonBuilder();
+        GeoPoint[] d1Points = {new GeoPoint(0, 1), new GeoPoint(0, 4), new GeoPoint(0, 10)};
+        createShuffeldJSONArray(d1Builder, d1Points);
+
+        XContentBuilder d2Builder = jsonBuilder();
+        GeoPoint[] d2Points = {new GeoPoint(0, 1), new GeoPoint(0, 5), new GeoPoint(0, 6)};
+        createShuffeldJSONArray(d2Builder, d2Points);
+
+        logger.info("d1: {}", d1Builder);
+        logger.info("d2: {}", d2Builder);
+        indexRandom(true,
+                client().prepareIndex("index", "type", "d1").setSource(d1Builder),
+                client().prepareIndex("index", "type", "d2").setSource(d2Builder));
+        ensureYellow();
+        GeoPoint q = new GeoPoint(0,0);
+
+        SearchResponse searchResponse = client().prepareSearch()
+                .setQuery(matchAllQuery())
+                .addSort(new GeoDistanceSortBuilder(LOCATION_FIELD, q).sortMode(SortMode.AVG).order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
+                .execute().actionGet();
+        assertOrderedSearchHits(searchResponse, "d2", "d1");
+        assertThat((Double)searchResponse.getHits().getAt(0).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(0, 0, 0, 4, DistanceUnit.KILOMETERS), 0.01d));
+        assertThat((Double)searchResponse.getHits().getAt(1).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(0, 0, 0, 5, DistanceUnit.KILOMETERS), 0.01d));
+
+        searchResponse = client().prepareSearch()
+                .setQuery(matchAllQuery())
+                .addSort(new GeoDistanceSortBuilder(LOCATION_FIELD, q).sortMode(SortMode.MEDIAN).order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
+                .execute().actionGet();
+        assertOrderedSearchHits(searchResponse, "d1", "d2");
+        assertThat((Double)searchResponse.getHits().getAt(0).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(0, 0, 0, 4, DistanceUnit.KILOMETERS), 0.01d));
+        assertThat((Double)searchResponse.getHits().getAt(1).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(0, 0, 0, 5, DistanceUnit.KILOMETERS), 0.01d));
+    }
+
     protected void createShuffeldJSONArray(XContentBuilder builder, GeoPoint[] pointsArray) throws IOException {
         List<GeoPoint> points = new ArrayList<>();
         points.addAll(Arrays.asList(pointsArray));
         builder.startObject();
-        builder.startArray("location");
+        builder.startArray(LOCATION_FIELD);
         int numPoints = points.size();
         for (int i = 0; i < numPoints; i++) {
             builder.value(points.remove(randomInt(points.size() - 1)));
@@ -154,7 +199,7 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase {
          */
         Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
         Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-        assertAcked(prepareCreate("index").setSettings(settings).addMapping("type", "location", "type=geo_point"));
+        assertAcked(prepareCreate("index").setSettings(settings).addMapping("type", LOCATION_FIELD, "type=geo_point"));
         XContentBuilder d1Builder = jsonBuilder();
         GeoPoint[] d1Points = {new GeoPoint(2.5, 1), new GeoPoint(2.75, 2), new GeoPoint(3, 3), new GeoPoint(3.25, 4)};
         createShuffeldJSONArray(d1Builder, d1Points);
@@ -177,13 +222,13 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase {
             int at = randomInt(3 - i);
             if (randomBoolean()) {
                 if (geoDistanceSortBuilder == null) {
-                    geoDistanceSortBuilder = new GeoDistanceSortBuilder("location", qHashes.get(at));
+                    geoDistanceSortBuilder = new GeoDistanceSortBuilder(LOCATION_FIELD, qHashes.get(at));
                 } else {
                     geoDistanceSortBuilder.geohashes(qHashes.get(at));
                 }
             } else {
                 if (geoDistanceSortBuilder == null) {
-                    geoDistanceSortBuilder = new GeoDistanceSortBuilder("location", qPoints.get(at));
+                    geoDistanceSortBuilder = new GeoDistanceSortBuilder(LOCATION_FIELD, qPoints.get(at));
                 } else {
                     geoDistanceSortBuilder.points(qPoints.get(at));
                 }
@@ -211,15 +256,15 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase {
     }
 
     public void testSinglePointGeoDistanceSort() throws ExecutionException, InterruptedException, IOException {
-        assertAcked(prepareCreate("index").addMapping("type", "location", "type=geo_point"));
+        assertAcked(prepareCreate("index").addMapping("type", LOCATION_FIELD, "type=geo_point"));
         indexRandom(true,
-                client().prepareIndex("index", "type", "d1").setSource(jsonBuilder().startObject().startObject("location").field("lat", 1).field("lon", 1).endObject().endObject()),
-                client().prepareIndex("index", "type", "d2").setSource(jsonBuilder().startObject().startObject("location").field("lat", 1).field("lon", 2).endObject().endObject()));
+                client().prepareIndex("index", "type", "d1").setSource(jsonBuilder().startObject().startObject(LOCATION_FIELD).field("lat", 1).field("lon", 1).endObject().endObject()),
+                client().prepareIndex("index", "type", "d2").setSource(jsonBuilder().startObject().startObject(LOCATION_FIELD).field("lat", 1).field("lon", 2).endObject().endObject()));
         ensureYellow();
 
         String hashPoint = "s037ms06g7h0";
 
-        GeoDistanceSortBuilder geoDistanceSortBuilder = new GeoDistanceSortBuilder("location", hashPoint);
+        GeoDistanceSortBuilder geoDistanceSortBuilder = new GeoDistanceSortBuilder(LOCATION_FIELD, hashPoint);
 
         SearchResponse searchResponse = client().prepareSearch()
                 .setQuery(matchAllQuery())
@@ -227,7 +272,7 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase {
                 .execute().actionGet();
         checkCorrectSortOrderForGeoSort(searchResponse);
 
-        geoDistanceSortBuilder = new GeoDistanceSortBuilder("location", new GeoPoint(2, 2));
+        geoDistanceSortBuilder = new GeoDistanceSortBuilder(LOCATION_FIELD, new GeoPoint(2, 2));
 
         searchResponse = client().prepareSearch()
                 .setQuery(matchAllQuery())
@@ -235,7 +280,7 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase {
                 .execute().actionGet();
         checkCorrectSortOrderForGeoSort(searchResponse);
 
-        geoDistanceSortBuilder = new GeoDistanceSortBuilder("location", 2, 2);
+        geoDistanceSortBuilder = new GeoDistanceSortBuilder(LOCATION_FIELD, 2, 2);
 
         searchResponse = client().prepareSearch()
                 .setQuery(matchAllQuery())
@@ -246,28 +291,28 @@ public class GeoDistanceSortBuilderIT extends ESIntegTestCase {
         searchResponse = client()
                 .prepareSearch()
                 .setSource(
-                        new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort("location", 2.0, 2.0)
+                        new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort(LOCATION_FIELD, 2.0, 2.0)
                                 .unit(DistanceUnit.KILOMETERS).geoDistance(GeoDistance.PLANE))).execute().actionGet();
         checkCorrectSortOrderForGeoSort(searchResponse);
 
         searchResponse = client()
                 .prepareSearch()
                 .setSource(
-                        new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort("location", "s037ms06g7h0")
+                        new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort(LOCATION_FIELD, "s037ms06g7h0")
                                 .unit(DistanceUnit.KILOMETERS).geoDistance(GeoDistance.PLANE))).execute().actionGet();
         checkCorrectSortOrderForGeoSort(searchResponse);
 
         searchResponse = client()
                 .prepareSearch()
                 .setSource(
-                        new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort("location", 2.0, 2.0)
+                        new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort(LOCATION_FIELD, 2.0, 2.0)
                                 .unit(DistanceUnit.KILOMETERS).geoDistance(GeoDistance.PLANE))).execute().actionGet();
         checkCorrectSortOrderForGeoSort(searchResponse);
 
         searchResponse = client()
                 .prepareSearch()
                 .setSource(
-                        new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort("location", 2.0, 2.0)
+                        new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort(LOCATION_FIELD, 2.0, 2.0)
                                 .unit(DistanceUnit.KILOMETERS).geoDistance(GeoDistance.PLANE)
                                 .ignoreMalformed(true).coerce(true))).execute().actionGet();
         checkCorrectSortOrderForGeoSort(searchResponse);
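
The expectations in the new `testSingeToManyAvgMedian` above follow directly from its javadoc; spelled out (on the equator the longitude differences are proportional to the distances from q):

[source,java]
--------------------------------------------------
public class AvgMedianCheck {
    public static void main(String[] args) {
        double d1Avg = (1 + 4 + 10) / 3.0;   // 5.0 -> d1 sorts second by AVG
        double d2Avg = (1 + 5 + 6) / 3.0;    // 4.0 -> d2 sorts first by AVG
        double d1Median = 4;                 // middle of {1, 4, 10} -> d1 sorts first by MEDIAN
        double d2Median = 5;                 // middle of {1, 5, 6}
        System.out.printf("avg: d1=%.1f d2=%.1f, median: d1=%.1f d2=%.1f%n",
                d1Avg, d2Avg, d1Median, d2Median);
    }
}
--------------------------------------------------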


@@ -26,6 +26,8 @@ import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.unit.DistanceUnit;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
 import org.elasticsearch.index.query.QueryParseContext;
 import org.elasticsearch.test.geo.RandomGeoGenerator;
@@ -89,6 +91,13 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase<GeoDistanc
         return result;
     }
 
+    @Override
+    protected MappedFieldType provideMappedFieldType(String name) {
+        MappedFieldType clone = GeoPointFieldMapper.Defaults.FIELD_TYPE.clone();
+        clone.setName(name);
+        return clone;
+    }
+
     private static SortMode mode(SortMode original) {
         SortMode result;
         do {
@@ -167,7 +176,6 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase<GeoDistanc
             break;
         }
         return result;
-
     }
 
     public void testSortModeSumIsRejectedInSetter() {


@@ -26,6 +26,9 @@ import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.TermQueryBuilder;
 import org.elasticsearch.test.ESTestCase;
 
+import java.util.HashSet;
+import java.util.Set;
+
 public class RandomSortDataGenerator {
     private RandomSortDataGenerator() {
         // this is a helper class only, doesn't need a constructor
@@ -44,7 +47,7 @@ public class RandomSortDataGenerator {
             break;
         default:
         case 2:
-            nested = new TermQueryBuilder(ESTestCase.randomAsciiOfLengthBetween(1, 10), ESTestCase.randomAsciiOfLengthBetween(1, 10));
+            nested = new TermQueryBuilder(ESTestCase.randomAsciiOfLengthBetween(1, 10), ESTestCase.randomDouble());
             break;
         }
         nested.boost((float) ESTestCase.randomDoubleBetween(0, 10, false));
@@ -61,8 +64,14 @@ public class RandomSortDataGenerator {
     }
 
     public static SortMode mode(SortMode original) {
+        Set<SortMode> set = new HashSet<>();
+        set.add(original);
+        return mode(set);
+    }
+
+    public static SortMode mode(Set<SortMode> except) {
         SortMode mode = ESTestCase.randomFrom(SortMode.values());
-        while (mode.equals(original)) {
+        while (except.contains(mode)) {
             mode = ESTestCase.randomFrom(SortMode.values());
         }
         return mode;


@@ -33,18 +33,29 @@ import org.junit.Rule;
 import org.junit.rules.ExpectedException;
 
 import java.io.IOException;
+import java.util.HashSet;
+import java.util.Set;
 
 public class ScriptSortBuilderTests extends AbstractSortTestCase<ScriptSortBuilder> {
 
     @Override
     protected ScriptSortBuilder createTestItem() {
+        ScriptSortType type = randomBoolean() ? ScriptSortType.NUMBER : ScriptSortType.STRING;
         ScriptSortBuilder builder = new ScriptSortBuilder(new Script(randomAsciiOfLengthBetween(5, 10)),
-                randomBoolean() ? ScriptSortType.NUMBER : ScriptSortType.STRING);
+                type);
         if (randomBoolean()) {
             builder.order(RandomSortDataGenerator.order(builder.order()));
         }
         if (randomBoolean()) {
+            if (type == ScriptSortType.NUMBER) {
                 builder.sortMode(RandomSortDataGenerator.mode(builder.sortMode()));
+            } else {
+                Set<SortMode> exceptThis = new HashSet<>();
+                exceptThis.add(SortMode.SUM);
+                exceptThis.add(SortMode.AVG);
+                exceptThis.add(SortMode.MEDIAN);
+                builder.sortMode(RandomSortDataGenerator.mode(exceptThis));
+            }
         }
         if (randomBoolean()) {
             builder.setNestedFilter(RandomSortDataGenerator.nestedFilter(builder.getNestedFilter()));
@@ -68,7 +79,7 @@ public class ScriptSortBuilderTests extends AbstractSortTestCase<ScriptSortBuild
             result = new ScriptSortBuilder(script, type.equals(ScriptSortType.NUMBER) ? ScriptSortType.STRING : ScriptSortType.NUMBER);
         }
         result.order(original.order());
-        if (original.sortMode() != null) {
+        if (original.sortMode() != null && result.type() == ScriptSortType.NUMBER) {
             result.sortMode(original.sortMode());
         }
         result.setNestedFilter(original.getNestedFilter());
@@ -85,7 +96,16 @@ public class ScriptSortBuilderTests extends AbstractSortTestCase<ScriptSortBuild
             }
             break;
         case 1:
+            if (original.type() == ScriptSortType.NUMBER) {
                 result.sortMode(RandomSortDataGenerator.mode(original.sortMode()));
+            } else {
+                // script sort type String only allows MIN and MAX, so we only switch
+                if (original.sortMode() == SortMode.MIN) {
+                    result.sortMode(SortMode.MAX);
+                } else {
+                    result.sortMode(SortMode.MIN);
+                }
+            }
             break;
         case 2:
             result.setNestedFilter(RandomSortDataGenerator.nestedFilter(original.getNestedFilter()));
@@ -238,4 +258,14 @@ public class ScriptSortBuilderTests extends AbstractSortTestCase<ScriptSortBuild
         exceptionRule.expectMessage("unexpected token [START_ARRAY]");
         ScriptSortBuilder.PROTOTYPE.fromXContent(context, null);
     }
+
+    /**
+     * script sort of type {@link ScriptSortType} does not work with {@link SortMode#AVG}, {@link SortMode#MEDIAN} or {@link SortMode#SUM}
+     */
+    public void testBadSortMode() throws IOException {
+        ScriptSortBuilder builder = new ScriptSortBuilder(new Script("something"), ScriptSortType.STRING);
+        exceptionRule.expect(IllegalArgumentException.class);
+        exceptionRule.expectMessage("script sort of type [string] doesn't support mode");
+        builder.sortMode(SortMode.fromString(randomFrom(new String[]{"avg", "median", "sum"})));
+    }
 }
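
For reference, the setter behavior the new `testBadSortMode` pins down amounts to a guard along these lines (a sketch, not the actual `ScriptSortBuilder` source):

[source,java]
--------------------------------------------------
public ScriptSortBuilder sortMode(SortMode sortMode) {
    // String-backed script sorts can only pick one of the values, so the
    // arithmetic modes are rejected up front with the message the test expects.
    if (type == ScriptSortType.STRING
            && (sortMode == SortMode.SUM || sortMode == SortMode.AVG || sortMode == SortMode.MEDIAN)) {
        throw new IllegalArgumentException("script sort of type [string] doesn't support mode [" + sortMode + "]");
    }
    this.sortMode = sortMode;
    return this;
}
--------------------------------------------------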


@@ -122,6 +122,28 @@ dictionary to `$ES_HOME/config/userdict_ja.txt`:
 東京スカイツリー,東京 スカイツリー,トウキョウ スカイツリー,カスタム名詞
 -----------------------
 
+`nbest_cost`/`nbest_examples`::
++
+--
+The additional expert parameters `nbest_cost` and `nbest_examples` can be used
+to include extra tokens that are most likely according to the statistical model.
+If both parameters are set, the larger of the two resulting values is applied.
+
+`nbest_cost`::
+
+    The `nbest_cost` parameter specifies an additional Viterbi cost.
+    The KuromojiTokenizer will include all tokens in Viterbi paths that are
+    within the `nbest_cost` value of the best path.
+
+`nbest_examples`::
+
+    The `nbest_examples` parameter can be used to find a `nbest_cost` value
+    based on examples. For example, a value of /箱根山-箱根/成田空港-成田/
+    indicates that, for the texts 箱根山 (Mt. Hakone) and 成田空港 (Narita
+    Airport), we would like a cost that gives us 箱根 (Hakone) and 成田
+    (Narita).
+--
+
 Then create an analyzer as follows:
 
 [source,json]
@@ -452,3 +474,48 @@ The above request returns:
 }
 --------------------------------------------------
+
+[[analysis-kuromoji-number]]
+===== `kuromoji_number` token filter
+
+The `kuromoji_number` token filter normalizes Japanese numbers (kansūji)
+to regular Arabic decimal numbers in half-width characters.
+
+[source,json]
+--------------------------------------------------
+PUT kuromoji_sample
+{
+  "settings": {
+    "index": {
+      "analysis": {
+        "analyzer": {
+          "my_analyzer": {
+            "tokenizer": "kuromoji_tokenizer",
+            "filter": [
+              "kuromoji_number"
+            ]
+          }
+        }
+      }
+    }
+  }
+}
+
+POST kuromoji_sample/_analyze?analyzer=my_analyzer&text=一〇〇〇
+--------------------------------------------------
+// AUTOSENSE
+
+[source,text]
+--------------------------------------------------
+# Result
+{
+  "tokens" : [ {
+    "token" : "1000",
+    "start_offset" : 0,
+    "end_offset" : 4,
+    "type" : "word",
+    "position" : 1
+  } ]
+}
+--------------------------------------------------


@@ -16,7 +16,7 @@ price for the product. The mapping could look like:
     "resellers" : { <1>
       "type" : "nested",
       "properties" : {
-        "name" : { "type" : "string" },
+        "name" : { "type" : "text" },
         "price" : { "type" : "double" }
       }
     }


@@ -22,12 +22,12 @@ the issue documents as nested documents. The mapping could look like:
     "issue" : {
       "properties" : {
-        "tags" : { "type" : "string" }
+        "tags" : { "type" : "text" }
         "comments" : { <1>
           "type" : "nested"
           "properties" : {
-            "username" : { "type" : "string", "index" : "not_analyzed" },
-            "comment" : { "type" : "string" }
+            "username" : { "type" : "keyword" },
+            "comment" : { "type" : "text" }
           }
         }
       }


@@ -4,4 +4,4 @@
 An analyzer of type `keyword` that "tokenizes" an entire stream as a
 single token. This is useful for data like zip codes, ids and so on.
 Note, when using mapping definitions, it might make more sense to simply
-mark the field as `not_analyzed`.
+map the field as a <<keyword,`keyword`>>.


@@ -114,10 +114,18 @@ node (c) |d
 cache memory |0b
 |`fielddata.evictions` |`fe`, `fielddataEvictions` |No |Fielddata cache
 evictions |0
-|`filter_cache.memory_size` |`fcm`, `filterCacheMemory` |No |Used filter
-cache memory |0b
-|`filter_cache.evictions` |`fce`, `filterCacheEvictions` |No |Filter
-cache evictions |0
+|`query_cache.memory_size` |`qcm`, `queryCacheMemory` |No |Used query
+cache memory |0b
+|`query_cache.evictions` |`qce`, `queryCacheEvictions` |No |Query
+cache evictions |0
+|`request_cache.memory_size` |`rcm`, `requestCacheMemory` |No |Used request
+cache memory |0b
+|`request_cache.evictions` |`rce`, `requestCacheEvictions` |No |Request
+cache evictions |0
+|`request_cache.hit_count` |`rchc`, `requestCacheHitCount` |No |Request
+cache hit count |0
+|`request_cache.miss_count` |`rcmc`, `requestCacheMissCount` |No |Request
+cache miss count |0
 |`flush.total` |`ft`, `flushTotal` |No |Number of flushes |1
 |`flush.total_time` |`ftt`, `flushTotalTime` |No |Time spent in flush |1
 |`get.current` |`gc`, `getCurrent` |No |Number of current get
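
For a quick look at the new columns, a request along the lines of `GET _cat/nodes?v&h=name,request_cache.memory_size,rchc,rcmc` should list the request cache memory together with the new hit and miss counters, using either the full names or the short aliases from the table above.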


@@ -299,7 +299,8 @@ POST /_reindex
 === URL Parameters
 
 In addition to the standard parameters like `pretty`, the Reindex API also
-supports `refresh`, `wait_for_completion`, `consistency`, and `timeout`.
+supports `refresh`, `wait_for_completion`, `consistency`, `timeout`, and
+`requests_per_second`.
 
 Sending the `refresh` url parameter will cause all indexes to which the request
 wrote to be refreshed. This is different from the Index API's `refresh`
@@ -317,8 +318,14 @@ request. `timeout` controls how long each write request waits for unavailable
 shards to become available. Both work exactly how they work in the
 {ref}/docs-bulk.html[Bulk API].
 
-`timeout` controls how long each batch waits for the target shard to become
-available. It works exactly how it works in the {ref}/docs-bulk.html[Bulk API].
+`requests_per_second` can be set to any decimal number (1.4, 6, 1000, etc.) and
+throttles the number of requests per second that the reindex issues. The
+throttling is done by waiting between bulk batches, so that the scroll timeout
+can be adjusted to match. The wait time is the difference between
+`requests_in_the_batch / requests_per_second` and the time the batch took to
+complete. Since the batch isn't broken into multiple bulk requests, large batch
+sizes will cause Elasticsearch to create many requests and then wait for a
+while before starting the next set. This is "bursty" instead of "smooth".
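
To make the wait-time arithmetic concrete, here is a small sketch (the numbers are illustrative, not defaults):

[source,java]
--------------------------------------------------
public class ThrottleMath {
    public static void main(String[] args) {
        float requestsPerSecond = 500f;   // illustrative requests_per_second
        int requestsInTheBatch = 1000;    // one scroll batch
        double batchTookSeconds = 0.5;    // time the bulk write took
        // Target time per batch, then sleep for whatever is left of it.
        double targetSeconds = requestsInTheBatch / requestsPerSecond;      // 2.0
        double waitSeconds = Math.max(0, targetSeconds - batchTookSeconds); // 1.5
        System.out.printf("wait %.1fs between batches%n", waitSeconds);
    }
}
--------------------------------------------------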
 [float]
 === Response body
 
@@ -333,6 +340,8 @@ The JSON response looks like this:
   "created": 123,
   "batches": 1,
   "version_conflicts": 2,
+  "retries": 0,
+  "throttled_millis": 0,
   "failures" : [ ]
 }
 --------------------------------------------------
@@ -357,6 +366,14 @@ The number of scroll responses pulled back by the reindex.
 
 The number of version conflicts that reindex hit.
 
+`retries`::
+
+The number of retries that the reindex did in response to a full queue.
+
+`throttled_millis`::
+
+Number of milliseconds the request slept to conform to `requests_per_second`.
+
 `failures`::
 
 Array of all indexing failures. If this is non-empty then the request aborted
@@ -403,7 +420,9 @@ The response looks like:
       "deleted" : 0,
       "batches" : 36,
       "version_conflicts" : 0,
-      "noops" : 0
+      "noops" : 0,
+      "retries": 0,
+      "throttled_millis": 0
     },
     "description" : ""
   } ]


@@ -136,13 +136,13 @@ curl -s -XPUT 'http://localhost:9200/twitter/' -d '{
     "tweet": {
       "properties": {
         "text": {
-          "type": "string",
+          "type": "text",
           "term_vector": "with_positions_offsets_payloads",
           "store" : true,
           "analyzer" : "fulltext_analyzer"
         },
         "fullname": {
-          "type": "string",
+          "type": "text",
           "term_vector": "with_positions_offsets_payloads",
           "analyzer" : "fulltext_analyzer"
         }


@@ -169,8 +169,14 @@ request. `timeout` controls how long each write request waits for unavailable
 shards to become available. Both work exactly how they work in the
 {ref}/docs-bulk.html[Bulk API].
 
-`timeout` controls how long each batch waits for the target shard to become
-available. It works exactly how it works in the {ref}/docs-bulk.html[Bulk API].
+`requests_per_second` can be set to any decimal number (1.4, 6, 1000, etc.) and
+throttles the number of requests per second that the update by query issues.
+The throttling is done by waiting between bulk batches, so that the scroll
+timeout can be adjusted to match. The wait time is the difference between
+`requests_in_the_batch / requests_per_second` and the time the batch took to
+complete. Since the batch isn't broken into multiple bulk requests, large batch
+sizes will cause Elasticsearch to create many requests and then wait for a
+while before starting the next set. This is "bursty" instead of "smooth".
 
 [float]
 === Response body
@@ -184,6 +190,8 @@ The JSON response looks like this:
   "updated": 0,
   "batches": 1,
   "version_conflicts": 2,
+  "retries": 0,
+  "throttled_millis": 0,
   "failures" : [ ]
 }
 --------------------------------------------------
@@ -204,6 +212,14 @@ The number of scroll responses pulled back by the update by query.
 
 The number of version conflicts that the update by query hit.
 
+`retries`::
+
+The number of retries that the update by query did in response to a full queue.
+
+`throttled_millis`::
+
+Number of milliseconds the request slept to conform to `requests_per_second`.
+
 `failures`::
 
 Array of all indexing failures. If this is non-empty then the request aborted
@@ -251,7 +267,9 @@ The response looks like:
       "deleted" : 0,
       "batches" : 36,
       "version_conflicts" : 0,
-      "noops" : 0
+      "noops" : 0,
+      "retries": 0,
+      "throttled_millis": 0
     },
     "description" : ""
   } ]
@@ -281,7 +299,7 @@ PUT test
   "test": {
     "dynamic": false,   <1>
     "properties": {
-      "text": {"type": "string"}
+      "text": {"type": "text"}
     }
   }
@@ -300,8 +318,8 @@ POST test/test?refresh
 PUT test/_mapping/test   <2>
 {
   "properties": {
-    "text": {"type": "string"},
-    "flag": {"type": "string", "analyzer": "keyword"}
+    "text": {"type": "text"},
+    "flag": {"type": "text", "analyzer": "keyword"}
   }
 }


@@ -39,7 +39,7 @@ Here we configure the DFRSimilarity so it can be referenced as
 {
   "book" : {
     "properties" : {
-      "title" : { "type" : "string", "similarity" : "my_similarity" }
+      "title" : { "type" : "text", "similarity" : "my_similarity" }
     }
   }
 --------------------------------------------------


@@ -116,8 +116,7 @@ curl -XPUT 'http://localhost:9200/test1' -d '{
     "type1": {
       "properties": {
         "user" : {
-          "type": "string",
-          "index": "not_analyzed"
+          "type": "keyword"
         }
       }
     }


@@ -78,7 +78,7 @@ curl -XPOST localhost:9200/test -d '{
     "mappings" : {
       "type1" : {
         "properties" : {
-          "field1" : { "type" : "string", "index" : "not_analyzed" }
+          "field1" : { "type" : "text" }
         }
       }
     }


@@ -22,7 +22,7 @@ For which the response is (assuming `text` is a default string field):
     "text": {
       "full_name": "text",
       "mapping": {
-        "text": { "type": "string" }
+        "text": { "type": "text" }
       }
     }
 }
@@ -73,13 +73,13 @@ For example, consider the following mapping:
 {
   "article": {
     "properties": {
-      "id": { "type": "string" },
-      "title": { "type": "string"},
-      "abstract": { "type": "string"},
+      "id": { "type": "text" },
+      "title": { "type": "text"},
+      "abstract": { "type": "text"},
       "author": {
         "properties": {
-          "id": { "type": "string" },
-          "name": { "type": "string" }
+          "id": { "type": "text" },
+          "name": { "type": "text" }
         }
       }
     }
@@ -105,19 +105,19 @@ returns:
     "abstract": {
       "full_name": "abstract",
       "mapping": {
-        "abstract": { "type": "string" }
+        "abstract": { "type": "text" }
       }
     },
     "author.id": {
       "full_name": "author.id",
       "mapping": {
-        "id": { "type": "string" }
+        "id": { "type": "text" }
       }
     },
     "name": {
       "full_name": "author.name",
       "mapping": {
-        "name": { "type": "string" }
+        "name": { "type": "text" }
       }
     }
 }

View File

@ -12,7 +12,7 @@ PUT twitter <1>
"tweet": { "tweet": {
"properties": { "properties": {
"message": { "message": {
"type": "string" "type": "text"
} }
} }
} }
@ -23,7 +23,7 @@ PUT twitter/_mapping/user <2>
{ {
"properties": { "properties": {
"name": { "name": {
"type": "string" "type": "text"
} }
} }
} }
@ -32,7 +32,7 @@ PUT twitter/_mapping/tweet <3>
{ {
"properties": { "properties": {
"user_name": { "user_name": {
"type": "string" "type": "text"
} }
} }
} }
@ -86,13 +86,12 @@ PUT my_index <1>
"name": { "name": {
"properties": { "properties": {
"first": { "first": {
"type": "string" "type": "text"
} }
} }
}, },
"user_id": { "user_id": {
"type": "string", "type": "keyword"
"index": "not_analyzed"
} }
} }
} }
@ -105,13 +104,12 @@ PUT my_index/_mapping/user
"name": { "name": {
"properties": { "properties": {
"last": { <2> "last": { <2>
"type": "string" "type": "text"
} }
} }
}, },
"user_id": { "user_id": {
"type": "string", "type": "keyword",
"index": "not_analyzed",
"ignore_above": 100 <3> "ignore_above": 100 <3>
} }
} }
@ -149,7 +147,7 @@ PUT my_index
"type_one": { "type_one": {
"properties": { "properties": {
"text": { <1> "text": { <1>
"type": "string", "type": "text",
"analyzer": "standard" "analyzer": "standard"
} }
} }
@ -157,7 +155,7 @@ PUT my_index
"type_two": { "type_two": {
"properties": { "properties": {
"text": { <1> "text": { <1>
"type": "string", "type": "text",
"analyzer": "standard" "analyzer": "standard"
} }
} }
@ -169,7 +167,7 @@ PUT my_index/_mapping/type_one <2>
{ {
"properties": { "properties": {
"text": { "text": {
"type": "string", "type": "text",
"analyzer": "standard", "analyzer": "standard",
"search_analyzer": "whitespace" "search_analyzer": "whitespace"
} }
@ -180,7 +178,7 @@ PUT my_index/_mapping/type_one?update_all_types <3>
{ {
"properties": { "properties": {
"text": { "text": {
"type": "string", "type": "text",
"analyzer": "standard", "analyzer": "standard",
"search_analyzer": "whitespace" "search_analyzer": "whitespace"
} }

View File

@ -46,7 +46,7 @@ Fields with the same name in different mapping types in the same index
Each field has a data `type` which can be: Each field has a data `type` which can be:
* a simple type like <<string,`string`>>, <<date,`date`>>, <<number,`long`>>, * a simple type like <<text,`text`>>, <<keyword,`keyword`>>, <<date,`date`>>, <<number,`long`>>,
<<number,`double`>>, <<boolean,`boolean`>> or <<ip,`ip`>>. <<number,`double`>>, <<boolean,`boolean`>> or <<ip,`ip`>>.
* a type which supports the hierarchical nature of JSON such as * a type which supports the hierarchical nature of JSON such as
<<object,`object`>> or <<nested,`nested`>>. <<object,`object`>> or <<nested,`nested`>>.
@ -55,7 +55,7 @@ Each field has a data `type` which can be:
It is often useful to index the same field in different ways for different It is often useful to index the same field in different ways for different
purposes. For instance, a `string` field could be <<mapping-index,indexed>> as purposes. For instance, a `string` field could be <<mapping-index,indexed>> as
an `analyzed` field for full-text search, and as a `not_analyzed` field for a `text` field for full-text search, and as a `keyword` field for
sorting or aggregations. Alternatively, you could index a string field with sorting or aggregations. Alternatively, you could index a string field with
the <<analysis-standard-analyzer,`standard` analyzer>>, the the <<analysis-standard-analyzer,`standard` analyzer>>, the
<<english-analyzer,`english`>> analyzer, and the <<english-analyzer,`english`>> analyzer, and the
@ -134,18 +134,17 @@ PUT my_index <1>
"user": { <2> "user": { <2>
"_all": { "enabled": false }, <3> "_all": { "enabled": false }, <3>
"properties": { <4> "properties": { <4>
"title": { "type": "string" }, <5> "title": { "type": "text" }, <5>
"name": { "type": "string" }, <5> "name": { "type": "text" }, <5>
"age": { "type": "integer" } <5> "age": { "type": "integer" } <5>
} }
}, },
"blogpost": { <2> "blogpost": { <2>
"properties": { <4> "properties": { <4>
"title": { "type": "string" }, <5> "title": { "type": "text" }, <5>
"body": { "type": "string" }, <5> "body": { "type": "text" }, <5>
"user_id": { "user_id": {
"type": "string", <5> "type": "keyword" <5>
"index": "not_analyzed"
}, },
"created": { "created": {
"type": "date", <5> "type": "date", <5>

View File

@ -56,11 +56,10 @@ PUT _template/logging
"strings": { <4> "strings": { <4>
"match_mapping_type": "string", "match_mapping_type": "string",
"mapping": { "mapping": {
"type": "string", "type": "text",
"fields": { "fields": {
"raw": { "raw": {
"type": "string", "type": "keyword",
"index": "not_analyzed",
"ignore_above": 256 "ignore_above": 256
} }
} }
@ -79,4 +78,4 @@ PUT logs-2015.10.01/event/1
<1> The `logging` template will match any indices beginning with `logs-`. <1> The `logging` template will match any indices beginning with `logs-`.
<2> Matching indices will be created with a single primary shard. <2> Matching indices will be created with a single primary shard.
<3> The `_all` field will be disabled by default for new type mappings. <3> The `_all` field will be disabled by default for new type mappings.
<4> String fields will be created with an `analyzed` main field, and a `not_analyzed` `.raw` field. <4> String fields will be created with a `text` main field, and a `keyword` `.raw` field.

View File

@ -22,7 +22,7 @@ string:: Either a <<date,`date`>> field
(if the value passes <<date-detection,date detection>>), (if the value passes <<date-detection,date detection>>),
a <<number,`double`>> or <<number,`long`>> field a <<number,`double`>> or <<number,`long`>> field
(if the value passes <<numeric-detection,numeric detection>>) (if the value passes <<numeric-detection,numeric detection>>)
or an <<mapping-index,`analyzed`>> <<string,`string`>> field. or a <<text,`text`>> field.
These are the only <<mapping-types,field datatypes>> that are dynamically These are the only <<mapping-types,field datatypes>> that are dynamically
detected. All other datatypes must be mapped explicitly. detected. All other datatypes must be mapped explicitly.
@ -81,7 +81,7 @@ PUT my_index/my_type/1 <1>
-------------------------------------------------- --------------------------------------------------
// AUTOSENSE // AUTOSENSE
<1> The `create_date` field has been added as a <<string,`string`>> field. <1> The `create_date` field has been added as a <<text,`text`>> field.
===== Customising detected date formats ===== Customising detected date formats

View File

@ -52,7 +52,7 @@ can be automatically detected: `boolean`, `date`, `double`, `long`, `object`,
`string`. It also accepts `*` to match all datatypes. `string`. It also accepts `*` to match all datatypes.
For example, if we wanted to map all integer fields as `integer` instead of For example, if we wanted to map all integer fields as `integer` instead of
`long`, and all `string` fields as both `analyzed` and `not_analyzed`, we `long`, and all `string` fields as both `text` and `keyword`, we
could use the following template: could use the following template:
[source,js] [source,js]
@ -74,11 +74,10 @@ PUT my_index
"strings": { "strings": {
"match_mapping_type": "string", "match_mapping_type": "string",
"mapping": { "mapping": {
"type": "string", "type": "text",
"fields": { "fields": {
"raw": { "raw": {
"type": "string", "type": "keyword",
"index": "not_analyzed",
"ignore_above": 256 "ignore_above": 256
} }
} }
@ -99,7 +98,7 @@ PUT my_index/my_type/1
-------------------------------------------------- --------------------------------------------------
// AUTOSENSE // AUTOSENSE
<1> The `my_integer` field is mapped as an `integer`. <1> The `my_integer` field is mapped as an `integer`.
<2> The `my_string` field is mapped as an analyzed `string`, with a `not_analyzed` <<multi-fields,multi field>>. <2> The `my_string` field is mapped as a `text`, with a `keyword` <<multi-fields,multi field>>.
[[match-unmatch]] [[match-unmatch]]
@ -180,7 +179,7 @@ PUT my_index
"path_match": "name.*", "path_match": "name.*",
"path_unmatch": "*.middle", "path_unmatch": "*.middle",
"mapping": { "mapping": {
"type": "string", "type": "text",
"copy_to": "full_name" "copy_to": "full_name"
} }
} }
@ -221,7 +220,7 @@ PUT my_index
"match_mapping_type": "string", "match_mapping_type": "string",
"match": "*", "match": "*",
"mapping": { "mapping": {
"type": "string", "type": "text",
"analyzer": "{name}" "analyzer": "{name}"
} }
} }

View File

@ -45,7 +45,7 @@ from each field as a string. It does not combine the _terms_ from each field.
============================================================================= =============================================================================
The `_all` field is just a <<string,`string`>> field, and accepts the same The `_all` field is just a <<text,`text`>> field, and accepts the same
parameters that other string fields accept, including `analyzer`, parameters that other string fields accept, including `analyzer`,
`term_vectors`, `index_options`, and `store`. `term_vectors`, `index_options`, and `store`.
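For instance, because `_all` accepts the same parameters as other string
fields, its analyzer can be set in the mapping; a minimal sketch (the
`whitespace` analyzer is chosen purely for illustration):

[source,js]
--------------------------------------------------
PUT my_index
{
  "mappings": {
    "my_type": {
      "_all": {
        "analyzer": "whitespace"
      }
    }
  }
}
--------------------------------------------------
// AUTOSENSE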
@ -136,7 +136,7 @@ PUT my_index
}, },
"properties": { "properties": {
"content": { "content": {
"type": "string" "type": "text"
} }
} }
} }
@ -172,11 +172,11 @@ PUT myindex
"mytype": { "mytype": {
"properties": { "properties": {
"title": { <1> "title": { <1>
"type": "string", "type": "text",
"boost": 2 "boost": 2
}, },
"content": { <1> "content": { <1>
"type": "string" "type": "text"
} }
} }
} }
@ -210,15 +210,15 @@ PUT myindex
"mytype": { "mytype": {
"properties": { "properties": {
"first_name": { "first_name": {
"type": "string", "type": "text",
"copy_to": "full_name" <1> "copy_to": "full_name" <1>
}, },
"last_name": { "last_name": {
"type": "string", "type": "text",
"copy_to": "full_name" <1> "copy_to": "full_name" <1>
}, },
"full_name": { "full_name": {
"type": "string" "type": "text"
} }
} }
} }

View File

@ -127,7 +127,7 @@ global ordinals for the `_parent` field.
Global ordinals, by default, are built lazily: the first parent-child query or Global ordinals, by default, are built lazily: the first parent-child query or
aggregation after a refresh will trigger building of global ordinals. This can aggregation after a refresh will trigger building of global ordinals. This can
introduce a significant latency spike for your users. You can use introduce a significant latency spike for your users. You can use
<<fielddata-loading,eager_global_ordinals>> to shift the cost of building global <<global-ordinals,eager_global_ordinals>> to shift the cost of building global
ordinals from query time to refresh time, by mapping the `_parent` field as follows: ordinals from query time to refresh time, by mapping the `_parent` field as follows:
[source,js] [source,js]
@ -139,9 +139,7 @@ PUT my_index
"my_child": { "my_child": {
"_parent": { "_parent": {
"type": "my_parent", "type": "my_parent",
"fielddata": { "eager_global_ordinals": true
"loading": "eager_global_ordinals"
}
} }
} }
} }

View File

@ -47,10 +47,10 @@ PUT my_index
"my_type": { "my_type": {
"properties": { "properties": {
"text": { <1> "text": { <1>
"type": "string", "type": "text",
"fields": { "fields": {
"english": { <2> "english": { <2>
"type": "string", "type": "text",
"analyzer": "english" "analyzer": "english"
} }
} }
@ -124,7 +124,7 @@ PUT /my_index
"my_type":{ "my_type":{
"properties":{ "properties":{
"title": { "title": {
"type":"string", "type":"text",
"analyzer":"my_analyzer", <3> "analyzer":"my_analyzer", <3>
"search_analyzer":"my_stop_analyzer", <4> "search_analyzer":"my_stop_analyzer", <4>
"search_quote_analyzer":"my_analyzer" <5> "search_quote_analyzer":"my_analyzer" <5>

View File

@ -12,11 +12,11 @@ PUT my_index
"my_type": { "my_type": {
"properties": { "properties": {
"title": { "title": {
"type": "string", "type": "text",
"boost": 2 <1> "boost": 2 <1>
}, },
"content": { "content": {
"type": "string" "type": "text"
} }
} }
} }

View File

@ -15,15 +15,15 @@ PUT /my_index
"my_type": { "my_type": {
"properties": { "properties": {
"first_name": { "first_name": {
"type": "string", "type": "text",
"copy_to": "full_name" <1> "copy_to": "full_name" <1>
}, },
"last_name": { "last_name": {
"type": "string", "type": "text",
"copy_to": "full_name" <1> "copy_to": "full_name" <1>
}, },
"full_name": { "full_name": {
"type": "string" "type": "text"
} }
} }
} }

View File

@ -29,12 +29,10 @@ PUT my_index
"my_type": { "my_type": {
"properties": { "properties": {
"status_code": { <1> "status_code": { <1>
"type": "string", "type": "keyword"
"index": "not_analyzed"
}, },
"session_id": { <2> "session_id": { <2>
"type": "string", "type": "keyword",
"index": "not_analyzed",
"doc_values": false "doc_values": false
} }
} }

View File

@ -67,7 +67,7 @@ PUT my_index
"user": { <2> "user": { <2>
"properties": { "properties": {
"name": { "name": {
"type": "string" "type": "text"
}, },
"social_networks": { <3> "social_networks": { <3>
"dynamic": true, "dynamic": true,

View File

@ -21,8 +21,7 @@ PUT my_index
"session": { "session": {
"properties": { "properties": {
"user_id": { "user_id": {
"type": "string", "type": "keyword"
"index": "not_analyzed"
}, },
"last_updated": { "last_updated": {
"type": "date" "type": "date"

View File

@ -12,28 +12,28 @@ documents, we need to be able to look up the document and find the terms that
it has in a field. it has in a field.
Most fields can use index-time, on-disk <<doc-values,`doc_values`>> to support Most fields can use index-time, on-disk <<doc-values,`doc_values`>> to support
this type of data access pattern, but `analyzed` string fields do not support this type of data access pattern, but `text` fields do not support `doc_values`.
`doc_values`.
Instead, `analyzed` strings use a query-time data structure called Instead, `text` fields use a query-time data structure called
`fielddata`. This data structure is built on demand the first time that a `fielddata`. This data structure is built on demand the first time that a
field is used for aggregations, sorting, or is accessed in a script. It is built field is used for aggregations, sorting, or is accessed in a script. It is built
by reading the entire inverted index for each segment from disk, inverting the by reading the entire inverted index for each segment from disk, inverting the
term ↔︎ document relationship, and storing the result in memory, in the term ↔︎ document relationship, and storing the result in memory, in the
JVM heap. JVM heap.
Loading fielddata is an expensive process so, once it has been loaded, it Loading fielddata is an expensive process so it is disabled by default. Also,
remains in memory for the lifetime of the segment. when enabled, once it has been loaded, it remains in memory for the lifetime of
the segment.
[WARNING] [WARNING]
.Fielddata can fill up your heap space .Fielddata can fill up your heap space
============================================================================== ==============================================================================
Fielddata can consume a lot of heap space, especially when loading high Fielddata can consume a lot of heap space, especially when loading high
cardinality `analyzed` string fields. Most of the time, it doesn't make sense cardinality `text` fields. Most of the time, it doesn't make sense
to sort or aggregate on `analyzed` string fields (with the notable exception to sort or aggregate on `text` fields (with the notable exception
of the of the
<<search-aggregations-bucket-significantterms-aggregation,`significant_terms`>> <<search-aggregations-bucket-significantterms-aggregation,`significant_terms`>>
aggregation). Always think about whether a `not_analyzed` field (which can aggregation). Always think about whether a <<keyword,`keyword`>> field (which can
use `doc_values`) would be a better fit for your use case. use `doc_values`) would be a better fit for your use case.
============================================================================== ==============================================================================
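To opt in on a particular field, fielddata can be enabled in the mapping; a
minimal sketch, assuming the boolean `fielddata` parameter documented for the
`text` datatype (the field name `tag` is illustrative):

[source,js]
--------------------------------------------------
PUT my_index
{
  "mappings": {
    "my_type": {
      "properties": {
        "tag": {
          "type": "text",
          "fielddata": true
        }
      }
    }
  }
}
--------------------------------------------------
// AUTOSENSE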
@ -42,71 +42,6 @@ same name in the same index. Its value can be updated on existing fields
using the <<indices-put-mapping,PUT mapping API>>. using the <<indices-put-mapping,PUT mapping API>>.
[[fielddata-format]]
==== `fielddata.format`
For `analyzed` string fields, the fielddata `format` controls whether
fielddata should be enabled or not. It accepts: `disabled` and `paged_bytes`
(enabled, which is the default). To disable fielddata loading, you can use
the following mapping:
[source,js]
--------------------------------------------------
PUT my_index
{
"mappings": {
"my_type": {
"properties": {
"text": {
"type": "string",
"fielddata": {
"format": "disabled" <1>
}
}
}
}
}
}
--------------------------------------------------
// AUTOSENSE
<1> The `text` field cannot be used for sorting, aggregations, or in scripts.
.Fielddata and other datatypes
[NOTE]
==================================================
Historically, other field datatypes also used fielddata, but this has been replaced
by index-time, disk-based <<doc-values,`doc_values`>>.
==================================================
[[fielddata-loading]]
==== `fielddata.loading`
This per-field setting controls when fielddata is loaded into memory. It
accepts three options:
[horizontal]
`lazy`::
Fielddata is only loaded into memory when it is needed. (default)
`eager`::
Fielddata is loaded into memory before a new search segment becomes
visible to search. This can reduce the latency that a user may experience
if their search request has to trigger lazy loading from a big segment.
`eager_global_ordinals`::
Loading fielddata into memory is only part of the work that is required.
After loading the fielddata for each segment, Elasticsearch builds the
<<global-ordinals>> data structure to make a list of all unique terms
across all the segments in a shard. By default, global ordinals are built
lazily. If the field has a very high cardinality, global ordinals may
take some time to build, in which case you can use eager loading instead.
[[global-ordinals]] [[global-ordinals]]
.Global ordinals .Global ordinals
***************************************** *****************************************
@ -141,15 +76,10 @@ can move the loading time from the first search request, to the refresh itself.
***************************************** *****************************************
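A minimal sketch of shifting that cost to refresh time with the
`eager_global_ordinals` mapping parameter (the field name is illustrative):

[source,js]
--------------------------------------------------
PUT my_index
{
  "mappings": {
    "my_type": {
      "properties": {
        "tag": {
          "type": "keyword",
          "eager_global_ordinals": true
        }
      }
    }
  }
}
--------------------------------------------------
// AUTOSENSE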
[[field-data-filtering]] [[field-data-filtering]]
==== `fielddata.filter` ==== `fielddata_frequency_filter`
Fielddata filtering can be used to reduce the number of terms loaded into Fielddata filtering can be used to reduce the number of terms loaded into
memory, and thus reduce memory usage. Terms can be filtered by _frequency_ or memory, and thus reduce memory usage. Terms can be filtered by _frequency_:
by _regular expression_, or a combination of the two:
Filtering by frequency::
+
--
The frequency filter allows you to only load terms whose term frequency falls The frequency filter allows you to only load terms whose term frequency falls
between a `min` and `max` value, which can be expressed as an absolute between a `min` and `max` value, which can be expressed as an absolute
@ -169,7 +99,7 @@ PUT my_index
"my_type": { "my_type": {
"properties": { "properties": {
"tag": { "tag": {
"type": "string", "type": "text",
"fielddata": { "fielddata": {
"filter": { "filter": {
"frequency": { "frequency": {
@ -186,44 +116,3 @@ PUT my_index
} }
-------------------------------------------------- --------------------------------------------------
// AUTOSENSE // AUTOSENSE
--
Filtering by regex::
+
--
Terms can also be filtered by regular expression - only values which
match the regular expression are loaded. Note: the regular expression is
applied to each term in the field, not to the whole field value. For
instance, to only load hashtags from a tweet, we can use a regular
expression which matches terms beginning with `#`:
[source,js]
--------------------------------------------------
PUT my_index
{
"mappings": {
"my_type": {
"properties": {
"tweet": {
"type": "string",
"analyzer": "whitespace",
"fielddata": {
"filter": {
"regex": {
"pattern": "^#.*"
}
}
}
}
}
}
}
}
--------------------------------------------------
// AUTOSENSE
--
These filters can be updated on an existing field mapping and will take
effect the next time the fielddata for a segment is loaded. Use the
<<indices-clearcache,Clear Cache>> API
to reload the fielddata using the new filters.

View File

@ -1,12 +1,7 @@
[[ignore-above]] [[ignore-above]]
=== `ignore_above` === `ignore_above`
Strings longer than the `ignore_above` setting will not be processed by the Strings longer than the `ignore_above` setting will not be indexed or stored.
<<analyzer,analyzer>> and will not be indexed. This is mainly useful for
<<mapping-index,`not_analyzed`>> string fields, which are typically used for
filtering, aggregations, and sorting. These are structured fields and it
doesn't usually make sense to allow very long terms to be indexed in these
fields.
[source,js] [source,js]
-------------------------------------------------- --------------------------------------------------
@ -16,8 +11,7 @@ PUT my_index
"my_type": { "my_type": {
"properties": { "properties": {
"message": { "message": {
"type": "string", "type": "keyword",
"index": "not_analyzed",
"ignore_above": 20 <1> "ignore_above": 20 <1>
} }
} }

View File

@ -14,10 +14,10 @@ PUT my_index
"my_type": { "my_type": {
"properties": { "properties": {
"title": { <1> "title": { <1>
"type": "string" "type": "text"
}, },
"content": { <1> "content": { <1>
"type": "string" "type": "text"
}, },
"date": { <2> "date": { <2>
"type": "date", "type": "date",
@ -50,18 +50,18 @@ PUT my_index
"my_type": { "my_type": {
"include_in_all": false, <1> "include_in_all": false, <1>
"properties": { "properties": {
"title": { "type": "string" }, "title": { "type": "text" },
"author": { "author": {
"include_in_all": true, <2> "include_in_all": true, <2>
"properties": { "properties": {
"first_name": { "type": "string" }, "first_name": { "type": "text" },
"last_name": { "type": "string" } "last_name": { "type": "text" }
} }
}, },
"editor": { "editor": {
"properties": { "properties": {
"first_name": { "type": "string" }, <3> "first_name": { "type": "text" }, <3>
"last_name": { "type": "string", "include_in_all": true } <3> "last_name": { "type": "text", "include_in_all": true } <3>
} }
} }
} }

View File

@ -39,7 +39,7 @@ PUT my_index
"my_type": { "my_type": {
"properties": { "properties": {
"text": { "text": {
"type": "string", "type": "text",
"index_options": "offsets" "index_options": "offsets"
} }
} }

View File

@ -1,48 +1,6 @@
[[mapping-index]] [[mapping-index]]
=== `index` === `index`
The `index` option controls how field values are indexed and, thus, how they The `index` option controls whether field values are indexed. It accepts `true`
are searchable. It accepts three values: or `false`. Fields that are not indexed are not queryable.
[horizontal]
`no`::
Do not add this field value to the index. With this setting, the field
will not be queryable.
`not_analyzed`::
Add the field value to the index unchanged, as a single term. This is the
default for all fields that support this option except for
<<string,`string`>> fields. `not_analyzed` fields are usually used with
<<term-level-queries,term-level queries>> for structured search.
`analyzed`::
This option applies only to `string` fields, for which it is the default.
The string field value is first <<analysis,analyzed>> to convert the
string into terms (e.g. a list of individual words), which are then
indexed. At search time, the query string is passed through
(<<search-analyzer,usually>>) the same analyzer to generate terms
in the same format as those in the index. It is this process that enables
<<full-text-queries,full text search>>.
For example, you can create a `not_analyzed` string field with the following:
[source,js]
--------------------------------------------------
PUT /my_index
{
"mappings": {
"my_type": {
"properties": {
"status_code": {
"type": "string",
"index": "not_analyzed"
}
}
}
}
}
--------------------------------------------------
// AUTOSENSE
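Under the new boolean option, disabling indexing looks like the following
sketch (the `status_code` field name is kept from the removed example above;
with `"index": false` the field is no longer searchable):

[source,js]
--------------------------------------------------
PUT my_index
{
  "mappings": {
    "my_type": {
      "properties": {
        "status_code": {
          "type": "keyword",
          "index": false
        }
      }
    }
  }
}
--------------------------------------------------
// AUTOSENSE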

View File

@ -3,8 +3,8 @@
It is often useful to index the same field in different ways for different It is often useful to index the same field in different ways for different
purposes. This is the purpose of _multi-fields_. For instance, a `string` purposes. This is the purpose of _multi-fields_. For instance, a `string`
field could be <<mapping-index,indexed>> as an `analyzed` field for full-text field could be mapped as a `text` field for full-text
search, and as a `not_analyzed` field for sorting or aggregations: search, and as a `keyword` field for sorting or aggregations:
[source,js] [source,js]
-------------------------------------------------- --------------------------------------------------
@ -14,11 +14,10 @@ PUT /my_index
"my_type": { "my_type": {
"properties": { "properties": {
"city": { "city": {
"type": "string", "type": "text",
"fields": { "fields": {
"raw": { <1> "raw": { <1>
"type": "string", "type": "keyword"
"index": "not_analyzed"
} }
} }
} }
@ -57,8 +56,8 @@ GET /my_index/_search
} }
-------------------------------------------------- --------------------------------------------------
// AUTOSENSE // AUTOSENSE
<1> The `city.raw` field is a `not_analyzed` version of the `city` field. <1> The `city.raw` field is a `keyword` version of the `city` field.
<2> The analyzed `city` field can be used for full text search. <2> The `city` field can be used for full text search.
<3> The `city.raw` field can be used for sorting and aggregations <3> The `city.raw` field can be used for sorting and aggregations
NOTE: Multi-fields do not change the original `_source` field. NOTE: Multi-fields do not change the original `_source` field.
@ -83,10 +82,10 @@ PUT my_index
"my_type": { "my_type": {
"properties": { "properties": {
"text": { <1> "text": { <1>
"type": "string", "type": "text",
"fields": { "fields": {
"english": { <2> "english": { <2>
"type": "string", "type": "text",
"analyzer": "english" "analyzer": "english"
} }
} }

View File

@ -4,14 +4,14 @@
Norms store various normalization factors that are later used at query time Norms store various normalization factors that are later used at query time
in order to compute the score of a document relatively to a query. in order to compute the score of a document relatively to a query.
Although useful for scoring, norms also require quite a lot of memory Although useful for scoring, norms also require quite a lot of disk
(typically in the order of one byte per document per field in your index, even (typically in the order of one byte per document per field in your index, even
for documents that don't have this specific field). As a consequence, if you for documents that don't have this specific field). As a consequence, if you
don't need scoring on a specific field, you should disable norms on that don't need scoring on a specific field, you should disable norms on that
field. In particular, this is the case for fields that are used solely for field. In particular, this is the case for fields that are used solely for
filtering or aggregations. filtering or aggregations.
TIP: The `norms.enabled` setting must have the same value for fields of the TIP: The `norms` setting must have the same value for fields of the
same name in the same index. Norms can be disabled on existing fields using same name in the same index. Norms can be disabled on existing fields using
the <<indices-put-mapping,PUT mapping API>>. the <<indices-put-mapping,PUT mapping API>>.
@ -24,10 +24,8 @@ PUT my_index/_mapping/my_type
{ {
"properties": { "properties": {
"title": { "title": {
"type": "string", "type": "text",
"norms": { "norms": false
"enabled": false
}
} }
} }
} }
@ -41,31 +39,3 @@ results since some documents won't have norms anymore while other documents
might still have norms. might still have norms.
==== Lazy loading of norms
Norms can be loaded into memory eagerly (`eager`), whenever a new segment
comes online, or they can loaded lazily (`lazy`, default), only when the field
is queried.
Eager loading can be configured as follows:
[source,js]
------------
PUT my_index/_mapping/my_type
{
"properties": {
"title": {
"type": "string",
"norms": {
"loading": "eager"
}
}
}
}
------------
// AUTOSENSE
TIP: The `norms.loading` setting must have the same setting for fields of the
same name in the same index. Its value can be updated on existing fields
using the <<indices-put-mapping,PUT mapping API>>.

View File

@ -16,8 +16,7 @@ PUT my_index
"my_type": { "my_type": {
"properties": { "properties": {
"status_code": { "status_code": {
"type": "string", "type": "keyword",
"index": "not_analyzed",
"null_value": "NULL" <1> "null_value": "NULL" <1>
} }
} }
@ -50,6 +49,4 @@ GET my_index/_search
<3> A query for `NULL` returns document 1, but not document 2. <3> A query for `NULL` returns document 1, but not document 2.
IMPORTANT: The `null_value` needs to be the same datatype as the field. For IMPORTANT: The `null_value` needs to be the same datatype as the field. For
instance, a `long` field cannot have a string `null_value`. String fields instance, a `long` field cannot have a string `null_value`.
which are `analyzed` will also pass the `null_value` through the configured
analyzer.

View File

@ -57,7 +57,7 @@ PUT my_index
"groups": { "groups": {
"properties": { "properties": {
"names": { "names": {
"type": "string", "type": "text",
"position_increment_gap": 0 <1> "position_increment_gap": 0 <1>
} }
} }

View File

@ -23,14 +23,14 @@ PUT my_index
"manager": { <2> "manager": { <2>
"properties": { "properties": {
"age": { "type": "integer" }, "age": { "type": "integer" },
"name": { "type": "string" } "name": { "type": "text" }
} }
}, },
"employees": { <3> "employees": { <3>
"type": "nested", "type": "nested",
"properties": { "properties": {
"age": { "type": "integer" }, "age": { "type": "integer" },
"name": { "type": "string" } "name": { "type": "text" }
} }
} }
} }

View File

@ -41,7 +41,7 @@ PUT /my_index
"my_type": { "my_type": {
"properties": { "properties": {
"text": { "text": {
"type": "string", "type": "text",
"analyzer": "autocomplete", <2> "analyzer": "autocomplete", <2>
"search_analyzer": "standard" <2> "search_analyzer": "standard" <2>
} }

View File

@ -5,8 +5,8 @@ Elasticsearch allows you to configure a scoring algorithm or _similarity_ per
field. The `similarity` setting provides a simple way of choosing a similarity field. The `similarity` setting provides a simple way of choosing a similarity
algorithm other than the default TF/IDF, such as `BM25`. algorithm other than the default TF/IDF, such as `BM25`.
Similarities are mostly useful for <<string,`string`>> fields, especially Similarities are mostly useful for <<text,`text`>> fields, but can also apply
`analyzed` string fields, but can also apply to other field types. to other field types.
Custom similarities can be configured by tuning the parameters of the built-in Custom similarities can be configured by tuning the parameters of the built-in
similarities. For more details about these expert options, see the similarities. For more details about these expert options, see the
@ -37,10 +37,10 @@ PUT my_index
"my_type": { "my_type": {
"properties": { "properties": {
"default_field": { <1> "default_field": { <1>
"type": "string" "type": "text"
}, },
"bm25_field": { "bm25_field": {
"type": "string", "type": "text",
"similarity": "BM25" <2> "similarity": "BM25" <2>
} }
} }

View File

@ -24,7 +24,7 @@ PUT /my_index
"my_type": { "my_type": {
"properties": { "properties": {
"title": { "title": {
"type": "string", "type": "text",
"store": true <1> "store": true <1>
}, },
"date": { "date": {
@ -32,7 +32,7 @@ PUT /my_index
"store": true <1> "store": true <1>
}, },
"content": { "content": {
"type": "string" "type": "text"
} }
} }
} }

View File

@ -35,7 +35,7 @@ PUT my_index
"my_type": { "my_type": {
"properties": { "properties": {
"text": { "text": {
"type": "string", "type": "text",
"term_vector": "with_positions_offsets" "term_vector": "with_positions_offsets"
} }
} }

View File

@ -7,7 +7,7 @@ document:
[float] [float]
=== Core datatypes === Core datatypes
<<string>>:: `string` string:: <<text,`text`>> and <<keyword,`keyword`>>
<<number>>:: `long`, `integer`, `short`, `byte`, `double`, `float` <<number>>:: `long`, `integer`, `short`, `byte`, `double`, `float`
<<date>>:: `date` <<date>>:: `date`
<<boolean>>:: `boolean` <<boolean>>:: `boolean`
@ -45,9 +45,9 @@ Attachment datatype::
=== Multi-fields === Multi-fields
It is often useful to index the same field in different ways for different It is often useful to index the same field in different ways for different
purposes. For instance, a `string` field could be <<mapping-index,indexed>> as purposes. For instance, a `string` field could be mapped as
an `analyzed` field for full-text search, and as a `not_analyzed` field for a `text` field for full-text search, and as a `keyword` field for
sorting or aggregations. Alternatively, you could index a string field with sorting or aggregations. Alternatively, you could index a text field with
the <<analysis-standard-analyzer,`standard` analyzer>>, the the <<analysis-standard-analyzer,`standard` analyzer>>, the
<<english-analyzer,`english`>> analyzer, and the <<english-analyzer,`english`>> analyzer, and the
<<french-analyzer,`french` analyzer>>. <<french-analyzer,`french` analyzer>>.
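A sketch of that multi-analyzer variant (the field name and analyzer choices
are illustrative):

[source,js]
--------------------------------------------------
PUT my_index
{
  "mappings": {
    "my_type": {
      "properties": {
        "text": {
          "type": "text",
          "analyzer": "standard",
          "fields": {
            "english": {
              "type": "text",
              "analyzer": "english"
            },
            "french": {
              "type": "text",
              "analyzer": "french"
            }
          }
        }
      }
    }
  }
}
--------------------------------------------------
// AUTOSENSE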
@ -69,6 +69,8 @@ include::types/geo-shape.asciidoc[]
include::types/ip.asciidoc[] include::types/ip.asciidoc[]
include::types/keyword.asciidoc[]
include::types/nested.asciidoc[] include::types/nested.asciidoc[]
include::types/numeric.asciidoc[] include::types/numeric.asciidoc[]
@ -77,6 +79,8 @@ include::types/object.asciidoc[]
include::types/string.asciidoc[] include::types/string.asciidoc[]
include::types/text.asciidoc[]
include::types/token-count.asciidoc[] include::types/token-count.asciidoc[]

View File

@ -13,7 +13,7 @@ PUT my_index
"my_type": { "my_type": {
"properties": { "properties": {
"name": { "name": {
"type": "string" "type": "text"
}, },
"blob": { "blob": {
"type": "binary" "type": "binary"

View File

@ -0,0 +1,111 @@
[[keyword]]
=== Keyword datatype
A field to index structured content such as email addresses, hostnames, status
codes, zip codes or tags.
They are typically used for filtering (_Find me all blog posts where
++status++ is ++published++_), for sorting, and for aggregations. Keyword
fields are only searchable by their exact value.
If you need to index full text content such as email bodies or product
descriptions, it is likely that you should rather use a <<text,`text`>> field.
Below is an example of a mapping for a keyword field:
[source,js]
--------------------------------
PUT my_index
{
"mappings": {
"my_type": {
"properties": {
"tags": {
"type": "keyword"
}
}
}
}
}
--------------------------------
// AUTOSENSE
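Because keyword fields are only searchable by their exact value, they pair
naturally with term-level queries; a minimal sketch (the tag value
`production` is illustrative):

[source,js]
--------------------------------
GET my_index/_search
{
  "query": {
    "term": {
      "tags": "production"
    }
  }
}
--------------------------------
// AUTOSENSE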
[[keyword-params]]
==== Parameters for keyword fields
The following parameters are accepted by `keyword` fields:
[horizontal]
<<mapping-boost,`boost`>>::
Mapping field-level query time boosting. Accepts a floating point number, defaults
to `1.0`.
<<doc-values,`doc_values`>>::
Should the field be stored on disk in a column-stride fashion, so that it
can later be used for sorting, aggregations, or scripting? Accepts `true`
(default) or `false`.
<<global-ordinals,`eager_global_ordinals`>>::
Should global ordinals be loaded eagerly on refresh? Accepts `true` or `false`
(default). Enabling this is a good idea on fields that are frequently used for
terms aggregations.
<<multi-fields,`fields`>>::
Multi-fields allow the same string value to be indexed in multiple ways for
different purposes, such as one field for search and a multi-field for
sorting and aggregations.
<<ignore-above,`ignore_above`>>::
Do not index or analyze any string longer than this value. Defaults to
`2147483647` so that all values are accepted.
<<include-in-all,`include_in_all`>>::
Whether or not the field value should be included in the
<<mapping-all-field,`_all`>> field? Accepts `true` or `false`. Defaults
to `false` if <<mapping-index,`index`>> is set to `false`, or if a parent
<<object,`object`>> field sets `include_in_all` to `false`.
Otherwise defaults to `true`.
<<mapping-index,`index`>>::
Should the field be searchable? Accepts `true` (default) or `false`.
<<index-options,`index_options`>>::
What information should be stored in the index, for scoring purposes.
Defaults to `docs` but can also be set to `freqs` to take term frequency into account
when computing scores.
<<norms,`norms`>>::
Whether field-length should be taken into account when scoring queries.
Accepts `true` or `false` (default).
<<null-value,`null_value`>>::
Accepts a string value which is substituted for any explicit `null`
values. Defaults to `null`, which means the field is treated as missing.
<<mapping-store,`store`>>::
Whether the field value should be stored and retrievable separately from
the <<mapping-source-field,`_source`>> field. Accepts `true` or `false`
(default).
<<search-analyzer,`search_analyzer`>>::
The <<analyzer,`analyzer`>> that should be used at search time on
<<mapping-index,`analyzed`>> fields. Defaults to the `analyzer` setting.
<<similarity,`similarity`>>::
Which scoring algorithm or _similarity_ should be used. Defaults
to `classic`, which uses TF/IDF.
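Several of these parameters can be combined on one field; a sketch using only
parameters documented above (values are illustrative):

[source,js]
--------------------------------
PUT my_index
{
  "mappings": {
    "my_type": {
      "properties": {
        "tags": {
          "type": "keyword",
          "eager_global_ordinals": true,
          "ignore_above": 256
        }
      }
    }
  }
}
--------------------------------
// AUTOSENSE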

View File

@ -46,16 +46,15 @@ PUT my_index
"my_type": { <1> "my_type": { <1>
"properties": { "properties": {
"region": { "region": {
"type": "string", "type": "keyword"
"index": "not_analyzed"
}, },
"manager": { <2> "manager": { <2>
"properties": { "properties": {
"age": { "type": "integer" }, "age": { "type": "integer" },
"name": { <3> "name": { <3>
"properties": { "properties": {
"first": { "type": "string" }, "first": { "type": "text" },
"last": { "type": "string" } "last": { "type": "text" }
} }
} }
} }

View File

@ -1,179 +1,4 @@
[[string]] [[string]]
=== String datatype === String datatype
Fields of type `string` accept text values. Strings may be sub-divided into: NOTE: The `string` field has been removed in favor of the `text` and `keyword` fields.
Full text::
+
--
Full text values, like the body of an email, are typically used for text based
relevance searches, such as: _Find the most relevant documents that match a
query for "quick brown fox"_.
These fields are `analyzed`, that is they are passed through an
<<analysis,analyzer>> to convert the string into a list of individual terms
before being indexed. The analysis process allows Elasticsearch to search for
individual words _within_ each full text field. Full text fields are not
used for sorting and seldom used for aggregations (although the
<<search-aggregations-bucket-significantterms-aggregation,significant terms aggregation>> is a notable exception).
--
Keywords::
Keywords are exact values like email addresses, hostnames, status codes, or
tags. They are typically used for filtering (_Find me all blog posts where
++status++ is ++published++_), for sorting, and for aggregations. Keyword
fields are `not_analyzed`. Instead, the exact string value is added to the
index as a single term.
Below is an example of a mapping for a full text (`analyzed`) and a keyword
(`not_analyzed`) string field:
[source,js]
--------------------------------
PUT my_index
{
"mappings": {
"my_type": {
"properties": {
"full_name": { <1>
"type": "string"
},
"status": {
"type": "string", <2>
"index": "not_analyzed"
}
}
}
}
}
--------------------------------
// AUTOSENSE
<1> The `full_name` field is an `analyzed` full text field -- `index:analyzed` is the default.
<2> The `status` field is a `not_analyzed` keyword field.
Sometimes it is useful to have both a full text (`analyzed`) and a keyword
(`not_analyzed`) version of the same field: one for full text search and the
other for aggregations and sorting. This can be achieved with
<<multi-fields,multi-fields>>.
[[string-params]]
==== Parameters for string fields
The following parameters are accepted by `string` fields:
[horizontal]
<<analyzer,`analyzer`>>::
The <<analysis,analyzer>> which should be used for
<<mapping-index,`analyzed`>> string fields, both at index-time and at
search-time (unless overridden by the <<search-analyzer,`search_analyzer`>>).
Defaults to the default index analyzer, or the
<<analysis-standard-analyzer,`standard` analyzer>>.
<<mapping-boost,`boost`>>::
Mapping field-level query time boosting. Accepts a floating point number, defaults
to `1.0`.
<<doc-values,`doc_values`>>::
Should the field be stored on disk in a column-stride fashion, so that it
can later be used for sorting, aggregations, or scripting? Accepts `true`
or `false`. Defaults to `true` for `not_analyzed` fields. Analyzed fields
do not support doc values.
<<fielddata,`fielddata`>>::
Can the field use in-memory fielddata for sorting, aggregations,
or scripting? Accepts `disabled` or `paged_bytes` (default).
Not analyzed fields will use <<doc-values,doc values>> in preference
to fielddata.
<<multi-fields,`fields`>>::
Multi-fields allow the same string value to be indexed in multiple ways for
different purposes, such as one field for search and a multi-field for
sorting and aggregations, or the same string value analyzed by different
analyzers.
<<ignore-above,`ignore_above`>>::
Do not index or analyze any string longer than this value. Defaults to `0` (disabled).
<<include-in-all,`include_in_all`>>::
Whether or not the field value should be included in the
<<mapping-all-field,`_all`>> field? Accepts `true` or `false`. Defaults
to `false` if <<mapping-index,`index`>> is set to `no`, or if a parent
<<object,`object`>> field sets `include_in_all` to `false`.
Otherwise defaults to `true`.
<<mapping-index,`index`>>::
Should the field be searchable? Accepts `analyzed` (default, treat as full-text field),
`not_analyzed` (treat as keyword field) and `no`.
<<index-options,`index_options`>>::
What information should be stored in the index, for search and highlighting purposes.
Defaults to `positions` for <<mapping-index,`analyzed`>> fields, and to `docs` for
`not_analyzed` fields.
<<norms,`norms`>>::
+
--
Whether field-length should be taken into account when scoring queries.
Defaults depend on the <<mapping-index,`index`>> setting:
* `analyzed` fields default to `{ "enabled": true, "loading": "lazy" }`.
* `not_analyzed` fields default to `{ "enabled": false }`.
--
<<null-value,`null_value`>>::
Accepts a string value which is substituted for any explicit `null`
values. Defaults to `null`, which means the field is treated as missing.
If the field is `analyzed`, the `null_value` will also be analyzed.
<<position-increment-gap,`position_increment_gap`>>::
The number of fake term positions which should be inserted between
each element of an array of strings. Defaults to 0.
The number of fake term position which should be inserted between each
element of an array of strings. Defaults to the position_increment_gap
configured on the analyzer which defaults to 100. 100 was chosen because it
prevents phrase queries with reasonably large slops (less than 100) from
matching terms across field values.
<<mapping-store,`store`>>::
Whether the field value should be stored and retrievable separately from
the <<mapping-source-field,`_source`>> field. Accepts `true` or `false`
(default).
<<search-analyzer,`search_analyzer`>>::
The <<analyzer,`analyzer`>> that should be used at search time on
<<mapping-index,`analyzed`>> fields. Defaults to the `analyzer` setting.
<<search-quote-analyzer,`search_quote_analyzer`>>::
The <<analyzer,`analyzer`>> that should be used at search time when a
phrase is encountered. Defaults to the `search_analyzer` setting.
<<similarity,`similarity`>>::
Which scoring algorithm or _similarity_ should be used. Defaults
to `classic`, which uses TF/IDF.
<<term-vector,`term_vector`>>::
Whether term vectors should be stored for an <<mapping-index,`analyzed`>>
field. Defaults to `no`.

View File

@ -0,0 +1,139 @@
[[text]]
=== Text datatype
A field to index full-text values, such as the body of an email or the
description of a product. These fields are `analyzed`, that is, they are passed through an
<<analysis,analyzer>> to convert the string into a list of individual terms
before being indexed. The analysis process allows Elasticsearch to search for
individual words _within_ each full text field. Text fields are not
used for sorting and seldom used for aggregations (although the
<<search-aggregations-bucket-significantterms-aggregation,significant terms aggregation>>
is a notable exception).
If you need to index structured content such as email addresses, hostnames, status
codes, or tags, it is likely that you should rather use a <<keyword,`keyword`>> field.
Below is an example of a mapping for a text field:
[source,js]
--------------------------------
PUT my_index
{
"mappings": {
"my_type": {
"properties": {
"full_name": {
"type": "text"
}
}
}
}
}
--------------------------------
// AUTOSENSE
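A field like this is typically searched with full-text queries; a minimal
sketch (the query string is illustrative):

[source,js]
--------------------------------
GET my_index/_search
{
  "query": {
    "match": {
      "full_name": "John Smith"
    }
  }
}
--------------------------------
// AUTOSENSE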
Sometimes it is useful to have both a full text (`text`) and a keyword
(`keyword`) version of the same field: one for full text search and the
other for aggregations and sorting. This can be achieved with
<<multi-fields,multi-fields>>.
[[text-params]]
==== Parameters for text fields
The following parameters are accepted by `text` fields:
[horizontal]
<<analyzer,`analyzer`>>::
The <<analysis,analyzer>> which should be used for
<<mapping-index,`analyzed`>> string fields, both at index-time and at
search-time (unless overridden by the <<search-analyzer,`search_analyzer`>>).
Defaults to the default index analyzer, or the
<<analysis-standard-analyzer,`standard` analyzer>>.
<<mapping-boost,`boost`>>::
Mapping field-level query time boosting. Accepts a floating point number, defaults
to `1.0`.
<<global-ordinals,`eager_global_ordinals`>>::
Should global ordinals be loaded eagerly on refresh? Accepts `true` or `false`
(default). Enabling this is a good idea on fields that are frequently used for
(significant) terms aggregations.
<<fielddata,`fielddata`>>::
Can the field use in-memory fielddata for sorting, aggregations,
or scripting? Accepts `true` or `false` (default).
<<field-data-filtering,`fielddata_frequency_filter`>>::
Expert settings which allow you to decide which values to load in memory when `fielddata`
is enabled. By default all values are loaded.
<<multi-fields,`fields`>>::
Multi-fields allow the same string value to be indexed in multiple ways for
different purposes, such as one field for search and a multi-field for
sorting and aggregations, or the same string value analyzed by different
analyzers.
<<include-in-all,`include_in_all`>>::
Whether or not the field value should be included in the
<<mapping-all-field,`_all`>> field? Accepts `true` or `false`. Defaults
to `false` if <<mapping-index,`index`>> is set to `false`, or if a parent
<<object,`object`>> field sets `include_in_all` to `false`.
Otherwise defaults to `true`.
<<mapping-index,`index`>>::
Should the field be searchable? Accepts `true` (default) or `false`.
<<index-options,`index_options`>>::
What information should be stored in the index, for search and highlighting purposes.
Defaults to `positions`.
<<norms,`norms`>>::
Whether field-length should be taken into account when scoring queries.
Accepts `true` (default) or `false`.
<<position-increment-gap,`position_increment_gap`>>::
The number of fake term positions which should be inserted between each
element of an array of strings. Defaults to the `position_increment_gap`
configured on the analyzer, which defaults to `100`. `100` was chosen because
it prevents phrase queries with reasonably large slops (less than 100) from
matching terms across field values.
<<mapping-store,`store`>>::
Whether the field value should be stored and retrievable separately from
the <<mapping-source-field,`_source`>> field. Accepts `true` or `false`
(default).
<<search-analyzer,`search_analyzer`>>::
The <<analyzer,`analyzer`>> that should be used at search time on
<<mapping-index,`analyzed`>> fields. Defaults to the `analyzer` setting.
<<search-quote-analyzer,`search_quote_analyzer`>>::
The <<analyzer,`analyzer`>> that should be used at search time when a
phrase is encountered. Defaults to the `search_analyzer` setting.
<<similarity,`similarity`>>::
Which scoring algorithm or _similarity_ should be used. Defaults
to `classic`, which uses TF/IDF.
<<term-vector,`term_vector`>>::
Whether term vectors should be stored for an <<mapping-index,`analyzed`>>
field. Defaults to `no`.

View File

@ -15,7 +15,7 @@ PUT my_index
"my_type": { "my_type": {
"properties": { "properties": {
"name": { <1> "name": { <1>
"type": "string", "type": "text",
"fields": { "fields": {
"length": { <2> "length": { <2>
"type": "token_count", "type": "token_count",

View File

@ -225,3 +225,58 @@ The `addSuggestion` method now required the user specified suggestion name, prev
===== SuggestionBuilder ===== SuggestionBuilder
The `field` setter has been deleted. Instead the field name needs to be specified as constructor argument. The `field` setter has been deleted. Instead the field name needs to be specified as constructor argument.
==== SearchSourceBuilder
All methods which take an `XContentBuilder`, `BytesReference`, `Map<String, Object>` or `byte[]` have been removed in favor of providing the
relevant builder object for that feature (e.g. `HighlightBuilder`, `AggregationBuilder`, `SuggestBuilder`). This means that all search requests
can now be validated at call time which results in much clearer errors.
The `defaultResourceWindowSize(int)` method has been removed. The window size should be set explicitly on all `RescoreBuilder` objects.
==== SearchRequestBuilder
All methods which take an `XContentBuilder`, `BytesReference`, `Map<String, Object>` or `byte[]` have been removed in favor of providing the
relevant builder object for that feature (e.g. `HighlightBuilder`, `AggregationBuilder`, `SuggestBuilder`). This means that all search requests
can now be validated at call time which results in much clearer errors.
All highlighter methods have been removed in favor of a single `highlighter(HighlightBuilder)` method.
The `setExtraSource(SearchSourceBuilder)` method has been removed.
The `setTemplateSource(String)` and `setTemplateSource(BytesReference)` methods have been removed. Use `setTemplate(Template)` instead.
`setRescorer(Rescorer)` and `setRescorer(Rescorer, int)` have been removed in favor of `setRescorer(RescoreBuilder)` and `setRescorer(RescoreBuilder, int)`
==== SearchRequest
All `template` methods have been removed in favor of a single `template(Template)` method.
All `source` methods have been removed in favor of a single `source(SearchSourceBuilder)` method. This means that all search requests can now be validated
at call time which results in much clearer errors.
All `extraSource` methods have been removed.
==== AggregationBuilder
All methods which take an `XContentBuilder`, `BytesReference`, `Map<String, Object>` or `byte[]` have been removed in favor of providing the
relevant builder object (i.e. `subAggregation(AggregationBuilder)` or `subAggregation(PipelineAggregationBuilder)`). This means that all
requests can now be validated at call time which results in much clearer errors.
==== ValidateQueryRequest
`source(QuerySourceBuilder)`, `source(Map)`, `source(XContentBuilder)`, `source(String)`, `source(byte[])`, `source(byte[], int, int)`,
`source(BytesReference)` and `source()` have been removed in favor of using `query(QueryBuilder<?>)` and `query()`
==== ValidateQueryRequestBuilder
`setSource()` methods have been removed in favor of using `setQuery(QueryBuilder<?>)`
==== ExplainRequest
`source(QuerySourceBuilder)`, `source(Map)`, `source(BytesReference)` and `source()` have been removed in favor of using
`query(QueryBuilder<?>)` and `query()`
==== ExplainRequestBuilder
The `setQuery(BytesReference)` method has been removed in favor of using `setQuery(QueryBuilder<?>)`

Some files were not shown because too many files have changed in this diff