Move from CamelCase to '_' casing, closes #116.
This commit is contained in:
parent 3f8acbd994
commit 3b5b4b4c3a

.idea/dictionaries/kimchy.xml (generated)
@@ -50,6 +50,7 @@
     <w>stopwords</w>
     <w>streamable</w>
     <w>successul</w>
     <w>tagline</w>
     <w>throwable</w>
     <w>tika</w>
     <w>timestamp</w>
@@ -53,29 +53,27 @@ public class TransportDeleteAction extends TransportShardReplicationOperationAct
                                TransportCreateIndexAction createIndexAction) {
         super(settings, transportService, clusterService, indicesService, threadPool, shardStateAction);
         this.createIndexAction = createIndexAction;
-        this.autoCreateIndex = componentSettings.getAsBoolean("autoCreateIndex", true);
+        this.autoCreateIndex = settings.getAsBoolean("action.auto_create_index", true);
     }

     @Override protected void doExecute(final DeleteRequest deleteRequest, final ActionListener<DeleteResponse> listener) {
-        if (autoCreateIndex) {
-            if (!clusterService.state().metaData().hasConcreteIndex(deleteRequest.index())) {
-                createIndexAction.execute(new CreateIndexRequest(deleteRequest.index()), new ActionListener<CreateIndexResponse>() {
-                    @Override public void onResponse(CreateIndexResponse result) {
-                        TransportDeleteAction.super.doExecute(deleteRequest, listener);
-                    }
+        if (autoCreateIndex && !clusterService.state().metaData().hasConcreteIndex(deleteRequest.index())) {
+            createIndexAction.execute(new CreateIndexRequest(deleteRequest.index()), new ActionListener<CreateIndexResponse>() {
+                @Override public void onResponse(CreateIndexResponse result) {
+                    TransportDeleteAction.super.doExecute(deleteRequest, listener);
+                }

-                    @Override public void onFailure(Throwable e) {
-                        if (ExceptionsHelper.unwrapCause(e) instanceof IndexAlreadyExistsException) {
-                            // we have the index, do it
-                            TransportDeleteAction.super.doExecute(deleteRequest, listener);
-                        } else {
-                            listener.onFailure(e);
-                        }
+                @Override public void onFailure(Throwable e) {
+                    if (ExceptionsHelper.unwrapCause(e) instanceof IndexAlreadyExistsException) {
+                        // we have the index, do it
+                        TransportDeleteAction.super.doExecute(deleteRequest, listener);
+                    } else {
+                        listener.onFailure(e);
+                    }
-                });
-            } else {
-                super.doExecute(deleteRequest, listener);
-            }
-        }
+            });
+        } else {
+            super.doExecute(deleteRequest, listener);
+        }
     }

@@ -69,8 +69,8 @@ public class TransportIndexAction extends TransportShardReplicationOperationActi
         super(settings, transportService, clusterService, indicesService, threadPool, shardStateAction);
         this.createIndexAction = createIndexAction;
         this.mappingUpdatedAction = mappingUpdatedAction;
-        this.autoCreateIndex = settings.getAsBoolean("action.autoCreateIndex", true);
-        this.allowIdGeneration = componentSettings.getAsBoolean("allowIdGeneration", true);
+        this.autoCreateIndex = settings.getAsBoolean("action.auto_create_index", true);
+        this.allowIdGeneration = componentSettings.getAsBoolean("allow_id_generation", true);
     }

     @Override protected void doExecute(final IndexRequest indexRequest, final ActionListener<IndexResponse> listener) {
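Note: the auto-create flag now comes from the node settings under a snake_case key rather than the per-component camelCase key. A minimal usage sketch; the settingsBuilder() call is an assumption about the era's org.elasticsearch.util.settings.ImmutableSettings API, not part of this diff:

    // sketch: disable index auto-creation with the renamed key
    Settings settings = ImmutableSettings.settingsBuilder()
            .put("action.auto_create_index", false) // was "action.autoCreateIndex"
            .build();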
@@ -58,7 +58,7 @@ public class MoreLikeThisRequest implements ActionRequest {
     private String[] fields;

     private float percentTermsToMatch = -1;
-    private int minTermFrequency = -1;
+    private int minTermFreq = -1;
     private int maxQueryTerms = -1;
     private String[] stopWords = null;
     private int minDocFreq = -1;
@@ -165,16 +165,16 @@ public class MoreLikeThisRequest implements ActionRequest {
     /**
      * The frequency below which terms will be ignored in the source doc. Defaults to <tt>2</tt>.
      */
-    public MoreLikeThisRequest minTermFrequency(int minTermFrequency) {
-        this.minTermFrequency = minTermFrequency;
+    public MoreLikeThisRequest minTermFreq(int minTermFreq) {
+        this.minTermFreq = minTermFreq;
         return this;
     }

     /**
      * The frequency below which terms will be ignored in the source doc. Defaults to <tt>2</tt>.
      */
-    public int minTermFrequency() {
-        return this.minTermFrequency;
+    public int minTermFreq() {
+        return this.minTermFreq;
     }

     /**
@@ -470,7 +470,7 @@ public class MoreLikeThisRequest implements ActionRequest {
         }

         percentTermsToMatch = in.readFloat();
-        minTermFrequency = in.readVInt();
+        minTermFreq = in.readVInt();
         maxQueryTerms = in.readVInt();
         size = in.readVInt();
         if (size > 0) {
@@ -539,7 +539,7 @@ public class MoreLikeThisRequest implements ActionRequest {
         }

         out.writeFloat(percentTermsToMatch);
-        out.writeVInt(minTermFrequency);
+        out.writeVInt(minTermFreq);
         out.writeVInt(maxQueryTerms);
         if (stopWords == null) {
             out.writeVInt(0);

@@ -227,7 +227,7 @@ public class TransportMoreLikeThisAction extends BaseAction<MoreLikeThisRequest,
                 .maxDocFreq(request.maxDocFreq())
                 .minWordLen(request.minWordLen())
                 .maxWordLen(request.maxWordLen())
-                .minTermFrequency(request.minTermFrequency())
+                .minTermFreq(request.minTermFreq())
                 .maxQueryTerms(request.maxQueryTerms())
                 .stopWords(request.stopWords());
         boolBuilder.should(mlt);
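The rename is visible to API users: MoreLikeThisRequest.minTermFrequency(int) becomes minTermFreq(int). A sketch under the assumption that the request is constructed with an index name (the constructor form and "my_index" are illustrative, not taken from the diff):

    MoreLikeThisRequest request = new MoreLikeThisRequest("my_index") // hypothetical index name
            .minTermFreq(2)      // was minTermFrequency(2)
            .maxQueryTerms(25);  // unchanged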
@@ -126,7 +126,7 @@ public class SearchResponse implements ActionResponse, ToJson {
             builder.startObject();
             if (shardFailure.shard() != null) {
                 builder.field("index", shardFailure.shard().index());
-                builder.field("shardId", shardFailure.shard().shardId());
+                builder.field("shard", shardFailure.shard().shardId());
             }
             builder.field("reason", shardFailure.reason());
             builder.endObject();

@@ -45,7 +45,9 @@ import static org.elasticsearch.util.settings.ImmutableSettings.Builder.*;
 import static org.elasticsearch.util.settings.ImmutableSettings.*;

 /**
- * @author kimchy (Shay Banon)
+ * A main entry point when starting from the command line.
+ *
+ * @author kimchy (shay.banon)
  */
 public class Bootstrap {

@@ -84,7 +84,11 @@ public class TransportClientNodesService extends AbstractComponent implements Cl
         this.transportService = transportService;
         this.threadPool = threadPool;

-        this.nodesSamplerInterval = componentSettings.getAsTime("nodesSamplerInterval", timeValueSeconds(1));
+        this.nodesSamplerInterval = componentSettings.getAsTime("nodes_sampler_interval", timeValueSeconds(1));
+
+        if (logger.isDebugEnabled()) {
+            logger.debug("node_sampler_interval[" + nodesSamplerInterval + "]");
+        }

         this.nodesSamplerFuture = threadPool.scheduleWithFixedDelay(nodesSampler, nodesSamplerInterval);

@@ -45,9 +45,9 @@ import static org.elasticsearch.util.settings.ImmutableSettings.*;
 @Immutable
 public class IndexMetaData {

-    public static final String SETTING_NUMBER_OF_SHARDS = "index.numberOfShards";
+    public static final String SETTING_NUMBER_OF_SHARDS = "index.number_of_shards";

-    public static final String SETTING_NUMBER_OF_REPLICAS = "index.numberOfReplicas";
+    public static final String SETTING_NUMBER_OF_REPLICAS = "index.number_of_replicas";

     private final String index;

@@ -43,7 +43,7 @@ public class DiscoveryService extends AbstractLifecycleComponent<DiscoveryServic
     @Inject public DiscoveryService(Settings settings, Discovery discovery) {
         super(settings);
         this.discovery = discovery;
-        this.initialStateTimeout = componentSettings.getAsTime("initialStateTimeout", TimeValue.timeValueSeconds(30));
+        this.initialStateTimeout = componentSettings.getAsTime("initial_state_timeout", TimeValue.timeValueSeconds(30));
     }

     @Override protected void doStart() throws ElasticSearchException {
@@ -1,271 +0,0 @@
/*
 * Licensed to Elastic Search and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Elastic Search licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.discovery.jgroups;

import com.google.inject.Inject;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ProcessedClusterStateUpdateTask;
import org.elasticsearch.cluster.node.Node;
import org.elasticsearch.discovery.Discovery;
import org.elasticsearch.discovery.DiscoveryException;
import org.elasticsearch.discovery.InitialStateDiscoveryListener;
import org.elasticsearch.env.Environment;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.util.component.AbstractLifecycleComponent;
import org.elasticsearch.util.io.HostResolver;
import org.elasticsearch.util.settings.Settings;
import org.jgroups.*;

import java.io.IOException;
import java.net.Inet4Address;
import java.net.Inet6Address;
import java.net.InetAddress;
import java.net.URL;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

import static com.google.common.collect.Maps.*;
import static org.elasticsearch.cluster.ClusterState.*;
import static org.elasticsearch.cluster.node.Nodes.*;

/**
 * A simplified discovery implementation based on JGroups that only works in client mode.
 *
 * @author kimchy (shay.banon)
 */
public class JgroupsClientDiscovery extends AbstractLifecycleComponent<Discovery> implements Discovery, Receiver {

    private final ClusterName clusterName;

    private final ThreadPool threadPool;

    private final ClusterService clusterService;

    private final Channel channel;

    private volatile ScheduledFuture reconnectFuture;

    private final AtomicBoolean initialStateSent = new AtomicBoolean();

    private final CopyOnWriteArrayList<InitialStateDiscoveryListener> initialStateListeners = new CopyOnWriteArrayList<InitialStateDiscoveryListener>();

    private final Node localNode = new Node("#client#", null); // dummy local node

    @Inject public JgroupsClientDiscovery(Settings settings, Environment environment, ClusterName clusterName, ClusterService clusterService, ThreadPool threadPool) {
        super(settings);
        this.clusterName = clusterName;
        this.clusterService = clusterService;
        this.threadPool = threadPool;

        String config = componentSettings.get("config", "udp");
        String actualConfig = config;
        if (!config.endsWith(".xml")) {
            actualConfig = "jgroups/" + config + ".xml";
        }
        URL configUrl = environment.resolveConfig(actualConfig);
        logger.debug("Using configuration [{}]", configUrl);

        Map<String, String> sysPropsSet = newHashMap();
        try {
            // prepare system properties to configure jgroups based on the settings
            for (Map.Entry<String, String> entry : settings.getAsMap().entrySet()) {
                if (entry.getKey().startsWith("discovery.jgroups")) {
                    String jgroupsKey = entry.getKey().substring("discovery.".length());
                    if (System.getProperty(jgroupsKey) == null) {
                        sysPropsSet.put(jgroupsKey, entry.getValue());
                        System.setProperty(jgroupsKey, entry.getValue());
                    }
                }
            }

            if (System.getProperty("jgroups.bind_addr") == null) {
                // automatically set the bind address based on ElasticSearch default bindings...
                try {
                    InetAddress bindAddress = HostResolver.resultBindHostAddress(null, settings, HostResolver.LOCAL_IP);
                    if ((bindAddress instanceof Inet4Address && HostResolver.isIPv4()) || (bindAddress instanceof Inet6Address && !HostResolver.isIPv4())) {
                        sysPropsSet.put("jgroups.bind_addr", bindAddress.getHostAddress());
                        System.setProperty("jgroups.bind_addr", bindAddress.getHostAddress());
                    }
                } catch (IOException e) {
                    // ignore this
                }
            }

            channel = new JChannel(configUrl);
        } catch (ChannelException e) {
            throw new DiscoveryException("Failed to create jgroups channel with config [" + configUrl + "]", e);
        } finally {
            for (String keyToRemove : sysPropsSet.keySet()) {
                System.getProperties().remove(keyToRemove);
            }
        }
    }

    @Override protected void doStart() throws ElasticSearchException {
        channel.setReceiver(this);
        try {
            channel.connect(clusterName.value());
        } catch (ChannelException e) {
            throw new DiscoveryException("Failed to connect to cluster [" + clusterName.value() + "]", e);
        }
        connectTillMasterIfNeeded();
        sendInitialStateEventIfNeeded();
    }

    @Override protected void doStop() throws ElasticSearchException {
        if (reconnectFuture != null) {
            reconnectFuture.cancel(true);
            reconnectFuture = null;
        }
        if (channel.isConnected()) {
            channel.disconnect();
        }
    }

    @Override protected void doClose() throws ElasticSearchException {
        if (channel.isOpen()) {
            channel.close();
        }
    }

    @Override public void addListener(InitialStateDiscoveryListener listener) {
        initialStateListeners.add(listener);
    }

    @Override public void removeListener(InitialStateDiscoveryListener listener) {
        initialStateListeners.remove(listener);
    }

    @Override public void receive(Message msg) {
        if (msg.getSrc().equals(channel.getAddress())) {
            return; // my own message, ignore.
        }
        if (msg.getSrc().equals(channel.getView().getCreator())) {
            try {
                byte[] buffer = msg.getBuffer();
                final ClusterState origClusterState = ClusterState.Builder.fromBytes(buffer, settings, localNode);
                // remove the dummy local node
                final ClusterState clusterState = newClusterStateBuilder().state(origClusterState)
                        .nodes(newNodesBuilder().putAll(origClusterState.nodes()).remove(localNode.id())).build();
                System.err.println("Nodes: " + clusterState.nodes().prettyPrint());
                clusterService.submitStateUpdateTask("jgroups-disco-receive(from master)", new ProcessedClusterStateUpdateTask() {
                    @Override public ClusterState execute(ClusterState currentState) {
                        return clusterState;
                    }

                    @Override public void clusterStateProcessed(ClusterState clusterState) {
                        sendInitialStateEventIfNeeded();
                    }
                });
            } catch (Exception e) {
                logger.error("Received corrupted cluster state.", e);
            }
        }
    }

    @Override public void viewAccepted(View newView) {
        // we became master, reconnect
        if (channel.getAddress().equals(newView.getCreator())) {
            try {
                channel.disconnect();
            } catch (Exception e) {
                // ignore
            }
            if (!lifecycle.started()) {
                return;
            }
            connectTillMasterIfNeeded();
        }
    }

    private void sendInitialStateEventIfNeeded() {
        if (initialStateSent.compareAndSet(false, true)) {
            for (InitialStateDiscoveryListener listener : initialStateListeners) {
                listener.initialStateProcessed();
            }
        }
    }

    @Override public String nodeDescription() {
        return "clientNode";
    }

    @Override public void publish(ClusterState clusterState) {
        throw new ElasticSearchIllegalStateException("When in client mode, cluster state should not be published");
    }

    @Override public boolean firstMaster() {
        return false;
    }

    @Override public byte[] getState() {
        return new byte[0];
    }

    @Override public void setState(byte[] state) {
    }

    @Override public void suspect(Address suspectedMember) {
    }

    @Override public void block() {
        logger.warn("Blocked...");
    }

    private void connectTillMasterIfNeeded() {
        Runnable command = new Runnable() {
            @Override public void run() {
                try {
                    channel.connect(clusterName.value());
                    if (isMaster()) {
                        logger.debug("Act as master, reconnecting...");
                        channel.disconnect();
                        reconnectFuture = threadPool.schedule(this, 3, TimeUnit.SECONDS);
                    } else {
                        logger.debug("Reconnected not as master");
                        reconnectFuture = null;
                    }
                } catch (Exception e) {
                    logger.warn("Failed to connect to cluster", e);
                }
            }
        };

        if (channel.isConnected()) {
            if (!isMaster()) {
                logger.debug("Connected not as master");
                return;
            }
            channel.disconnect();
        }
        reconnectFuture = threadPool.schedule(command, 3, TimeUnit.SECONDS);
    }

    private boolean isMaster() {
        return channel.getAddress().equals(channel.getView().getCreator());
    }
}
@@ -35,10 +35,6 @@ public class JgroupsDiscoveryModule extends AbstractModule {
     }

     @Override protected void configure() {
-        if (settings.getAsBoolean("discovery.client", false)) {
-            bind(Discovery.class).to(JgroupsClientDiscovery.class).asEagerSingleton();
-        } else {
-            bind(Discovery.class).to(JgroupsDiscovery.class).asEagerSingleton();
-        }
+        bind(Discovery.class).to(JgroupsDiscovery.class).asEagerSingleton();
     }
 }

@@ -72,7 +72,7 @@ public class GatewayService extends AbstractLifecycleComponent<GatewayService> i
         this.discoveryService = discoveryService;
         this.threadPool = threadPool;
         this.metaDataService = metaDataService;
-        this.initialStateTimeout = componentSettings.getAsTime("initialStateTimeout", TimeValue.timeValueSeconds(30));
+        this.initialStateTimeout = componentSettings.getAsTime("initial_state_timeout", TimeValue.timeValueSeconds(30));
     }

     @Override protected void doStart() throws ElasticSearchException {

@@ -96,16 +96,16 @@ public class NettyHttpServerTransport extends AbstractLifecycleComponent<HttpSer

     @Inject public NettyHttpServerTransport(Settings settings) {
         super(settings);
-        SizeValue maxContentLength = componentSettings.getAsSize("maxContentLength", new SizeValue(100, SizeUnit.MB));
-        this.workerCount = componentSettings.getAsInt("workerCount", Runtime.getRuntime().availableProcessors());
+        SizeValue maxContentLength = componentSettings.getAsSize("max_content_length", new SizeValue(100, SizeUnit.MB));
+        this.workerCount = componentSettings.getAsInt("worker_count", Runtime.getRuntime().availableProcessors());
         this.port = componentSettings.get("port", "9200-9300");
-        this.bindHost = componentSettings.get("bindHost");
-        this.publishHost = componentSettings.get("publishHost");
-        this.tcpNoDelay = componentSettings.getAsBoolean("tcpNoDelay", true);
-        this.tcpKeepAlive = componentSettings.getAsBoolean("tcpKeepAlive", null);
-        this.reuseAddress = componentSettings.getAsBoolean("reuseAddress", true);
-        this.tcpSendBufferSize = componentSettings.getAsSize("tcpSendBufferSize", null);
-        this.tcpReceiveBufferSize = componentSettings.getAsSize("tcpReceiveBufferSize", null);
+        this.bindHost = componentSettings.get("bind_host");
+        this.publishHost = componentSettings.get("publish_host");
+        this.tcpNoDelay = componentSettings.getAsBoolean("tcp_no_delay", true);
+        this.tcpKeepAlive = componentSettings.getAsBoolean("tcp_keep_alive", null);
+        this.reuseAddress = componentSettings.getAsBoolean("reuse_address", true);
+        this.tcpSendBufferSize = componentSettings.getAsSize("tcp_send_buffer_size", null);
+        this.tcpReceiveBufferSize = componentSettings.getAsSize("tcp_receive_buffer_size", null);

         // validate max content length
         if (maxContentLength.bytes() > Integer.MAX_VALUE) {
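All HTTP transport keys move to snake_case under the component's settings prefix. A sketch of node settings using the new names; the "http." prefix and the settingsBuilder() call are assumptions based on the component's conventional prefix, and the values are illustrative:

    Settings settings = ImmutableSettings.settingsBuilder()
            .put("http.port", "9200-9300")
            .put("http.max_content_length", "100mb") // was maxContentLength
            .put("http.tcp_no_delay", true)          // was tcpNoDelay
            .put("http.reuse_address", true)         // was reuseAddress
            .build();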
@@ -95,11 +95,11 @@ public class AnalysisService extends AbstractIndexComponent implements Closeable
         if (!analyzerProviders.containsKey("default")) {
             analyzerProviders.put("default", new StandardAnalyzerProvider(index, indexSettings, "default", ImmutableSettings.Builder.EMPTY_SETTINGS));
         }
-        if (!analyzerProviders.containsKey("defaultIndex")) {
-            analyzerProviders.put("defaultIndex", analyzerProviders.get("default"));
+        if (!analyzerProviders.containsKey("default_index")) {
+            analyzerProviders.put("default_index", analyzerProviders.get("default"));
         }
-        if (!analyzerProviders.containsKey("defaultSearch")) {
-            analyzerProviders.put("defaultSearch", analyzerProviders.get("default"));
+        if (!analyzerProviders.containsKey("default_search")) {
+            analyzerProviders.put("default_search", analyzerProviders.get("default"));
         }

         // extended analyzers defaults
@@ -51,7 +51,7 @@ public class BrazilianAnalyzerProvider extends AbstractAnalyzerProvider<Brazilia
             this.stopWords = BrazilianAnalyzer.getDefaultStopSet();
         }

-        String[] stemExclusion = settings.getAsArray("stemExclusion");
+        String[] stemExclusion = settings.getAsArray("stem_exclusion");
         if (stemExclusion.length > 0) {
             this.stemExclusion = ImmutableSet.copyOf(Iterators.forArray(stemExclusion));
         } else {

@@ -40,7 +40,7 @@ public class BrazilianStemTokenFilterFactory extends AbstractTokenFilterFactory

     @Inject public BrazilianStemTokenFilterFactory(Index index, @IndexSettings Settings indexSettings, @Assisted String name, @Assisted Settings settings) {
         super(index, indexSettings, name);
-        String[] stemExclusion = settings.getAsArray("stemExclusion");
+        String[] stemExclusion = settings.getAsArray("stem_exclusion");
         if (stemExclusion.length > 0) {
             this.exclusions = ImmutableSet.copyOf(Iterators.forArray(stemExclusion));
         } else {

@@ -51,7 +51,7 @@ public class DutchAnalyzerProvider extends AbstractAnalyzerProvider<DutchAnalyze
             this.stopWords = DutchAnalyzer.getDefaultStopSet();
         }

-        String[] stemExclusion = settings.getAsArray("stemExclusion");
+        String[] stemExclusion = settings.getAsArray("stem_exclusion");
         if (stemExclusion.length > 0) {
             this.stemExclusion = ImmutableSet.copyOf(Iterators.forArray(stemExclusion));
         } else {

@@ -40,7 +40,7 @@ public class DutchStemTokenFilterFactory extends AbstractTokenFilterFactory {

     @Inject public DutchStemTokenFilterFactory(Index index, @IndexSettings Settings indexSettings, @Assisted String name, @Assisted Settings settings) {
         super(index, indexSettings, name);
-        String[] stemExclusion = settings.getAsArray("stemExclusion");
+        String[] stemExclusion = settings.getAsArray("stem_exclusion");
         if (stemExclusion.length > 0) {
             this.exclusions = ImmutableSet.copyOf(Iterators.forArray(stemExclusion));
         } else {

@@ -43,8 +43,8 @@ public class EdgeNGramTokenFilterFactory extends AbstractTokenFilterFactory {

     @Inject public EdgeNGramTokenFilterFactory(Index index, @IndexSettings Settings indexSettings, @Assisted String name, @Assisted Settings settings) {
         super(index, indexSettings, name);
-        this.minGram = settings.getAsInt("minGram", NGramTokenFilter.DEFAULT_MIN_NGRAM_SIZE);
-        this.maxGram = settings.getAsInt("maxGram", NGramTokenFilter.DEFAULT_MAX_NGRAM_SIZE);
+        this.minGram = settings.getAsInt("min_gram", NGramTokenFilter.DEFAULT_MIN_NGRAM_SIZE);
+        this.maxGram = settings.getAsInt("max_gram", NGramTokenFilter.DEFAULT_MAX_NGRAM_SIZE);
         this.side = EdgeNGramTokenFilter.Side.getSide(settings.get("side", EdgeNGramTokenizer.DEFAULT_SIDE.getLabel()));
     }

@@ -43,8 +43,8 @@ public class EdgeNGramTokenizerFactory extends AbstractTokenizerFactory {

     @Inject public EdgeNGramTokenizerFactory(Index index, @IndexSettings Settings indexSettings, @Assisted String name, @Assisted Settings settings) {
         super(index, indexSettings, name);
-        this.minGram = settings.getAsInt("minGram", NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE);
-        this.maxGram = settings.getAsInt("maxGram", NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE);
+        this.minGram = settings.getAsInt("min_gram", NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE);
+        this.maxGram = settings.getAsInt("max_gram", NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE);
         this.side = EdgeNGramTokenizer.Side.getSide(settings.get("side", EdgeNGramTokenizer.DEFAULT_SIDE.getLabel()));
     }

@@ -51,7 +51,7 @@ public class FrenchAnalyzerProvider extends AbstractAnalyzerProvider<FrenchAnaly
             this.stopWords = FrenchAnalyzer.getDefaultStopSet();
         }

-        String[] stemExclusion = settings.getAsArray("stemExclusion");
+        String[] stemExclusion = settings.getAsArray("stem_exclusion");
         if (stemExclusion.length > 0) {
             this.stemExclusion = ImmutableSet.copyOf(Iterators.forArray(stemExclusion));
         } else {

@@ -40,7 +40,7 @@ public class FrenchStemTokenFilterFactory extends AbstractTokenFilterFactory {

     @Inject public FrenchStemTokenFilterFactory(Index index, @IndexSettings Settings indexSettings, @Assisted String name, @Assisted Settings settings) {
         super(index, indexSettings, name);
-        String[] stemExclusion = settings.getAsArray("stemExclusion");
+        String[] stemExclusion = settings.getAsArray("stem_exclusion");
         if (stemExclusion.length > 0) {
             this.exclusions = ImmutableSet.copyOf(Iterators.forArray(stemExclusion));
         } else {

@@ -51,7 +51,7 @@ public class GermanAnalyzerProvider extends AbstractAnalyzerProvider<GermanAnaly
             this.stopWords = GermanAnalyzer.getDefaultStopSet();
         }

-        String[] stemExclusion = settings.getAsArray("stemExclusion");
+        String[] stemExclusion = settings.getAsArray("stem_exclusion");
         if (stemExclusion.length > 0) {
             this.stemExclusion = ImmutableSet.copyOf(Iterators.forArray(stemExclusion));
         } else {

@@ -40,7 +40,7 @@ public class GermanStemTokenFilterFactory extends AbstractTokenFilterFactory {

     @Inject public GermanStemTokenFilterFactory(Index index, @IndexSettings Settings indexSettings, @Assisted String name, @Assisted Settings settings) {
         super(index, indexSettings, name);
-        String[] stemExclusion = settings.getAsArray("stemExclusion");
+        String[] stemExclusion = settings.getAsArray("stem_exclusion");
         if (stemExclusion.length > 0) {
             this.exclusions = ImmutableSet.copyOf(Iterators.forArray(stemExclusion));
         } else {
@@ -38,7 +38,7 @@ public class KeywordTokenizerFactory extends AbstractTokenizerFactory {

     @Inject public KeywordTokenizerFactory(Index index, @IndexSettings Settings indexSettings, @Assisted String name, @Assisted Settings settings) {
         super(index, indexSettings, name);
-        bufferSize = settings.getAsInt("bufferSize", 256);
+        bufferSize = settings.getAsInt("buffer_size", 256);
     }

     @Override public Tokenizer create(Reader reader) {

@@ -40,8 +40,8 @@ public class NGramTokenFilterFactory extends AbstractTokenFilterFactory {

     @Inject public NGramTokenFilterFactory(Index index, @IndexSettings Settings indexSettings, @Assisted String name, @Assisted Settings settings) {
         super(index, indexSettings, name);
-        this.minGram = settings.getAsInt("minGram", NGramTokenFilter.DEFAULT_MIN_NGRAM_SIZE);
-        this.maxGram = settings.getAsInt("maxGram", NGramTokenFilter.DEFAULT_MAX_NGRAM_SIZE);
+        this.minGram = settings.getAsInt("min_gram", NGramTokenFilter.DEFAULT_MIN_NGRAM_SIZE);
+        this.maxGram = settings.getAsInt("max_gram", NGramTokenFilter.DEFAULT_MAX_NGRAM_SIZE);
     }

     @Override public TokenStream create(TokenStream tokenStream) {

@@ -40,8 +40,8 @@ public class NGramTokenizerFactory extends AbstractTokenizerFactory {

     @Inject public NGramTokenizerFactory(Index index, @IndexSettings Settings indexSettings, @Assisted String name, @Assisted Settings settings) {
         super(index, indexSettings, name);
-        this.minGram = settings.getAsInt("minGram", NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE);
-        this.maxGram = settings.getAsInt("maxGram", NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE);
+        this.minGram = settings.getAsInt("min_gram", NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE);
+        this.maxGram = settings.getAsInt("max_gram", NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE);
     }

     @Override public Tokenizer create(Reader reader) {
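Analysis factory settings follow the same convention: minGram/maxGram become min_gram/max_gram. A sketch of index settings wiring a custom n-gram tokenizer; the "index.analysis.tokenizer.<name>." prefix, the "ngram" type registration name, and "my_ngram" are assumptions about how these @Assisted settings are typically supplied, not part of this diff:

    Settings indexSettings = ImmutableSettings.settingsBuilder()
            .put("index.analysis.tokenizer.my_ngram.type", "ngram") // hypothetical tokenizer name
            .put("index.analysis.tokenizer.my_ngram.min_gram", 2)   // was minGram
            .put("index.analysis.tokenizer.my_ngram.max_gram", 4)   // was maxGram
            .build();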
@@ -38,8 +38,8 @@ public class ShingleTokenFilterFactory extends AbstractTokenFilterFactory {

     @Inject public ShingleTokenFilterFactory(Index index, @IndexSettings Settings indexSettings, @Assisted String name, @Assisted Settings settings) {
         super(index, indexSettings, name);
-        maxShingleSize = settings.getAsInt("maxShingleSize", ShingleFilter.DEFAULT_MAX_SHINGLE_SIZE);
-        outputUnigrams = settings.getAsBoolean("outputUnigrams", true);
+        maxShingleSize = settings.getAsInt("max_shingle_size", ShingleFilter.DEFAULT_MAX_SHINGLE_SIZE);
+        outputUnigrams = settings.getAsBoolean("output_unigrams", true);
     }

     @Override public TokenStream create(TokenStream tokenStream) {

@@ -51,7 +51,7 @@ public class StandardAnalyzerProvider extends AbstractAnalyzerProvider<StandardA
         } else {
             this.stopWords = ImmutableSet.copyOf((Iterable<? extends String>) StopAnalyzer.ENGLISH_STOP_WORDS_SET);
         }
-        maxTokenLength = settings.getAsInt("maxTokenLength", StandardAnalyzer.DEFAULT_MAX_TOKEN_LENGTH);
+        maxTokenLength = settings.getAsInt("max_token_length", StandardAnalyzer.DEFAULT_MAX_TOKEN_LENGTH);
         standardAnalyzer = new StandardAnalyzer(Version.LUCENE_CURRENT, this.stopWords);
         standardAnalyzer.setMaxTokenLength(maxTokenLength);
     }

@@ -40,7 +40,7 @@ public class StandardTokenizerFactory extends AbstractTokenizerFactory {

     @Inject public StandardTokenizerFactory(Index index, @IndexSettings Settings indexSettings, @Assisted String name, @Assisted Settings settings) {
         super(index, indexSettings, name);
-        maxTokenLength = settings.getAsInt("maxTokenLength", StandardAnalyzer.DEFAULT_MAX_TOKEN_LENGTH);
+        maxTokenLength = settings.getAsInt("max_token_length", StandardAnalyzer.DEFAULT_MAX_TOKEN_LENGTH);
     }

     @Override public Tokenizer create(Reader reader) {

@@ -51,8 +51,8 @@ public class StopTokenFilterFactory extends AbstractTokenFilterFactory {
         } else {
             this.stopWords = ImmutableSet.copyOf((Iterable<? extends String>) StopAnalyzer.ENGLISH_STOP_WORDS_SET);
         }
-        this.enablePositionIncrements = settings.getAsBoolean("enablePositionIncrements", true);
-        this.ignoreCase = settings.getAsBoolean("ignoreCase", false);
+        this.enablePositionIncrements = settings.getAsBoolean("enable_position_increments", true);
+        this.ignoreCase = settings.getAsBoolean("ignore_case", false);
     }

     @Override public TokenStream create(TokenStream tokenStream) {

@@ -52,7 +52,7 @@ public abstract class AbstractConcurrentMapFilterCache extends AbstractIndexComp
     protected AbstractConcurrentMapFilterCache(Index index, @IndexSettings Settings indexSettings, ThreadPool threadPool) {
         super(index, indexSettings);

-        this.readerCleanerSchedule = componentSettings.getAsTime("readerCleanerSchedule", TimeValue.timeValueMinutes(1));
+        this.readerCleanerSchedule = componentSettings.getAsTime("reader_cleaner_schedule", TimeValue.timeValueMinutes(1));

         logger.debug("Using [" + type() + "] filter cache with readerCleanerSchedule [{}]", readerCleanerSchedule);
@@ -41,8 +41,8 @@ public class KeepLastNDeletionPolicy extends AbstractIndexShardComponent impleme

     @Inject public KeepLastNDeletionPolicy(ShardId shardId, @IndexSettings Settings indexSettings) {
         super(shardId, indexSettings);
-        this.numToKeep = componentSettings.getAsInt("numToKeep", 5);
-        logger.debug("Using [KeepLastN] deletion policy with numToKeep [{}]", numToKeep);
+        this.numToKeep = componentSettings.getAsInt("num_to_keep", 5);
+        logger.debug("Using [KeepLastN] deletion policy with num_to_keep [{}]", numToKeep);
     }

     public void onInit(List<? extends IndexCommit> commits) throws IOException {

@@ -107,9 +107,9 @@ public class RobinEngine extends AbstractIndexShardComponent implements Engine,
         Preconditions.checkNotNull(deletionPolicy, "Snapshot deletion policy must be provided to the engine");
         Preconditions.checkNotNull(translog, "Translog must be provided to the engine");

-        this.ramBufferSize = componentSettings.getAsSize("ramBufferSize", new SizeValue(64, SizeUnit.MB));
-        this.refreshInterval = componentSettings.getAsTime("refreshInterval", timeValueSeconds(1));
-        this.termIndexInterval = componentSettings.getAsInt("termIndexInterval", IndexWriter.DEFAULT_TERM_INDEX_INTERVAL);
+        this.ramBufferSize = componentSettings.getAsSize("ram_buffer_size", new SizeValue(64, SizeUnit.MB));
+        this.refreshInterval = componentSettings.getAsTime("refresh_interval", timeValueSeconds(1));
+        this.termIndexInterval = componentSettings.getAsInt("term_index_interval", IndexWriter.DEFAULT_TERM_INDEX_INTERVAL);

         this.store = store;
         this.deletionPolicy = deletionPolicy;
@@ -125,7 +125,7 @@ public class RobinEngine extends AbstractIndexShardComponent implements Engine,
             throw new EngineAlreadyStartedException(shardId);
         }
         if (logger.isDebugEnabled()) {
-            logger.debug("Starting engine with ramBufferSize [" + ramBufferSize + "], refreshInterval [" + refreshInterval + "]");
+            logger.debug("Starting engine with ram_buffer_size [" + ramBufferSize + "], refresh_interval [" + refreshInterval + "]");
         }
         IndexWriter indexWriter = null;
         try {

@@ -80,8 +80,8 @@ public class IndexShardGatewayService extends AbstractIndexShardComponent implem
         this.shardGateway = shardGateway;
         this.store = store;

-        this.snapshotOnClose = componentSettings.getAsBoolean("snapshotOnClose", true);
-        this.snapshotInterval = componentSettings.getAsTime("snapshotInterval", TimeValue.timeValueSeconds(10));
+        this.snapshotOnClose = componentSettings.getAsBoolean("snapshot_on_close", true);
+        this.snapshotInterval = componentSettings.getAsTime("snapshot_interval", TimeValue.timeValueSeconds(10));
     }

     /**

@@ -92,7 +92,7 @@ public class MapperService extends AbstractIndexComponent implements Iterable<Do
         this.indexClassLoader = indexSettings.getClassLoader();

         this.dynamic = componentSettings.getAsBoolean("dynamic", true);
-        String dynamicMappingLocation = componentSettings.get("dynamicMappingLocation");
+        String dynamicMappingLocation = componentSettings.get("dynamic_mapping_location");
         URL dynamicMappingUrl;
         if (dynamicMappingLocation == null) {
             try {
@@ -168,12 +168,12 @@ public class JsonAllFieldMapper extends JsonFieldMapper<Void> implements AllFiel
         builder.startObject(JSON_TYPE);
         builder.field("enabled", enabled);
         builder.field("store", store.name().toLowerCase());
-        builder.field("termVector", termVector.name().toLowerCase());
+        builder.field("term_vector", termVector.name().toLowerCase());
         if (indexAnalyzer != null && !indexAnalyzer.name().startsWith("_")) {
-            builder.field("indexAnalyzer", indexAnalyzer.name());
+            builder.field("index_analyzer", indexAnalyzer.name());
         }
         if (searchAnalyzer != null && !searchAnalyzer.name().startsWith("_")) {
-            builder.field("searchAnalyzer", searchAnalyzer.name());
+            builder.field("search_analyzer", searchAnalyzer.name());
         }
         builder.endObject();
     }

@@ -100,7 +100,7 @@ public class JsonBinaryFieldMapper extends JsonFieldMapper<byte[]> {
     @Override public void toJson(JsonBuilder builder, Params params) throws IOException {
         builder.startObject(names.name());
         builder.field("type", jsonType());
-        builder.field("indexName", names.indexNameClean());
+        builder.field("index_name", names.indexNameClean());
         builder.endObject();
     }
 }

@@ -26,6 +26,7 @@ import org.codehaus.jackson.JsonToken;
 import org.codehaus.jackson.node.ObjectNode;
 import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.util.Booleans;
+import org.elasticsearch.util.json.JsonBuilder;
 import org.elasticsearch.util.lucene.Lucene;

 import java.io.IOException;
@@ -182,4 +183,11 @@ public class JsonBooleanFieldMapper extends JsonFieldMapper<Boolean> {
     @Override protected String jsonType() {
         return JSON_TYPE;
     }
+
+    @Override protected void doJsonBody(JsonBuilder builder) throws IOException {
+        super.doJsonBody(builder);
+        if (nullValue != null) {
+            builder.field("null_value", nullValue);
+        }
+    }
 }

@@ -192,7 +192,7 @@ public class JsonBoostFieldMapper extends JsonNumberFieldMapper<Float> implement
         builder.startObject(jsonType());
         builder.field("name", name());
         if (nullValue != null) {
-            builder.field("nullValue", nullValue);
+            builder.field("null_value", nullValue);
         }
         builder.endObject();
     }

@@ -229,10 +229,10 @@ public class JsonDateFieldMapper extends JsonNumberFieldMapper<Long> {
         super.doJsonBody(builder);
         builder.field("format", dateTimeFormatter.format());
         if (nullValue != null) {
-            builder.field("nullValue", nullValue);
+            builder.field("null_value", nullValue);
         }
         if (includeInAll != null) {
-            builder.field("includeInAll", includeInAll);
+            builder.field("include_in_all", includeInAll);
         }
     }
 }

@@ -132,9 +132,9 @@ public class JsonDocumentMapperParser implements DocumentMapperParser {
                 docBuilder.boostField(parseBoostField((ObjectNode) fieldNode, parserContext));
             } else if (JsonAllFieldMapper.JSON_TYPE.equals(fieldName) || "allField".equals(fieldName)) {
                 docBuilder.allField(parseAllField((ObjectNode) fieldNode, parserContext));
-            } else if ("indexAnalyzer".equals(fieldName)) {
+            } else if ("index_analyzer".equals(fieldName)) {
                 docBuilder.indexAnalyzer(analysisService.analyzer(fieldNode.getTextValue()));
-            } else if ("searchAnalyzer".equals(fieldName)) {
+            } else if ("search_analyzer".equals(fieldName)) {
                 docBuilder.searchAnalyzer(analysisService.analyzer(fieldNode.getTextValue()));
             } else if ("analyzer".equals(fieldName)) {
                 docBuilder.indexAnalyzer(analysisService.analyzer(fieldNode.getTextValue()));
@@ -180,7 +180,7 @@ public class JsonDocumentMapperParser implements DocumentMapperParser {
             Map.Entry<String, JsonNode> entry = propsIt.next();
             String propName = entry.getKey();
             JsonNode propNode = entry.getValue();
-            if (propName.equals("nullValue")) {
+            if (propName.equals("null_value")) {
                 builder.nullValue(nodeFloatValue(propNode));
             }
         }

@@ -211,10 +211,10 @@ public class JsonDoubleFieldMapper extends JsonNumberFieldMapper<Double> {
     @Override protected void doJsonBody(JsonBuilder builder) throws IOException {
         super.doJsonBody(builder);
         if (nullValue != null) {
-            builder.field("nullValue", nullValue);
+            builder.field("null_value", nullValue);
         }
         if (includeInAll != null) {
-            builder.field("includeInAll", includeInAll);
+            builder.field("include_in_all", includeInAll);
         }
     }
 }
@@ -365,18 +365,18 @@ public abstract class JsonFieldMapper<T> implements FieldMapper<T>, JsonMapper {

     protected void doJsonBody(JsonBuilder builder) throws IOException {
         builder.field("type", jsonType());
-        builder.field("indexName", names.indexNameClean());
+        builder.field("index_name", names.indexNameClean());
         builder.field("index", index.name().toLowerCase());
         builder.field("store", store.name().toLowerCase());
-        builder.field("termVector", termVector.name().toLowerCase());
+        builder.field("term_vector", termVector.name().toLowerCase());
         builder.field("boost", boost);
-        builder.field("omitNorms", omitNorms);
-        builder.field("omitTermFreqAndPositions", omitTermFreqAndPositions);
+        builder.field("omit_norms", omitNorms);
+        builder.field("omit_term_freq_and_positions", omitTermFreqAndPositions);
         if (indexAnalyzer != null && !indexAnalyzer.name().startsWith("_")) {
-            builder.field("indexAnalyzer", indexAnalyzer.name());
+            builder.field("index_analyzer", indexAnalyzer.name());
         }
         if (searchAnalyzer != null && !searchAnalyzer.name().startsWith("_")) {
-            builder.field("searchAnalyzer", searchAnalyzer.name());
+            builder.field("search_analyzer", searchAnalyzer.name());
         }
     }

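Taken together, the mapper changes mean a serialized field mapping now uses snake_case attribute names throughout. A sketch of the fragment a string field might serialize to after this commit; the field name and values are illustrative, not taken from the diff:

    "name" : {
        "type" : "string",
        "index_name" : "name",
        "index" : "analyzed",
        "store" : "no",
        "term_vector" : "no",
        "boost" : 1.0,
        "omit_norms" : false,
        "omit_term_freq_and_positions" : false,
        "null_value" : "na",
        "include_in_all" : true
    }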
@@ -209,10 +209,10 @@ public class JsonFloatFieldMapper extends JsonNumberFieldMapper<Float> {
     @Override protected void doJsonBody(JsonBuilder builder) throws IOException {
         super.doJsonBody(builder);
         if (nullValue != null) {
-            builder.field("nullValue", nullValue);
+            builder.field("null_value", nullValue);
         }
         if (includeInAll != null) {
-            builder.field("includeInAll", includeInAll);
+            builder.field("include_in_all", includeInAll);
         }
     }
 }

@@ -209,10 +209,10 @@ public class JsonIntegerFieldMapper extends JsonNumberFieldMapper<Integer> {
     @Override protected void doJsonBody(JsonBuilder builder) throws IOException {
         super.doJsonBody(builder);
         if (nullValue != null) {
-            builder.field("nullValue", nullValue);
+            builder.field("null_value", nullValue);
         }
         if (includeInAll != null) {
-            builder.field("includeInAll", includeInAll);
+            builder.field("include_in_all", includeInAll);
         }
     }
 }

@@ -209,10 +209,10 @@ public class JsonLongFieldMapper extends JsonNumberFieldMapper<Long> {
     @Override protected void doJsonBody(JsonBuilder builder) throws IOException {
         super.doJsonBody(builder);
         if (nullValue != null) {
-            builder.field("nullValue", nullValue);
+            builder.field("null_value", nullValue);
         }
         if (includeInAll != null) {
-            builder.field("includeInAll", includeInAll);
+            builder.field("include_in_all", includeInAll);
         }
     }
 }

@@ -144,7 +144,7 @@ public abstract class JsonNumberFieldMapper<T extends Number> extends JsonFieldM

     @Override protected void doJsonBody(JsonBuilder builder) throws IOException {
         super.doJsonBody(builder);
-        builder.field("precisionStep", precisionStep);
+        builder.field("precision_step", precisionStep);
     }

     @Override public abstract int sortType();

@@ -498,11 +498,11 @@ public class JsonObjectMapper implements JsonMapper, JsonIncludeInAllMapper {
         builder.field("enabled", enabled);
         builder.field("path", pathType.name().toLowerCase());
         if (includeInAll != null) {
-            builder.field("includeInAll", includeInAll);
+            builder.field("include_in_all", includeInAll);
         }

         if (dateTimeFormatters.length > 0) {
-            builder.startArray("dateFormats");
+            builder.startArray("date_formats");
             for (FormatDateTimeFormatter dateTimeFormatter : dateTimeFormatters) {
                 builder.value(dateTimeFormatter.format());
             }

@@ -209,10 +209,10 @@ public class JsonShortFieldMapper extends JsonNumberFieldMapper<Short> {
     @Override protected void doJsonBody(JsonBuilder builder) throws IOException {
         super.doJsonBody(builder);
         if (nullValue != null) {
-            builder.field("nullValue", nullValue);
+            builder.field("null_value", nullValue);
         }
         if (includeInAll != null) {
-            builder.field("includeInAll", includeInAll);
+            builder.field("include_in_all", includeInAll);
         }
     }
 }

@@ -155,10 +155,10 @@ public class JsonStringFieldMapper extends JsonFieldMapper<String> implements Js
     @Override protected void doJsonBody(JsonBuilder builder) throws IOException {
         super.doJsonBody(builder);
         if (nullValue != null) {
-            builder.field("nullValue", nullValue);
+            builder.field("null_value", nullValue);
         }
         if (includeInAll != null) {
-            builder.field("includeInAll", includeInAll);
+            builder.field("include_in_all", includeInAll);
         }
     }
 }

@@ -65,7 +65,7 @@ public class JsonTypeParsers {
             builder.boost(nodeFloatValue(propNode));
         } else if (propName.equals("omitNorms") || propName.equals("omit_norms")) {
             builder.omitNorms(nodeBooleanValue(propNode));
-        } else if (propName.equals("omitTermFreqAndPositions") || propName.equals("omit_termFreq_and_positions")) {
+        } else if (propName.equals("omitTermFreqAndPositions") || propName.equals("omit_term_freq_and_positions")) {
             builder.omitTermFreqAndPositions(nodeBooleanValue(propNode));
         } else if (propName.equals("indexAnalyzer") || propName.equals("index_analyzer")) {
             builder.indexAnalyzer(parserContext.analysisService().analyzer(propNode.getTextValue()));
@@ -48,16 +48,16 @@ public class BalancedSegmentMergePolicyProvider extends AbstractIndexShardCompon
         super(store.shardId(), store.indexSettings());
         Preconditions.checkNotNull(store, "Store must be provided to merge policy");

-        this.minMergeSize = componentSettings.getAsSize("minMergeSize", new SizeValue((long) LogByteSizeMergePolicy.DEFAULT_MIN_MERGE_MB * 1024 * 1024, SizeUnit.BYTES));
-        this.maxMergeSize = componentSettings.getAsSize("maxMergeSize", new SizeValue((long) LogByteSizeMergePolicy.DEFAULT_MAX_MERGE_MB, SizeUnit.MB));
-        this.mergeFactor = componentSettings.getAsInt("mergeFactor", LogByteSizeMergePolicy.DEFAULT_MERGE_FACTOR);
-        this.maxMergeDocs = componentSettings.getAsInt("maxMergeDocs", LogByteSizeMergePolicy.DEFAULT_MAX_MERGE_DOCS);
-        this.numLargeSegments = componentSettings.getAsInt("numLargeSegments", BalancedSegmentMergePolicy.DEFAULT_NUM_LARGE_SEGMENTS);
-        this.maxSmallSegments = componentSettings.getAsInt("maxSmallSegments", 2 * LogMergePolicy.DEFAULT_MERGE_FACTOR);
+        this.minMergeSize = componentSettings.getAsSize("min_merge_size", new SizeValue((long) LogByteSizeMergePolicy.DEFAULT_MIN_MERGE_MB * 1024 * 1024, SizeUnit.BYTES));
+        this.maxMergeSize = componentSettings.getAsSize("max_merge_size", new SizeValue((long) LogByteSizeMergePolicy.DEFAULT_MAX_MERGE_MB, SizeUnit.MB));
+        this.mergeFactor = componentSettings.getAsInt("merge_factor", LogByteSizeMergePolicy.DEFAULT_MERGE_FACTOR);
+        this.maxMergeDocs = componentSettings.getAsInt("max_merge_docs", LogByteSizeMergePolicy.DEFAULT_MAX_MERGE_DOCS);
+        this.numLargeSegments = componentSettings.getAsInt("num_large_segments", BalancedSegmentMergePolicy.DEFAULT_NUM_LARGE_SEGMENTS);
+        this.maxSmallSegments = componentSettings.getAsInt("max_small_segments", 2 * LogMergePolicy.DEFAULT_MERGE_FACTOR);

-        this.useCompoundFile = componentSettings.getAsBoolean("useCompoundFile", store == null || store.suggestUseCompoundFile());
+        this.useCompoundFile = componentSettings.getAsBoolean("use_compound_file", store == null || store.suggestUseCompoundFile());

-        logger.debug("Using [Balanced] merge policy with mergeFactor[{}], minMergeSize[{}], maxMergeSize[{}], maxMergeDocs[{}] useCompoundFile[{}]",
+        logger.debug("Using [Balanced] merge policy with merge_factor[{}], min_merge_size[{}], max_merge_size[{}], max_merge_docs[{}] use_compound_file[{}]",
                 new Object[]{mergeFactor, minMergeSize, maxMergeSize, maxMergeDocs, useCompoundFile});
     }

@@ -46,13 +46,13 @@ public class LogByteSizeMergePolicyProvider extends AbstractIndexShardComponent
         super(store.shardId(), store.indexSettings());
         Preconditions.checkNotNull(store, "Store must be provided to merge policy");

-        this.minMergeSize = componentSettings.getAsSize("minMergeSize", new SizeValue((long) LogByteSizeMergePolicy.DEFAULT_MIN_MERGE_MB * 1024 * 1024, SizeUnit.BYTES));
-        this.maxMergeSize = componentSettings.getAsSize("maxMergeSize", new SizeValue((long) LogByteSizeMergePolicy.DEFAULT_MAX_MERGE_MB, SizeUnit.MB));
-        this.mergeFactor = componentSettings.getAsInt("mergeFactor", LogByteSizeMergePolicy.DEFAULT_MERGE_FACTOR);
-        this.maxMergeDocs = componentSettings.getAsInt("maxMergeDocs", LogByteSizeMergePolicy.DEFAULT_MAX_MERGE_DOCS);
-        this.calibrateSizeByDeletes = componentSettings.getAsBoolean("calibrateSizeByDeletes", false);
-        this.useCompoundFile = componentSettings.getAsBoolean("useCompoundFile", store == null || store.suggestUseCompoundFile());
-        logger.debug("Using [LogByteSize] merge policy with mergeFactor[{}], minMergeSize[{}], maxMergeSize[{}], maxMergeDocs[{}] useCompoundFile[{}], calibrateSizeByDeletes[{}]",
+        this.minMergeSize = componentSettings.getAsSize("min_merge_size", new SizeValue((long) LogByteSizeMergePolicy.DEFAULT_MIN_MERGE_MB * 1024 * 1024, SizeUnit.BYTES));
+        this.maxMergeSize = componentSettings.getAsSize("max_merge_size", new SizeValue((long) LogByteSizeMergePolicy.DEFAULT_MAX_MERGE_MB, SizeUnit.MB));
+        this.mergeFactor = componentSettings.getAsInt("merge_factor", LogByteSizeMergePolicy.DEFAULT_MERGE_FACTOR);
+        this.maxMergeDocs = componentSettings.getAsInt("max_merge_docs", LogByteSizeMergePolicy.DEFAULT_MAX_MERGE_DOCS);
+        this.calibrateSizeByDeletes = componentSettings.getAsBoolean("calibrate_size_by_deletes", false);
+        this.useCompoundFile = componentSettings.getAsBoolean("use_compound_file", store == null || store.suggestUseCompoundFile());
+        logger.debug("Using [LogByteSize] merge policy with merge_factor[{}], min_merge_size[{}], max_merge_size[{}], max_merge_docs[{}] use_compound_file[{}], calibrate_size_by_deletes[{}]",
                 new Object[]{mergeFactor, minMergeSize, maxMergeSize, maxMergeDocs, useCompoundFile, calibrateSizeByDeletes});
     }

@@ -43,12 +43,12 @@ public class LogDocMergePolicyProvider extends AbstractIndexShardComponent imple
         super(store.shardId(), store.indexSettings());
         Preconditions.checkNotNull(store, "Store must be provided to merge policy");

-        this.minMergeDocs = componentSettings.getAsInt("minMergeDocs", LogDocMergePolicy.DEFAULT_MIN_MERGE_DOCS);
-        this.maxMergeDocs = componentSettings.getAsInt("maxMergeDocs", LogDocMergePolicy.DEFAULT_MAX_MERGE_DOCS);
-        this.mergeFactor = componentSettings.getAsInt("mergeFactor", LogDocMergePolicy.DEFAULT_MERGE_FACTOR);
-        this.calibrateSizeByDeletes = componentSettings.getAsBoolean("calibrateSizeByDeletes", false);
-        this.useCompoundFile = componentSettings.getAsBoolean("useCompoundFile", store == null || store.suggestUseCompoundFile());
-        logger.debug("Using [LogDoc] merge policy with mergeFactor[{}] minMergeDocs[{}], maxMergeDocs[{}], useCompoundFile[{}], calibrateSizeByDeletes[{}]",
+        this.minMergeDocs = componentSettings.getAsInt("min_merge_docs", LogDocMergePolicy.DEFAULT_MIN_MERGE_DOCS);
+        this.maxMergeDocs = componentSettings.getAsInt("max_merge_docs", LogDocMergePolicy.DEFAULT_MAX_MERGE_DOCS);
+        this.mergeFactor = componentSettings.getAsInt("merge_factor", LogDocMergePolicy.DEFAULT_MERGE_FACTOR);
+        this.calibrateSizeByDeletes = componentSettings.getAsBoolean("calibrate_size_by_deletes", false);
+        this.useCompoundFile = componentSettings.getAsBoolean("use_compound_file", store == null || store.suggestUseCompoundFile());
+        logger.debug("Using [LogDoc] merge policy with merge_factor[{}] min_merge_docs[{}], max_merge_docs[{}], use_compound_file[{}], calibrate_size_by_deletes[{}]",
                 new Object[]{mergeFactor, minMergeDocs, maxMergeDocs, useCompoundFile, calibrateSizeByDeletes});
     }

@@ -39,8 +39,8 @@ public class ConcurrentMergeSchedulerProvider extends AbstractIndexShardComponen
     @Inject public ConcurrentMergeSchedulerProvider(ShardId shardId, @IndexSettings Settings indexSettings) {
         super(shardId, indexSettings);

-        this.maxThreadCount = componentSettings.getAsInt("maxThreadCount", 1);
-        logger.debug("Using [concurrent] merge scheduler with maxThreadCount[{}]", maxThreadCount);
+        this.maxThreadCount = componentSettings.getAsInt("max_thread_count", 1);
+        logger.debug("Using [concurrent] merge scheduler with max_thread_count[{}]", maxThreadCount);
     }

     @Override public MergeScheduler newMergeScheduler() {
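The merge-policy and merge-scheduler keys are all read from componentSettings, so the renamed keys are set under each component's prefix. A sketch; the "index.merge.policy." and "index.merge.scheduler." prefixes are assumptions based on the components' conventional settings prefixes, not shown in this diff:

    Settings indexSettings = ImmutableSettings.settingsBuilder()
            .put("index.merge.policy.merge_factor", 10)        // was mergeFactor
            .put("index.merge.policy.max_merge_docs", 100000)  // was maxMergeDocs
            .put("index.merge.scheduler.max_thread_count", 1)  // was maxThreadCount
            .build();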
@ -67,7 +67,7 @@ public class BoolJsonFilterBuilder extends BaseJsonQueryBuilder {
|
||||
builder.field("must");
|
||||
clause.filterBuilder.toJson(builder, params);
|
||||
} else if (clause.occur == BooleanClause.Occur.MUST_NOT) {
|
||||
builder.field("mustNot");
|
||||
builder.field("must_not");
|
||||
clause.filterBuilder.toJson(builder, params);
|
||||
} else if (clause.occur == BooleanClause.Occur.SHOULD) {
|
||||
builder.field("should");
|
||||
|
@ -63,7 +63,7 @@ public class BoolJsonFilterParser extends AbstractIndexComponent implements Json
|
||||
} else if (token == JsonToken.START_OBJECT) {
|
||||
if ("must".equals(currentFieldName)) {
|
||||
clauses.add(new FilterClause(parseContext.parseInnerFilter(), BooleanClause.Occur.MUST));
|
||||
} else if ("mustNot".equals(currentFieldName)) {
|
||||
} else if ("must_not".equals(currentFieldName)) {
|
||||
clauses.add(new FilterClause(parseContext.parseInnerFilter(), BooleanClause.Occur.MUST_NOT));
|
||||
} else if ("should".equals(currentFieldName)) {
|
||||
clauses.add(new FilterClause(parseContext.parseInnerFilter(), BooleanClause.Occur.SHOULD));
|
||||
@ -73,7 +73,7 @@ public class BoolJsonFilterParser extends AbstractIndexComponent implements Json
|
||||
while ((token = jp.nextToken()) != JsonToken.END_ARRAY) {
|
||||
clauses.add(new FilterClause(parseContext.parseInnerFilter(), BooleanClause.Occur.MUST));
|
||||
}
|
||||
} else if ("mustNot".equals(currentFieldName)) {
|
||||
} else if ("must_not".equals(currentFieldName)) {
|
||||
while ((token = jp.nextToken()) != JsonToken.END_ARRAY) {
|
||||
clauses.add(new FilterClause(parseContext.parseInnerFilter(), BooleanClause.Occur.MUST_NOT));
|
||||
}
|
||||
|
@ -118,7 +118,7 @@ public class BoolJsonQueryBuilder extends BaseJsonQueryBuilder {
|
||||
builder.field("must");
|
||||
clause.queryBuilder.toJson(builder, params);
|
||||
} else if (clause.occur == BooleanClause.Occur.MUST_NOT) {
|
||||
builder.field("mustNot");
|
||||
builder.field("must_not");
|
||||
clause.queryBuilder.toJson(builder, params);
|
||||
} else if (clause.occur == BooleanClause.Occur.SHOULD) {
|
||||
builder.field("should");
|
||||
@ -129,10 +129,10 @@ public class BoolJsonQueryBuilder extends BaseJsonQueryBuilder {
|
||||
builder.field("boost", boost);
|
||||
}
|
||||
if (disableCoord != null) {
|
||||
builder.field("disableCoord", disableCoord);
|
||||
builder.field("disable_coord", disableCoord);
|
||||
}
|
||||
if (minimumNumberShouldMatch != -1) {
|
||||
builder.field("minimumNumberShouldMatch", minimumNumberShouldMatch);
|
||||
builder.field("minimum_number_should_match", minimumNumberShouldMatch);
|
||||
}
|
||||
builder.endObject();
|
||||
}
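For illustration, a hedged sketch of a bool query body using the renamed fields; the inner term clauses and all values are invented:

    {
        bool : {
            must : { term : { user : "kimchy" } },
            must_not : { term : { user : "other" } },
            should : { term : { tag : "search" } },
            disable_coord : true,
            minimum_number_should_match : 1,
            boost : 1.0
        }
    }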

@ -68,7 +68,7 @@ public class BoolJsonQueryParser extends AbstractIndexComponent implements JsonQ
} else if (token == JsonToken.START_OBJECT) {
if ("must".equals(currentFieldName)) {
clauses.add(new BooleanClause(parseContext.parseInnerQuery(), BooleanClause.Occur.MUST));
} else if ("mustNot".equals(currentFieldName)) {
} else if ("must_not".equals(currentFieldName)) {
clauses.add(new BooleanClause(parseContext.parseInnerQuery(), BooleanClause.Occur.MUST_NOT));
} else if ("should".equals(currentFieldName)) {
clauses.add(new BooleanClause(parseContext.parseInnerQuery(), BooleanClause.Occur.SHOULD));

@ -78,7 +78,7 @@ public class BoolJsonQueryParser extends AbstractIndexComponent implements JsonQ
while ((token = jp.nextToken()) != JsonToken.END_ARRAY) {
clauses.add(new BooleanClause(parseContext.parseInnerQuery(), BooleanClause.Occur.MUST));
}
} else if ("mustNot".equals(currentFieldName)) {
} else if ("must_not".equals(currentFieldName)) {
while ((token = jp.nextToken()) != JsonToken.END_ARRAY) {
clauses.add(new BooleanClause(parseContext.parseInnerQuery(), BooleanClause.Occur.MUST_NOT));
}

@ -88,13 +88,13 @@ public class BoolJsonQueryParser extends AbstractIndexComponent implements JsonQ
}
}
} else if (token == JsonToken.VALUE_TRUE || token == JsonToken.VALUE_FALSE) {
if ("disableCoord".equals(currentFieldName)) {
if ("disable_coord".equals(currentFieldName)) {
disableCoord = token == JsonToken.VALUE_TRUE;
}
} else if (token == JsonToken.VALUE_NUMBER_INT) {
if ("disableCoord".equals(currentFieldName)) {
if ("disable_coord".equals(currentFieldName)) {
disableCoord = jp.getIntValue() != 0;
} else if ("minimumNumberShouldMatch".equals(currentFieldName)) {
} else if ("minimum_number_should_match".equals(currentFieldName)) {
minimumNumberShouldMatch = jp.getIntValue();
} else if ("boost".equals(currentFieldName)) {
boost = jp.getIntValue();

@ -104,9 +104,9 @@ public class BoolJsonQueryParser extends AbstractIndexComponent implements JsonQ
boost = jp.getFloatValue();
}
} else if (token == JsonToken.VALUE_STRING) {
if ("disableCoord".equals(currentFieldName)) {
if ("disable_coord".equals(currentFieldName)) {
disableCoord = Booleans.parseBoolean(jp.getText(), false);
} else if ("minimumNumberShouldMatch".equals(currentFieldName)) {
} else if ("minimum_number_should_match".equals(currentFieldName)) {
minimumNumberShouldMatch = Integer.parseInt(jp.getText());
} else if ("boost".equals(currentFieldName)) {
boost = Float.parseFloat(jp.getText());

@ -38,7 +38,7 @@ import java.io.IOException;
*/
public class ConstantScoreQueryJsonQueryParser extends AbstractIndexComponent implements JsonQueryParser {

public static final String NAME = "constantScore";
public static final String NAME = "constant_score";

@Inject public ConstantScoreQueryJsonQueryParser(Index index, @IndexSettings Settings settings) {
super(index, settings);

@ -70,9 +70,9 @@ public class DisMaxJsonQueryBuilder extends BaseJsonQueryBuilder {
}

@Override protected void doJson(JsonBuilder builder, Params params) throws IOException {
builder.startObject("disMax");
builder.startObject(DisMaxJsonQueryParser.NAME);
if (tieBreaker != -1) {
builder.field("tieBreaker", tieBreaker);
builder.field("tie_breaker", tieBreaker);
}
if (boost != -1) {
builder.field("boost", boost);

@ -40,12 +40,14 @@ import static com.google.common.collect.Lists.*;
*/
public class DisMaxJsonQueryParser extends AbstractIndexComponent implements JsonQueryParser {

public static final String NAME = "dis_max";

@Inject public DisMaxJsonQueryParser(Index index, @IndexSettings Settings settings) {
super(index, settings);
}

@Override public String name() {
return "disMax";
return NAME;
}
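A hedged sketch of the renamed dis_max body; the queries array and its contents are assumptions based on the parser's clause handling, which this hunk does not show:

    {
        dis_max : {
            tie_breaker : 0.7,
            boost : 1.2,
            queries : [
                { term : { age : 34 } },
                { term : { age : 35 } }
            ]
        }
    }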

@Override public Query parse(JsonQueryParseContext parseContext) throws IOException, QueryParsingException {

@ -79,7 +81,7 @@ public class DisMaxJsonQueryParser extends AbstractIndexComponent implements Jso
} else {
boost = jp.getFloatValue();
}
} else if ("tieBreaker".equals(currentFieldName)) {
} else if ("tie_breaker".equals(currentFieldName)) {
if (token == JsonToken.VALUE_STRING) {
tieBreaker = Float.parseFloat(jp.getText());
} else {

@ -238,31 +238,31 @@ public class FieldJsonQueryBuilder extends BaseJsonQueryBuilder {
builder.startObject(name);
builder.field("query", query);
if (defaultOperator != null) {
builder.field("defaultOperator", defaultOperator.name().toLowerCase());
builder.field("default_operator", defaultOperator.name().toLowerCase());
}
if (analyzer != null) {
builder.field("analyzer", analyzer);
}
if (allowLeadingWildcard != null) {
builder.field("allowLeadingWildcard", allowLeadingWildcard);
builder.field("allow_leading_wildcard", allowLeadingWildcard);
}
if (lowercaseExpandedTerms != null) {
builder.field("lowercaseExpandedTerms", lowercaseExpandedTerms);
builder.field("lowercase_expanded_terms", lowercaseExpandedTerms);
}
if (enablePositionIncrements != null) {
builder.field("enablePositionIncrements", enablePositionIncrements);
builder.field("enable_position_increments", enablePositionIncrements);
}
if (fuzzyMinSim != -1) {
builder.field("fuzzyMinSim", fuzzyMinSim);
builder.field("fuzzy_min_sim", fuzzyMinSim);
}
if (boost != -1) {
builder.field("boost", boost);
}
if (fuzzyPrefixLength != -1) {
builder.field("fuzzyPrefixLength", fuzzyPrefixLength);
builder.field("fuzzy_prefix_length", fuzzyPrefixLength);
}
if (phraseSlop != -1) {
builder.field("phraseSlop", phraseSlop);
builder.field("phrase_slop", phraseSlop);
}
builder.endObject();
}
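A hedged sketch of the field query body with the renamed parameters; the outer field name and all values are invented:

    {
        field : {
            name.first : {
                query : "+something -else",
                default_operator : "and",
                phrase_slop : 1,
                fuzzy_min_sim : 0.5,
                fuzzy_prefix_length : 1
            }
        }
    }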

@ -86,15 +86,15 @@ public class FieldJsonQueryParser extends AbstractIndexComponent implements Json
queryString = jp.getText();
} else if ("boost".equals(currentFieldName)) {
boost = Float.parseFloat(jp.getText());
} else if ("enablePositionIncrements".equals(currentFieldName)) {
} else if ("enable_position_increments".equals(currentFieldName)) {
enablePositionIncrements = Booleans.parseBoolean(jp.getText(), true);
} else if ("lowercaseExpandedTerms".equals(currentFieldName)) {
} else if ("lowercase_expanded_terms".equals(currentFieldName)) {
lowercaseExpandedTerms = Booleans.parseBoolean(jp.getText(), true);
} else if ("phraseSlop".equals(currentFieldName)) {
} else if ("phrase_slop".equals(currentFieldName)) {
phraseSlop = Integer.parseInt(jp.getText());
} else if ("analyzer".equals(currentFieldName)) {
analyzer = analysisService.analyzer(jp.getText());
} else if ("defaultOperator".equals(currentFieldName)) {
} else if ("default_operator".equals(currentFieldName)) {
String op = jp.getText();
if ("or".equalsIgnoreCase(op)) {
defaultOperator = QueryParser.Operator.OR;

@ -103,9 +103,9 @@ public class FieldJsonQueryParser extends AbstractIndexComponent implements Json
} else {
throw new QueryParsingException(index, "Query default operator [" + op + "] is not allowed");
}
} else if ("fuzzyMinSim".equals(currentFieldName)) {
} else if ("fuzzy_min_sim".equals(currentFieldName)) {
fuzzyMinSim = Float.parseFloat(jp.getText());
} else if ("fuzzyPrefixLength".equals(currentFieldName)) {
} else if ("fuzzy_prefix_length".equals(currentFieldName)) {
fuzzyPrefixLength = Integer.parseInt(jp.getText());
} else if ("escape".equals(currentFieldName)) {
escape = Booleans.parseBoolean(jp.getText(), false);

@ -115,15 +115,15 @@ public class FieldJsonQueryParser extends AbstractIndexComponent implements Json
queryString = jp.getText();
} else if ("boost".equals(currentFieldName)) {
boost = jp.getIntValue();
} else if ("enablePositionIncrements".equals(currentFieldName)) {
} else if ("enable_position_increments".equals(currentFieldName)) {
enablePositionIncrements = jp.getIntValue() != 0;
} else if ("lowercaseExpandedTerms".equals(currentFieldName)) {
} else if ("lowercase_expanded_terms".equals(currentFieldName)) {
lowercaseExpandedTerms = jp.getIntValue() != 0;
} else if ("phraseSlop".equals(currentFieldName)) {
} else if ("phrase_slop".equals(currentFieldName)) {
phraseSlop = jp.getIntValue();
} else if ("fuzzyMinSim".equals(currentFieldName)) {
} else if ("fuzzy_min_sim".equals(currentFieldName)) {
fuzzyMinSim = jp.getIntValue();
} else if ("fuzzyPrefixLength".equals(currentFieldName)) {
} else if ("fuzzy_prefix_length".equals(currentFieldName)) {
fuzzyPrefixLength = jp.getIntValue();
} else if ("escape".equals(currentFieldName)) {
escape = jp.getIntValue() != 0;

@ -133,15 +133,15 @@ public class FieldJsonQueryParser extends AbstractIndexComponent implements Json
queryString = jp.getText();
} else if ("boost".equals(currentFieldName)) {
boost = jp.getFloatValue();
} else if ("fuzzyPrefixLength".equals(currentFieldName)) {
} else if ("fuzzy_prefix_length".equals(currentFieldName)) {
fuzzyPrefixLength = jp.getIntValue();
}
} else if (token == JsonToken.VALUE_TRUE) {
if ("query".equals(currentFieldName)) {
queryString = jp.getText();
} else if ("enablePositionIncrements".equals(currentFieldName)) {
} else if ("enable_position_increments".equals(currentFieldName)) {
enablePositionIncrements = true;
} else if ("lowercaseExpandedTerms".equals(currentFieldName)) {
} else if ("lowercase_expanded_terms".equals(currentFieldName)) {
lowercaseExpandedTerms = true;
} else if ("escape".equals(currentFieldName)) {
escape = true;

@ -149,9 +149,9 @@ public class FieldJsonQueryParser extends AbstractIndexComponent implements Json
} else if (token == JsonToken.VALUE_FALSE) {
if ("query".equals(currentFieldName)) {
queryString = jp.getText();
} else if ("enablePositionIncrements".equals(currentFieldName)) {
} else if ("enable_position_increments".equals(currentFieldName)) {
enablePositionIncrements = false;
} else if ("lowercaseExpandedTerms".equals(currentFieldName)) {
} else if ("lowercase_expanded_terms".equals(currentFieldName)) {
lowercaseExpandedTerms = false;
} else if ("escape".equals(currentFieldName)) {
escape = false;

@ -87,18 +87,18 @@ public class FuzzyLikeThisFieldJsonQueryBuilder extends BaseJsonQueryBuilder {
if (likeText == null) {
throw new QueryBuilderException("fuzzyLikeThis requires 'likeText' to be provided");
}
builder.field("likeText", likeText);
builder.field("like_text", likeText);
if (maxNumTerms != null) {
builder.field("maxNumTerms", maxNumTerms);
builder.field("max_num_terms", maxNumTerms);
}
if (minSimilarity != null) {
builder.field("minSimilarity", minSimilarity);
builder.field("min_similarity", minSimilarity);
}
if (prefixLength != null) {
builder.field("prefixLength", prefixLength);
builder.field("prefix_length", prefixLength);
}
if (ignoreTF != null) {
builder.field("ignoreTF", ignoreTF);
builder.field("ignore_tf", ignoreTF);
}
if (boost != null) {
builder.field("boost", boost);
@ -39,7 +39,7 @@ import static org.elasticsearch.index.query.support.QueryParsers.*;
/**
* <pre>
* {
* fuzzyLikeThisField : {
* fuzzy_like_this_field : {
* field1 : {
* maxNumTerms : 12,
* boost : 1.1,
@ -52,7 +52,7 @@ import static org.elasticsearch.index.query.support.QueryParsers.*;
*/
public class FuzzyLikeThisFieldJsonQueryParser extends AbstractIndexComponent implements JsonQueryParser {

public static final String NAME = "fuzzyLikeThisField";
public static final String NAME = "fuzzy_like_this_field";

public FuzzyLikeThisFieldJsonQueryParser(Index index, @IndexSettings Settings indexSettings) {
super(index, indexSettings);

@ -86,25 +86,25 @@ public class FuzzyLikeThisFieldJsonQueryParser extends AbstractIndexComponent im
if (token == JsonToken.FIELD_NAME) {
currentFieldName = jp.getCurrentName();
} else if (token == JsonToken.VALUE_STRING) {
if ("likeText".equals(currentFieldName)) {
if ("like_text".equals(currentFieldName)) {
likeText = jp.getText();
} else if ("maxNumTerms".equals(currentFieldName)) {
} else if ("max_num_terms".equals(currentFieldName)) {
maxNumTerms = Integer.parseInt(jp.getText());
} else if ("boost".equals(currentFieldName)) {
boost = Float.parseFloat(jp.getText());
} else if ("ignoreTF".equals(currentFieldName)) {
} else if ("ignore_tf".equals(currentFieldName)) {
ignoreTF = Booleans.parseBoolean(jp.getText(), false);
}
} else if (token == JsonToken.VALUE_NUMBER_INT) {
if ("maxNumTerms".equals(currentFieldName)) {
if ("max_num_terms".equals(currentFieldName)) {
maxNumTerms = jp.getIntValue();
} else if ("boost".equals(currentFieldName)) {
boost = jp.getIntValue();
} else if ("ignoreTF".equals(currentFieldName)) {
} else if ("ignore_tf".equals(currentFieldName)) {
ignoreTF = jp.getIntValue() != 0;
}
} else if (token == JsonToken.VALUE_TRUE) {
if ("ignoreTF".equals(currentFieldName)) {
if ("ignore_tf".equals(currentFieldName)) {
ignoreTF = true;
}
} else if (token == JsonToken.VALUE_NUMBER_FLOAT) {

@ -115,7 +115,7 @@ public class FuzzyLikeThisFieldJsonQueryParser extends AbstractIndexComponent im
}

if (likeText == null) {
throw new QueryParsingException(index, "fuzzyLikeThisField requires 'likeText' to be specified");
throw new QueryParsingException(index, "fuzzy_like_this_field requires 'like_text' to be specified");
}
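For illustration, a hedged sketch of the body this parser now accepts; the field name and values are invented:

    {
        fuzzy_like_this_field : {
            field1 : {
                like_text : "text like this one",
                max_num_terms : 12,
                boost : 1.1,
                ignore_tf : false
            }
        }
    }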

Analyzer analyzer = null;

@ -100,18 +100,18 @@ public class FuzzyLikeThisJsonQueryBuilder extends BaseJsonQueryBuilder {
if (likeText == null) {
throw new QueryBuilderException("fuzzyLikeThis requires 'likeText' to be provided");
}
builder.field("likeText", likeText);
builder.field("like_text", likeText);
if (maxNumTerms != null) {
builder.field("maxNumTerms", maxNumTerms);
builder.field("max_num_terms", maxNumTerms);
}
if (minSimilarity != null) {
builder.field("minSimilarity", minSimilarity);
builder.field("min_similarity", minSimilarity);
}
if (prefixLength != null) {
builder.field("prefixLength", prefixLength);
builder.field("prefix_length", prefixLength);
}
if (ignoreTF != null) {
builder.field("ignoreTF", ignoreTF);
builder.field("ignore_tf", ignoreTF);
}
if (boost != null) {
builder.field("boost", boost);

@ -38,7 +38,7 @@ import java.util.List;
/**
* <pre>
* {
* fuzzyLikeThis : {
* fuzzy_like_this : {
* maxNumTerms : 12,
* boost : 1.1,
* fields : ["field1", "field2"]

@ -51,7 +51,7 @@ import java.util.List;
*/
public class FuzzyLikeThisJsonQueryParser extends AbstractIndexComponent implements JsonQueryParser {

public static final String NAME = "fuzzyLikeThis";
public static final String NAME = "fuzzy_like_this";

public FuzzyLikeThisJsonQueryParser(Index index, @IndexSettings Settings indexSettings) {
super(index, indexSettings);

@ -78,25 +78,25 @@ public class FuzzyLikeThisJsonQueryParser extends AbstractIndexComponent impleme
if (token == JsonToken.FIELD_NAME) {
currentFieldName = jp.getCurrentName();
} else if (token == JsonToken.VALUE_STRING) {
if ("likeText".equals(currentFieldName)) {
if ("like_text".equals(currentFieldName)) {
likeText = jp.getText();
} else if ("maxNumTerms".equals(currentFieldName)) {
} else if ("max_num_terms".equals(currentFieldName)) {
maxNumTerms = Integer.parseInt(jp.getText());
} else if ("boost".equals(currentFieldName)) {
boost = Float.parseFloat(jp.getText());
} else if ("ignoreTF".equals(currentFieldName)) {
} else if ("ignore_tf".equals(currentFieldName)) {
ignoreTF = Booleans.parseBoolean(jp.getText(), false);
}
} else if (token == JsonToken.VALUE_NUMBER_INT) {
if ("maxNumTerms".equals(currentFieldName)) {
if ("max_num_terms".equals(currentFieldName)) {
maxNumTerms = jp.getIntValue();
} else if ("boost".equals(currentFieldName)) {
boost = jp.getIntValue();
} else if ("ignoreTF".equals(currentFieldName)) {
} else if ("ignore_tf".equals(currentFieldName)) {
ignoreTF = jp.getIntValue() != 0;
}
} else if (token == JsonToken.VALUE_TRUE) {
if ("ignoreTF".equals(currentFieldName)) {
if ("ignore_tf".equals(currentFieldName)) {
ignoreTF = true;
}
} else if (token == JsonToken.VALUE_NUMBER_FLOAT) {

@ -114,7 +114,7 @@ public class FuzzyLikeThisJsonQueryParser extends AbstractIndexComponent impleme
}

if (likeText == null) {
throw new QueryParsingException(index, "fuzzyLikeThis requires 'likeText' to be specified");
throw new QueryParsingException(index, "fuzzy_like_this requires 'like_text' to be specified");
}
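A hedged sketch of the multi-field form after the rename; values are invented:

    {
        fuzzy_like_this : {
            fields : ["field1", "field2"],
            like_text : "text like this one",
            max_num_terms : 12,
            boost : 1.1
        }
    }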

FuzzyLikeThisQuery query = new FuzzyLikeThisQuery(maxNumTerms, parseContext.mapperService().searchAnalyzer());

@ -57,7 +57,7 @@ public class MatchAllJsonQueryBuilder extends BaseJsonQueryBuilder {
builder.field("boost", boost);
}
if (normsField != null) {
builder.field("normsField", normsField);
builder.field("norms_field", normsField);
}
builder.endObject();
}
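A hedged sketch of the match_all body after the rename; both values are invented:

    {
        match_all : { boost : 1.2, norms_field : "my_field" }
    }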

@ -33,11 +33,11 @@ import org.elasticsearch.util.settings.Settings;
import java.io.IOException;

/**
* @author kimchy (Shay Banon)
* @author kimchy (shay.banon)
*/
public class MatchAllJsonQueryParser extends AbstractIndexComponent implements JsonQueryParser {

public static final String NAME = "matchAll";
public static final String NAME = "match_all";

@Inject public MatchAllJsonQueryParser(Index index, @IndexSettings Settings settings) {
super(index, settings);

@ -61,7 +61,7 @@ public class MatchAllJsonQueryParser extends AbstractIndexComponent implements J
} else if (token == JsonToken.VALUE_STRING) {
if ("boost".equals(currentFieldName)) {
boost = Float.parseFloat(jp.getText());
} else if ("normsField".equals(currentFieldName)) {
} else if ("norms_field".equals(currentFieldName)) {
normsField = parseContext.indexName(jp.getText());
}
} else {

@ -35,7 +35,7 @@ public class MoreLikeThisFieldJsonQueryBuilder extends BaseJsonQueryBuilder {

private String likeText;
private float percentTermsToMatch = -1;
private int minTermFrequency = -1;
private int minTermFreq = -1;
private int maxQueryTerms = -1;
private String[] stopWords = null;
private int minDocFreq = -1;

@ -44,6 +44,7 @@ public class MoreLikeThisFieldJsonQueryBuilder extends BaseJsonQueryBuilder {
private int maxWordLen = -1;
private Boolean boostTerms = null;
private float boostTermsFactor = -1;
private float boost = -1;

/**
* A more like this query that runs against a specific field.

@ -74,8 +75,8 @@ public class MoreLikeThisFieldJsonQueryBuilder extends BaseJsonQueryBuilder {
* The frequency below which terms will be ignored in the source doc. The default
* frequency is <tt>2</tt>.
*/
public MoreLikeThisFieldJsonQueryBuilder minTermFrequency(int minTermFrequency) {
this.minTermFrequency = minTermFrequency;
public MoreLikeThisFieldJsonQueryBuilder minTermFreq(int minTermFreq) {
this.minTermFreq = minTermFreq;
return this;
}

@ -153,46 +154,54 @@ public class MoreLikeThisFieldJsonQueryBuilder extends BaseJsonQueryBuilder {
return this;
}

public MoreLikeThisFieldJsonQueryBuilder boost(float boost) {
this.boost = boost;
return this;
}

@Override protected void doJson(JsonBuilder builder, Params params) throws IOException {
builder.startObject(MoreLikeThisFieldJsonQueryParser.NAME);
builder.startObject(name);
if (likeText == null) {
throw new QueryBuilderException("moreLikeThisField requires 'likeText' to be provided");
throw new QueryBuilderException("more_like_this_field requires 'like_text' to be provided");
}
builder.field("likeText", likeText);
builder.field("like_text", likeText);
if (percentTermsToMatch != -1) {
builder.field("percentTermsToMatch", percentTermsToMatch);
builder.field("percent_terms_to_match", percentTermsToMatch);
}
if (minTermFrequency != -1) {
builder.field("minTermFrequency", minTermFrequency);
if (minTermFreq != -1) {
builder.field("min_term_freq", minTermFreq);
}
if (maxQueryTerms != -1) {
builder.field("maxQueryTerms", maxQueryTerms);
builder.field("max_query_terms", maxQueryTerms);
}
if (stopWords != null && stopWords.length > 0) {
builder.startArray("stopWords");
builder.startArray("stop_words");
for (String stopWord : stopWords) {
builder.value(stopWord);
}
builder.endArray();
}
if (minDocFreq != -1) {
builder.field("minDocFreq", minDocFreq);
builder.field("min_doc_freq", minDocFreq);
}
if (maxDocFreq != -1) {
builder.field("maxDocFreq", maxDocFreq);
builder.field("max_doc_freq", maxDocFreq);
}
if (minWordLen != -1) {
builder.field("minWordLen", minWordLen);
builder.field("min_word_len", minWordLen);
}
if (maxWordLen != -1) {
builder.field("maxWordLen", maxWordLen);
builder.field("max_word_len", maxWordLen);
}
if (boostTerms != null) {
builder.field("boostTerms", boostTerms);
builder.field("boost_terms", boostTerms);
}
if (boostTermsFactor != -1) {
builder.field("boostTermsFactor", boostTermsFactor);
builder.field("boost_terms_factor", boostTermsFactor);
}
if (boost != -1) {
builder.field("boost", boost);
}
builder.endObject();
builder.endObject();
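A hedged sketch of the per-field form after the rename; the field name and values are invented:

    {
        more_like_this_field : {
            name.first : {
                like_text : "text like this one",
                min_term_freq : 2,
                max_query_terms : 25,
                boost : 1.1
            }
        }
    }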

@ -42,7 +42,7 @@ import static org.elasticsearch.index.query.support.QueryParsers.*;
*/
public class MoreLikeThisFieldJsonQueryParser extends AbstractIndexComponent implements JsonQueryParser {

public static final String NAME = "moreLikeThisField";
public static final String NAME = "more_like_this_field";

public MoreLikeThisFieldJsonQueryParser(Index index, @IndexSettings Settings indexSettings) {
super(index, indexSettings);

@ -72,55 +72,59 @@ public class MoreLikeThisFieldJsonQueryParser extends AbstractIndexComponent imp
if (token == JsonToken.FIELD_NAME) {
currentFieldName = jp.getCurrentName();
} else if (token == JsonToken.VALUE_STRING) {
if ("likeText".equals(currentFieldName)) {
if ("like_text".equals(currentFieldName)) {
mltQuery.setLikeText(jp.getText());
} else if ("minTermFrequency".equals(currentFieldName)) {
} else if ("min_term_freq".equals(currentFieldName)) {
mltQuery.setMinTermFrequency(Integer.parseInt(jp.getText()));
} else if ("maxQueryTerms".equals(currentFieldName)) {
} else if ("max_query_terms".equals(currentFieldName)) {
mltQuery.setMaxQueryTerms(Integer.parseInt(jp.getText()));
} else if ("minDocFreq".equals(currentFieldName)) {
} else if ("min_doc_freq".equals(currentFieldName)) {
mltQuery.setMinDocFreq(Integer.parseInt(jp.getText()));
} else if ("maxDocFreq".equals(currentFieldName)) {
} else if ("max_doc_freq".equals(currentFieldName)) {
mltQuery.setMaxDocFreq(Integer.parseInt(jp.getText()));
} else if ("minWordLen".equals(currentFieldName)) {
} else if ("min_word_len".equals(currentFieldName)) {
mltQuery.setMinWordLen(Integer.parseInt(jp.getText()));
} else if ("maxWordLen".equals(currentFieldName)) {
} else if ("max_word_len".equals(currentFieldName)) {
mltQuery.setMaxWordLen(Integer.parseInt(jp.getText()));
} else if ("boostTerms".equals(currentFieldName)) {
} else if ("boost_terms".equals(currentFieldName)) {
mltQuery.setBoostTerms(Booleans.parseBoolean(jp.getText(), false));
} else if ("boostTermsFactor".equals(currentFieldName)) {
} else if ("boost_terms_factor".equals(currentFieldName)) {
mltQuery.setBoostTermsFactor(Float.parseFloat(jp.getText()));
} else if ("percentTermsToMatch".equals(currentFieldName)) {
} else if ("percent_terms_to_match".equals(currentFieldName)) {
mltQuery.setPercentTermsToMatch(Float.parseFloat(jp.getText()));
}
} else if (token == JsonToken.VALUE_NUMBER_INT) {
if ("minTermFrequency".equals(currentFieldName)) {
if ("min_term_freq".equals(currentFieldName)) {
mltQuery.setMinTermFrequency(jp.getIntValue());
} else if ("maxQueryTerms".equals(currentFieldName)) {
} else if ("max_query_terms".equals(currentFieldName)) {
mltQuery.setMaxQueryTerms(jp.getIntValue());
} else if ("minDocFreq".equals(currentFieldName)) {
} else if ("min_doc_freq".equals(currentFieldName)) {
mltQuery.setMinDocFreq(jp.getIntValue());
} else if ("maxDocFreq".equals(currentFieldName)) {
} else if ("max_doc_freq".equals(currentFieldName)) {
mltQuery.setMaxDocFreq(jp.getIntValue());
} else if ("minWordLen".equals(currentFieldName)) {
} else if ("min_word_len".equals(currentFieldName)) {
mltQuery.setMinWordLen(jp.getIntValue());
} else if ("maxWordLen".equals(currentFieldName)) {
} else if ("max_word_len".equals(currentFieldName)) {
mltQuery.setMaxWordLen(jp.getIntValue());
} else if ("boostTerms".equals(currentFieldName)) {
} else if ("boost_terms".equals(currentFieldName)) {
mltQuery.setBoostTerms(jp.getIntValue() != 0);
} else if ("boostTermsFactor".equals(currentFieldName)) {
} else if ("boost_terms_factor".equals(currentFieldName)) {
mltQuery.setBoostTermsFactor(jp.getIntValue());
} else if ("percentTermsToMatch".equals(currentFieldName)) {
} else if ("percent_terms_to_match".equals(currentFieldName)) {
mltQuery.setPercentTermsToMatch(jp.getIntValue());
} else if ("boost".equals(currentFieldName)) {
mltQuery.setBoost(jp.getIntValue());
}
} else if (token == JsonToken.VALUE_NUMBER_FLOAT) {
if ("boostTermsFactor".equals(currentFieldName)) {
if ("boost_terms_factor".equals(currentFieldName)) {
mltQuery.setBoostTermsFactor(jp.getFloatValue());
} else if ("percentTermsToMatch".equals(currentFieldName)) {
} else if ("percent_terms_to_match".equals(currentFieldName)) {
mltQuery.setPercentTermsToMatch(jp.getFloatValue());
} else if ("boost".equals(currentFieldName)) {
mltQuery.setBoost(jp.getFloatValue());
}
} else if (token == JsonToken.START_ARRAY) {
if ("stopWords".equals(currentFieldName)) {
if ("stop_words".equals(currentFieldName)) {
Set<String> stopWords = Sets.newHashSet();
while ((token = jp.nextToken()) != JsonToken.END_ARRAY) {
stopWords.add(jp.getText());
@ -131,7 +135,7 @@ public class MoreLikeThisFieldJsonQueryParser extends AbstractIndexComponent imp
}

if (mltQuery.getLikeText() == null) {
throw new QueryParsingException(index, "moreLikeThisField requires 'likeText' to be specified");
throw new QueryParsingException(index, "more_like_this_field requires 'like_text' to be specified");
}

// move to the next end object, to close the field name
@ -36,7 +36,7 @@ public class MoreLikeThisJsonQueryBuilder extends BaseJsonQueryBuilder {

private String likeText;
private float percentTermsToMatch = -1;
private int minTermFrequency = -1;
private int minTermFreq = -1;
private int maxQueryTerms = -1;
private String[] stopWords = null;
private int minDocFreq = -1;

@ -45,6 +45,7 @@ public class MoreLikeThisJsonQueryBuilder extends BaseJsonQueryBuilder {
private int maxWordLen = -1;
private Boolean boostTerms = null;
private float boostTermsFactor = -1;
private float boost = -1;

/**
* Constructs a new more like this query which uses the "_all" field.

@ -82,8 +83,8 @@ public class MoreLikeThisJsonQueryBuilder extends BaseJsonQueryBuilder {
* The frequency below which terms will be ignored in the source doc. The default
* frequency is <tt>2</tt>.
*/
public MoreLikeThisJsonQueryBuilder minTermFrequency(int minTermFrequency) {
this.minTermFrequency = minTermFrequency;
public MoreLikeThisJsonQueryBuilder minTermFreq(int minTermFreq) {
this.minTermFreq = minTermFreq;
return this;
}

@ -161,6 +162,11 @@ public class MoreLikeThisJsonQueryBuilder extends BaseJsonQueryBuilder {
return this;
}

public MoreLikeThisJsonQueryBuilder boost(float boost) {
this.boost = boost;
return this;
}

@Override protected void doJson(JsonBuilder builder, Params params) throws IOException {
builder.startObject(MoreLikeThisJsonQueryParser.NAME);
if (fields != null) {

@ -173,40 +179,43 @@ public class MoreLikeThisJsonQueryBuilder extends BaseJsonQueryBuilder {
if (likeText == null) {
throw new QueryBuilderException("moreLikeThis requires 'likeText' to be provided");
}
builder.field("likeText", likeText);
builder.field("like_text", likeText);
if (percentTermsToMatch != -1) {
builder.field("percentTermsToMatch", percentTermsToMatch);
builder.field("percent_terms_to_match", percentTermsToMatch);
}
if (minTermFrequency != -1) {
builder.field("minTermFrequency", minTermFrequency);
if (minTermFreq != -1) {
builder.field("min_term_freq", minTermFreq);
}
if (maxQueryTerms != -1) {
builder.field("maxQueryTerms", maxQueryTerms);
builder.field("max_query_terms", maxQueryTerms);
}
if (stopWords != null && stopWords.length > 0) {
builder.startArray("stopWords");
builder.startArray("stop_words");
for (String stopWord : stopWords) {
builder.value(stopWord);
}
builder.endArray();
}
if (minDocFreq != -1) {
builder.field("minDocFreq", minDocFreq);
builder.field("min_doc_freq", minDocFreq);
}
if (maxDocFreq != -1) {
builder.field("maxDocFreq", maxDocFreq);
builder.field("max_doc_freq", maxDocFreq);
}
if (minWordLen != -1) {
builder.field("minWordLen", minWordLen);
builder.field("min_word_len", minWordLen);
}
if (maxWordLen != -1) {
builder.field("maxWordLen", maxWordLen);
builder.field("max_word_len", maxWordLen);
}
if (boostTerms != null) {
builder.field("boostTerms", boostTerms);
builder.field("boost_terms", boostTerms);
}
if (boostTermsFactor != -1) {
builder.field("boostTermsFactor", boostTermsFactor);
builder.field("boost_terms_factor", boostTermsFactor);
}
if (boost != -1) {
builder.field("boost", boost);
}
builder.endObject();
}
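A hedged sketch of the multi-field form after the rename; the field names and values are invented:

    {
        more_like_this : {
            fields : ["name.first", "name.last"],
            like_text : "text like this one",
            min_term_freq : 2,
            max_query_terms : 25,
            stop_words : ["the", "a"],
            boost_terms : true,
            boost_terms_factor : 2.0
        }
    }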

@ -43,7 +43,7 @@ import static com.google.common.collect.Lists.*;
*/
public class MoreLikeThisJsonQueryParser extends AbstractIndexComponent implements JsonQueryParser {

public static final String NAME = "moreLikeThis";
public static final String NAME = "more_like_this";

public MoreLikeThisJsonQueryParser(Index index, @IndexSettings Settings indexSettings) {
super(index, indexSettings);

@ -66,55 +66,59 @@ public class MoreLikeThisJsonQueryParser extends AbstractIndexComponent implemen
if (token == JsonToken.FIELD_NAME) {
currentFieldName = jp.getCurrentName();
} else if (token == JsonToken.VALUE_STRING) {
if ("likeText".equals(currentFieldName)) {
if ("like_text".equals(currentFieldName)) {
mltQuery.setLikeText(jp.getText());
} else if ("minTermFrequency".equals(currentFieldName)) {
} else if ("min_term_freq".equals(currentFieldName)) {
mltQuery.setMinTermFrequency(Integer.parseInt(jp.getText()));
} else if ("maxQueryTerms".equals(currentFieldName)) {
} else if ("max_query_terms".equals(currentFieldName)) {
mltQuery.setMaxQueryTerms(Integer.parseInt(jp.getText()));
} else if ("minDocFreq".equals(currentFieldName)) {
} else if ("min_doc_freq".equals(currentFieldName)) {
mltQuery.setMinDocFreq(Integer.parseInt(jp.getText()));
} else if ("maxDocFreq".equals(currentFieldName)) {
} else if ("max_doc_freq".equals(currentFieldName)) {
mltQuery.setMaxDocFreq(Integer.parseInt(jp.getText()));
} else if ("minWordLen".equals(currentFieldName)) {
} else if ("min_word_len".equals(currentFieldName)) {
mltQuery.setMinWordLen(Integer.parseInt(jp.getText()));
} else if ("maxWordLen".equals(currentFieldName)) {
} else if ("max_word_len".equals(currentFieldName)) {
mltQuery.setMaxWordLen(Integer.parseInt(jp.getText()));
} else if ("boostTerms".equals(currentFieldName)) {
} else if ("boost_terms".equals(currentFieldName)) {
mltQuery.setBoostTerms(Booleans.parseBoolean(jp.getText(), false));
} else if ("boostTermsFactor".equals(currentFieldName)) {
} else if ("boost_terms_factor".equals(currentFieldName)) {
mltQuery.setBoostTermsFactor(Float.parseFloat(jp.getText()));
} else if ("percentTermsToMatch".equals(currentFieldName)) {
} else if ("percent_terms_to_match".equals(currentFieldName)) {
mltQuery.setPercentTermsToMatch(Float.parseFloat(jp.getText()));
}
} else if (token == JsonToken.VALUE_NUMBER_INT) {
if ("minTermFrequency".equals(currentFieldName)) {
if ("min_term_freq".equals(currentFieldName)) {
mltQuery.setMinTermFrequency(jp.getIntValue());
} else if ("maxQueryTerms".equals(currentFieldName)) {
} else if ("max_query_terms".equals(currentFieldName)) {
mltQuery.setMaxQueryTerms(jp.getIntValue());
} else if ("minDocFreq".equals(currentFieldName)) {
} else if ("min_doc_freq".equals(currentFieldName)) {
mltQuery.setMinDocFreq(jp.getIntValue());
} else if ("maxDocFreq".equals(currentFieldName)) {
} else if ("max_doc_freq".equals(currentFieldName)) {
mltQuery.setMaxDocFreq(jp.getIntValue());
} else if ("minWordLen".equals(currentFieldName)) {
} else if ("min_word_len".equals(currentFieldName)) {
mltQuery.setMinWordLen(jp.getIntValue());
} else if ("maxWordLen".equals(currentFieldName)) {
} else if ("max_word_len".equals(currentFieldName)) {
mltQuery.setMaxWordLen(jp.getIntValue());
} else if ("boostTerms".equals(currentFieldName)) {
} else if ("boost_terms".equals(currentFieldName)) {
mltQuery.setBoostTerms(jp.getIntValue() != 0);
} else if ("boostTermsFactor".equals(currentFieldName)) {
} else if ("boost_terms_factor".equals(currentFieldName)) {
mltQuery.setBoostTermsFactor(jp.getIntValue());
} else if ("percentTermsToMatch".equals(currentFieldName)) {
} else if ("percent_terms_to_match".equals(currentFieldName)) {
mltQuery.setPercentTermsToMatch(jp.getIntValue());
} else if ("boost".equals(currentFieldName)) {
mltQuery.setBoost(jp.getIntValue());
}
} else if (token == JsonToken.VALUE_NUMBER_FLOAT) {
if ("boostTermsFactor".equals(currentFieldName)) {
if ("boost_terms_factor".equals(currentFieldName)) {
mltQuery.setBoostTermsFactor(jp.getFloatValue());
} else if ("percentTermsToMatch".equals(currentFieldName)) {
} else if ("percent_terms_to_match".equals(currentFieldName)) {
mltQuery.setPercentTermsToMatch(jp.getFloatValue());
} else if ("boost".equals(currentFieldName)) {
mltQuery.setBoost(jp.getFloatValue());
}
} else if (token == JsonToken.START_ARRAY) {
if ("stopWords".equals(currentFieldName)) {
if ("stop_words".equals(currentFieldName)) {
Set<String> stopWords = Sets.newHashSet();
while ((token = jp.nextToken()) != JsonToken.END_ARRAY) {
stopWords.add(jp.getText());
@ -131,10 +135,10 @@ public class MoreLikeThisJsonQueryParser extends AbstractIndexComponent implemen
}

if (mltQuery.getLikeText() == null) {
throw new QueryParsingException(index, "moreLikeThis requires 'likeText' to be specified");
throw new QueryParsingException(index, "more_like_this requires 'like_text' to be specified");
}
if (mltQuery.getMoreLikeFields() == null || mltQuery.getMoreLikeFields().length == 0) {
throw new QueryParsingException(index, "moreLikeThis requires 'fields' to be specified");
throw new QueryParsingException(index, "more_like_this requires 'fields' to be specified");
}

// move to the next end object, to close the field name
@ -221,7 +221,7 @@ public class QueryStringJsonQueryBuilder extends BaseJsonQueryBuilder {
builder.startObject(QueryStringJsonQueryParser.NAME);
builder.field("query", queryString);
if (defaultField != null) {
builder.field("defaultField", defaultField);
builder.field("default_field", defaultField);
}
if (fields != null) {
builder.startArray("fields");

@ -238,37 +238,37 @@ public class QueryStringJsonQueryBuilder extends BaseJsonQueryBuilder {
builder.endArray();
}
if (useDisMax != null) {
builder.field("useDisMax", useDisMax);
builder.field("use_dis_max", useDisMax);
}
if (tieBreaker != -1) {
builder.field("tieBreaker", tieBreaker);
builder.field("tie_breaker", tieBreaker);
}
if (defaultOperator != null) {
builder.field("defaultOperator", defaultOperator.name().toLowerCase());
builder.field("default_operator", defaultOperator.name().toLowerCase());
}
if (analyzer != null) {
builder.field("analyzer", analyzer);
}
if (allowLeadingWildcard != null) {
builder.field("allowLeadingWildcard", allowLeadingWildcard);
builder.field("allow_leading_wildcard", allowLeadingWildcard);
}
if (lowercaseExpandedTerms != null) {
builder.field("lowercaseExpandedTerms", lowercaseExpandedTerms);
builder.field("lowercase_expanded_terms", lowercaseExpandedTerms);
}
if (enablePositionIncrements != null) {
builder.field("enablePositionIncrements", enablePositionIncrements);
builder.field("enable_position_increments", enablePositionIncrements);
}
if (fuzzyMinSim != -1) {
builder.field("fuzzyMinSim", fuzzyMinSim);
builder.field("fuzzy_min_sim", fuzzyMinSim);
}
if (boost != -1) {
builder.field("boost", boost);
}
if (fuzzyPrefixLength != -1) {
builder.field("fuzzyPrefixLength", fuzzyPrefixLength);
builder.field("fuzzy_prefix_length", fuzzyPrefixLength);
}
if (phraseSlop != -1) {
builder.field("phraseSlop", phraseSlop);
builder.field("phrase_slop", phraseSlop);
}
builder.endObject();
}
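A hedged sketch of the query_string body with the renamed parameters; values are invented:

    {
        query_string : {
            default_field : "content",
            query : "this AND that",
            default_operator : "or",
            use_dis_max : true,
            tie_breaker : 0.7,
            allow_leading_wildcard : false,
            fuzzy_min_sim : 0.5,
            phrase_slop : 2
        }
    }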

@ -50,7 +50,7 @@ import static org.elasticsearch.util.lucene.search.Queries.*;
*/
public class QueryStringJsonQueryParser extends AbstractIndexComponent implements JsonQueryParser {

public static final String NAME = "queryString";
public static final String NAME = "query_string";

private final AnalysisService analysisService;

@ -123,9 +123,9 @@ public class QueryStringJsonQueryParser extends AbstractIndexComponent implement
} else if (token == JsonToken.VALUE_STRING) {
if ("query".equals(currentFieldName)) {
queryString = jp.getText();
} else if ("defaultField".equals(currentFieldName)) {
} else if ("default_field".equals(currentFieldName)) {
defaultField = parseContext.indexName(jp.getText());
} else if ("defaultOperator".equals(currentFieldName)) {
} else if ("default_operator".equals(currentFieldName)) {
String op = jp.getText();
if ("or".equalsIgnoreCase(op)) {
defaultOperator = QueryParser.Operator.OR;

@ -136,73 +136,73 @@ public class QueryStringJsonQueryParser extends AbstractIndexComponent implement
}
} else if ("analyzer".equals(currentFieldName)) {
analyzer = analysisService.analyzer(jp.getText());
} else if ("allowLeadingWildcard".equals(currentFieldName)) {
} else if ("allow_leading_wildcard".equals(currentFieldName)) {
allowLeadingWildcard = Booleans.parseBoolean(jp.getText(), false);
} else if ("lowercaseExpandedTerms".equals(currentFieldName)) {
} else if ("lowercase_expanded_terms".equals(currentFieldName)) {
lowercaseExpandedTerms = Booleans.parseBoolean(jp.getText(), false);
} else if ("enablePositionIncrements".equals(currentFieldName)) {
} else if ("enable_position_increments".equals(currentFieldName)) {
enablePositionIncrements = Booleans.parseBoolean(jp.getText(), false);
} else if ("escape".equals(currentFieldName)) {
escape = Booleans.parseBoolean(jp.getText(), false);
} else if ("useDisMax".equals(currentFieldName)) {
} else if ("use_dis_max".equals(currentFieldName)) {
useDisMax = Booleans.parseBoolean(jp.getText(), false);
} else if ("fuzzyPrefixLength".equals(currentFieldName)) {
} else if ("fuzzy_prefix_length".equals(currentFieldName)) {
fuzzyPrefixLength = Integer.parseInt(jp.getText());
} else if ("phraseSlop".equals(currentFieldName)) {
} else if ("phrase_slop".equals(currentFieldName)) {
phraseSlop = Integer.parseInt(jp.getText());
} else if ("fuzzyMinSim".equals(currentFieldName)) {
} else if ("fuzzy_min_sim".equals(currentFieldName)) {
fuzzyMinSim = Float.parseFloat(jp.getText());
} else if ("boost".equals(currentFieldName)) {
boost = Float.parseFloat(jp.getText());
} else if ("tieBreaker".equals(currentFieldName)) {
} else if ("tie_breaker".equals(currentFieldName)) {
tieBreaker = Float.parseFloat(jp.getText());
}
} else if (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE) {
if ("allowLeadingWildcard".equals(currentFieldName)) {
if ("allow_leading_wildcard".equals(currentFieldName)) {
allowLeadingWildcard = token == JsonToken.VALUE_TRUE;
} else if ("lowercaseExpandedTerms".equals(currentFieldName)) {
} else if ("lowercase_expanded_terms".equals(currentFieldName)) {
lowercaseExpandedTerms = token == JsonToken.VALUE_TRUE;
} else if ("enablePositionIncrements".equals(currentFieldName)) {
} else if ("enable_position_increments".equals(currentFieldName)) {
enablePositionIncrements = token == JsonToken.VALUE_TRUE;
} else if ("escape".equals(currentFieldName)) {
escape = token == JsonToken.VALUE_TRUE;
} else if ("useDisMax".equals(currentFieldName)) {
} else if ("use_dis_max".equals(currentFieldName)) {
useDisMax = token == JsonToken.VALUE_TRUE;
}
} else if (token == JsonToken.VALUE_NUMBER_FLOAT) {
if ("fuzzyMinSim".equals(currentFieldName)) {
if ("fuzzy_min_sim".equals(currentFieldName)) {
fuzzyMinSim = jp.getFloatValue();
} else if ("boost".equals(currentFieldName)) {
boost = jp.getFloatValue();
} else if ("tieBreaker".equals(currentFieldName)) {
} else if ("tie_breaker".equals(currentFieldName)) {
tieBreaker = jp.getFloatValue();
}
} else if (token == JsonToken.VALUE_NUMBER_INT) {
if ("fuzzyPrefixLength".equals(currentFieldName)) {
if ("fuzzy_prefix_length".equals(currentFieldName)) {
fuzzyPrefixLength = jp.getIntValue();
} else if ("phraseSlop".equals(currentFieldName)) {
} else if ("phrase_slop".equals(currentFieldName)) {
phraseSlop = jp.getIntValue();
} else if ("fuzzyMinSim".equals(currentFieldName)) {
} else if ("fuzzy_min_sim".equals(currentFieldName)) {
fuzzyMinSim = jp.getFloatValue();
} else if ("boost".equals(currentFieldName)) {
boost = jp.getFloatValue();
} else if ("allowLeadingWildcard".equals(currentFieldName)) {
} else if ("allow_leading_wildcard".equals(currentFieldName)) {
allowLeadingWildcard = jp.getIntValue() != 0;
} else if ("lowercaseExpandedTerms".equals(currentFieldName)) {
} else if ("lowercase_expanded_terms".equals(currentFieldName)) {
lowercaseExpandedTerms = jp.getIntValue() != 0;
} else if ("enablePositionIncrements".equals(currentFieldName)) {
} else if ("enable_position_increments".equals(currentFieldName)) {
enablePositionIncrements = jp.getIntValue() != 0;
} else if ("escape".equals(currentFieldName)) {
escape = jp.getIntValue() != 0;
} else if ("useDisMax".equals(currentFieldName)) {
} else if ("use_dis_max".equals(currentFieldName)) {
useDisMax = jp.getIntValue() != 0;
} else if ("tieBreaker".equals(currentFieldName)) {
} else if ("tie_breaker".equals(currentFieldName)) {
tieBreaker = jp.getFloatValue();
}
}
}
if (queryString == null) {
throw new QueryParsingException(index, "QueryString must be provided with a [query]");
throw new QueryParsingException(index, "query_string must be provided with a [query]");
}
if (analyzer == null) {
analyzer = parseContext.mapperService().searchAnalyzer();
@ -150,8 +150,8 @@ public class RangeJsonFilterBuilder extends BaseJsonFilterBuilder {
builder.startObject(name);
builder.field("from", from);
builder.field("to", to);
builder.field("includeLower", includeLower);
builder.field("includeUpper", includeUpper);
builder.field("include_lower", includeLower);
builder.field("include_upper", includeUpper);
builder.endObject();
builder.endObject();
}

@ -84,7 +84,7 @@ public class RangeJsonFilterParser extends AbstractIndexComponent implements Jso
} else {
to = jp.getText();
}
} else if ("includeLower".equals(currentFieldName)) {
} else if ("include_lower".equals(currentFieldName)) {
if (token == JsonToken.VALUE_NUMBER_INT) {
includeLower = jp.getIntValue() != 0;
} else if (token == JsonToken.VALUE_STRING) {

@ -92,7 +92,7 @@ public class RangeJsonFilterParser extends AbstractIndexComponent implements Jso
} else {
includeLower = token == JsonToken.VALUE_TRUE;
}
} else if ("includeUpper".equals(currentFieldName)) {
} else if ("include_upper".equals(currentFieldName)) {
if (token == JsonToken.VALUE_NUMBER_INT) {
includeUpper = jp.getIntValue() != 0;
} else if (token == JsonToken.VALUE_STRING) {

@ -177,8 +177,8 @@ public class RangeJsonQueryBuilder extends BaseJsonQueryBuilder {
builder.startObject(name);
builder.field("from", from);
builder.field("to", to);
builder.field("includeLower", includeLower);
builder.field("includeUpper", includeUpper);
builder.field("include_lower", includeLower);
builder.field("include_upper", includeUpper);
if (boost != -1) {
builder.field("boost", boost);
}
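A hedged sketch of the range body with the renamed bounds flags; the field name and values are invented, and the same keys apply to the range filter form above:

    {
        range : {
            age : {
                from : 10,
                to : 20,
                include_lower : true,
                include_upper : false,
                boost : 2.0
            }
        }
    }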

@ -81,7 +81,7 @@ public class RangeJsonQueryParser extends AbstractIndexComponent implements Json
} else {
to = jp.getText();
}
} else if ("includeLower".equals(currentFieldName)) {
} else if ("include_lower".equals(currentFieldName)) {
if (token == JsonToken.VALUE_NUMBER_INT) {
includeLower = jp.getIntValue() != 0;
} else if (token == JsonToken.VALUE_STRING) {

@ -89,7 +89,7 @@ public class RangeJsonQueryParser extends AbstractIndexComponent implements Json
} else {
includeLower = token == JsonToken.VALUE_TRUE;
}
} else if ("includeUpper".equals(currentFieldName)) {
} else if ("include_upper".equals(currentFieldName)) {
if (token == JsonToken.VALUE_NUMBER_INT) {
includeUpper = jp.getIntValue() != 0;
} else if (token == JsonToken.VALUE_STRING) {

@ -38,7 +38,7 @@ import java.io.IOException;
*/
public class SpanFirstJsonQueryParser extends AbstractIndexComponent implements JsonQueryParser {

public static final String NAME = "spanFirst";
public static final String NAME = "span_first";

@Inject public SpanFirstJsonQueryParser(Index index, @IndexSettings Settings settings) {
super(index, settings);

@ -80,10 +80,10 @@ public class SpanNearJsonQueryBuilder extends BaseJsonQueryBuilder implements Js
builder.endArray();
builder.field("slop", slop);
if (inOrder != null) {
builder.field("inOrder", inOrder);
builder.field("in_order", inOrder);
}
if (collectPayloads != null) {
builder.field("collectPayloads", collectPayloads);
builder.field("collect_payloads", collectPayloads);
}
if (boost != -1) {
builder.field("boost", boost);

@ -42,7 +42,7 @@ import static com.google.common.collect.Lists.*;
*/
public class SpanNearJsonQueryParser extends AbstractIndexComponent implements JsonQueryParser {

public static final String NAME = "spanNear";
public static final String NAME = "span_near";

@Inject public SpanNearJsonQueryParser(Index index, @IndexSettings Settings settings) {
super(index, settings);

@ -78,9 +78,9 @@ public class SpanNearJsonQueryParser extends AbstractIndexComponent implements J
}
}
} else if (token == JsonToken.VALUE_STRING) {
if ("inOrder".equals(currentFieldName)) {
if ("in_order".equals(currentFieldName)) {
inOrder = Booleans.parseBoolean(jp.getText(), inOrder);
} else if ("collectPayloads".equals(currentFieldName)) {
} else if ("collect_payloads".equals(currentFieldName)) {
collectPayloads = Booleans.parseBoolean(jp.getText(), collectPayloads);
} else if ("slop".equals(currentFieldName)) {
slop = Integer.parseInt(jp.getText());

@ -88,15 +88,15 @@ public class SpanNearJsonQueryParser extends AbstractIndexComponent implements J
boost = Float.parseFloat(jp.getText());
}
} else if (token == JsonToken.VALUE_FALSE || token == JsonToken.VALUE_TRUE) {
if ("inOrder".equals(currentFieldName)) {
if ("in_order".equals(currentFieldName)) {
inOrder = token == JsonToken.VALUE_TRUE;
} else if ("collectPayloads".equals(currentFieldName)) {
} else if ("collect_payloads".equals(currentFieldName)) {
collectPayloads = token == JsonToken.VALUE_TRUE;
}
} else if (token == JsonToken.VALUE_NUMBER_INT) {
if ("inOrder".equals(currentFieldName)) {
if ("in_order".equals(currentFieldName)) {
inOrder = jp.getIntValue() != 0;
} else if ("collectPayloads".equals(currentFieldName)) {
} else if ("collect_payloads".equals(currentFieldName)) {
collectPayloads = jp.getIntValue() != 0;
} else if ("slop".equals(currentFieldName)) {
slop = jp.getIntValue();

@ -110,10 +110,10 @@ public class SpanNearJsonQueryParser extends AbstractIndexComponent implements J
}
}
if (clauses.isEmpty()) {
throw new QueryParsingException(index, "spanNear must include [clauses]");
throw new QueryParsingException(index, "span_near must include [clauses]");
}
if (slop == -1) {
throw new QueryParsingException(index, "spanNear must include [slop]");
throw new QueryParsingException(index, "span_near must include [slop]");
}
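A hedged sketch of the span_near body with the renamed flags; the clauses array shape is an assumption based on the parser's clause handling, and span_term matches the rename a few hunks below:

    {
        span_near : {
            clauses : [
                { span_term : { field : "value1" } },
                { span_term : { field : "value2" } }
            ],
            slop : 12,
            in_order : false,
            collect_payloads : false
        }
    }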

SpanNearQuery query = new SpanNearQuery(clauses.toArray(new SpanQuery[clauses.size()]), slop, inOrder, collectPayloads);

@ -38,7 +38,7 @@ import java.io.IOException;
*/
public class SpanNotJsonQueryParser extends AbstractIndexComponent implements JsonQueryParser {

public static final String NAME = "spanNot";
public static final String NAME = "span_not";

@Inject public SpanNotJsonQueryParser(Index index, @IndexSettings Settings settings) {
super(index, settings);

@ -41,7 +41,7 @@ import static com.google.common.collect.Lists.*;
*/
public class SpanOrJsonQueryParser extends AbstractIndexComponent implements JsonQueryParser {

public static final String NAME = "spanOr";
public static final String NAME = "span_or";

@Inject public SpanOrJsonQueryParser(Index index, @IndexSettings Settings settings) {
super(index, settings);

@ -41,7 +41,7 @@ import static org.elasticsearch.index.query.support.QueryParsers.*;
*/
public class SpanTermJsonQueryParser extends AbstractIndexComponent implements JsonQueryParser {

public static final String NAME = "spanTerm";
public static final String NAME = "span_term";

@Inject public SpanTermJsonQueryParser(Index index, @IndexSettings Settings settings) {
super(index, settings);

@ -108,8 +108,8 @@ public class RecoveryAction extends AbstractIndexShardComponent implements Close
snapshotTransportAction = shardId.index().name() + "/" + shardId.id() + "/recovery/snapshot";
transportService.registerHandler(snapshotTransportAction, new SnapshotTransportRequestHandler());

this.fileChunkSize = componentSettings.getAsSize("fileChunkSize", new SizeValue(100, SizeUnit.KB));
logger.trace("Recovery Action registered, using fileChunkSize[{}]", fileChunkSize);
this.fileChunkSize = componentSettings.getAsSize("file_chunk_size", new SizeValue(100, SizeUnit.KB));
logger.trace("Recovery Action registered, using file_chunk_size[{}]", fileChunkSize);
}
|
||||
|
||||
public void close() {
|
||||
|
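A minimal sketch of overriding the renamed recovery setting. Only the relative key file_chunk_size is confirmed by the diff; the index.shard.recovery prefix and the ImmutableSettings builder are assumptions about this era of the codebase:

    // Hypothetical absolute key: componentSettings resolves "file_chunk_size"
    // against the component's own prefix, assumed here to be index.shard.recovery.
    Settings settings = ImmutableSettings.settingsBuilder()
            .put("index.shard.recovery.file_chunk_size", "512k")
            .build();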
@ -60,10 +60,10 @@ public abstract class AbstractFsStore<T extends Directory> extends AbstractStore
        if (!cache) {
            return null;
        }
        SizeValue bufferSize = componentSettings.getAsSize("cache.bufferSize", new SizeValue(100, SizeUnit.KB));
        SizeValue cacheSize = componentSettings.getAsSize("cache.cacheSize", new SizeValue(20, SizeUnit.MB));
        SizeValue bufferSize = componentSettings.getAsSize("cache.buffer_size", new SizeValue(100, SizeUnit.KB));
        SizeValue cacheSize = componentSettings.getAsSize("cache.cache_size", new SizeValue(20, SizeUnit.MB));
        boolean direct = componentSettings.getAsBoolean("cache.direct", true);
        boolean warmCache = componentSettings.getAsBoolean("cache.warmCache", true);
        boolean warmCache = componentSettings.getAsBoolean("cache.warm_cache", true);

        Directory memDir;
        if (direct) {
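A sketch of the renamed switch-directory cache keys. The diff confirms only the relative names (cache.buffer_size, cache.cache_size, cache.direct, cache.warm_cache); the index.store.fs prefix is a guess:

    // Hypothetical prefix; the relative keys are the ones renamed above.
    Settings settings = ImmutableSettings.settingsBuilder()
            .put("index.store.fs.cache.buffer_size", "100k") // was cache.bufferSize
            .put("index.store.fs.cache.cache_size", "20m")   // was cache.cacheSize
            .put("index.store.fs.cache.warm_cache", true)    // was cache.warmCache
            .build();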
@ -54,7 +54,7 @@ public class MmapFsStore extends AbstractFsStore<Directory> {
    @Inject public MmapFsStore(ShardId shardId, @IndexSettings Settings indexSettings, Environment environment, @LocalNodeId String localNodeId) throws IOException {
        super(shardId, indexSettings);
        // by default, we don't need to sync to disk, since we use the gateway
        this.syncToDisk = componentSettings.getAsBoolean("syncToDisk", false);
        this.syncToDisk = componentSettings.getAsBoolean("sync_to_disk", false);
        this.fsDirectory = new CustomMMapDirectory(createStoreFilePath(environment.workWithClusterFile(), localNodeId, shardId, MAIN_INDEX_SUFFIX), syncToDisk);

        SwitchDirectory switchDirectory = buildSwitchDirectoryIfNeeded(fsDirectory);

@ -52,7 +52,7 @@ public class NioFsStore extends AbstractFsStore<Directory> {
    @Inject public NioFsStore(ShardId shardId, @IndexSettings Settings indexSettings, Environment environment, @LocalNodeId String localNodeId) throws IOException {
        super(shardId, indexSettings);
        // by default, we don't need to sync to disk, since we use the gateway
        this.syncToDisk = componentSettings.getAsBoolean("syncToDisk", false);
        this.syncToDisk = componentSettings.getAsBoolean("sync_to_disk", false);
        this.fsDirectory = new CustomNioFSDirectory(createStoreFilePath(environment.workWithClusterFile(), localNodeId, shardId, MAIN_INDEX_SUFFIX), syncToDisk);

        SwitchDirectory switchDirectory = buildSwitchDirectoryIfNeeded(fsDirectory);

@ -52,7 +52,7 @@ public class SimpleFsStore extends AbstractFsStore<Directory> {
    @Inject public SimpleFsStore(ShardId shardId, @IndexSettings Settings indexSettings, Environment environment, @LocalNodeId String localNodeId) throws IOException {
        super(shardId, indexSettings);
        // by default, we don't need to sync to disk, since we use the gateway
        this.syncToDisk = componentSettings.getAsBoolean("syncToDisk", false);
        this.syncToDisk = componentSettings.getAsBoolean("sync_to_disk", false);
        this.fsDirectory = new CustomSimpleFSDirectory(createStoreFilePath(environment.workWithClusterFile(), localNodeId, shardId, MAIN_INDEX_SUFFIX), syncToDisk);

        SwitchDirectory switchDirectory = buildSwitchDirectoryIfNeeded(fsDirectory);
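All three filesystem stores (mmap, nio, simple) read the same renamed flag. A sketch, again with an assumed index.store.fs prefix:

    // sync_to_disk (was syncToDisk) defaults to false because the gateway
    // already provides durability, per the constructor comments above.
    Settings settings = ImmutableSettings.settingsBuilder()
            .put("index.store.fs.sync_to_disk", true)
            .build();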
@ -45,12 +45,12 @@ public class ByteBufferStore extends AbstractStore<ByteBufferDirectory> {
    @Inject public ByteBufferStore(ShardId shardId, @IndexSettings Settings indexSettings) {
        super(shardId, indexSettings);

        this.bufferSize = componentSettings.getAsSize("bufferSize", new SizeValue(100, SizeUnit.KB));
        this.cacheSize = componentSettings.getAsSize("cacheSize", new SizeValue(20, SizeUnit.MB));
        this.bufferSize = componentSettings.getAsSize("buffer_size", new SizeValue(100, SizeUnit.KB));
        this.cacheSize = componentSettings.getAsSize("cache_size", new SizeValue(20, SizeUnit.MB));
        this.direct = componentSettings.getAsBoolean("direct", true);
        this.warmCache = componentSettings.getAsBoolean("warmCache", true);
        this.warmCache = componentSettings.getAsBoolean("warm_cache", true);
        this.directory = new ByteBufferDirectory((int) bufferSize.bytes(), (int) cacheSize.bytes(), direct, warmCache);
        logger.debug("Using [ByteBuffer] Store with bufferSize[{}], cacheSize[{}], direct[{}], warmCache[{}]",
        logger.debug("Using [ByteBuffer] Store with buffer_size[{}], cache_size[{}], direct[{}], warm_cache[{}]",
                new Object[]{bufferSize, cacheSize, directory.isDirect(), warmCache});
    }
@ -43,12 +43,12 @@ public class HeapStore extends AbstractStore<HeapDirectory> {
    @Inject public HeapStore(ShardId shardId, @IndexSettings Settings indexSettings) {
        super(shardId, indexSettings);

        this.bufferSize = componentSettings.getAsSize("bufferSize", new SizeValue(100, SizeUnit.KB));
        this.cacheSize = componentSettings.getAsSize("cacheSize", new SizeValue(20, SizeUnit.MB));
        this.warmCache = componentSettings.getAsBoolean("warmCache", true);
        this.bufferSize = componentSettings.getAsSize("buffer_size", new SizeValue(100, SizeUnit.KB));
        this.cacheSize = componentSettings.getAsSize("cache_size", new SizeValue(20, SizeUnit.MB));
        this.warmCache = componentSettings.getAsBoolean("warm_cache", true);

        this.directory = new HeapDirectory(bufferSize, cacheSize, warmCache);
        logger.debug("Using [Memory] Store with bufferSize[{}], cacheSize[{}], warmCache[{}]",
        logger.debug("Using [Memory] Store with buffer_size[{}], cache_size[{}], warm_cache[{}]",
                new Object[]{directory.bufferSize(), directory.cacheSize(), warmCache});
    }
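The in-memory stores follow suit. A sketch with an assumed index.store.memory prefix; the relative keys and their defaults come from the two constructors above:

    Settings settings = ImmutableSettings.settingsBuilder()
            .put("index.store.memory.buffer_size", "100k") // was bufferSize
            .put("index.store.memory.cache_size", "20m")   // was cacheSize
            .put("index.store.memory.warm_cache", false)   // was warmCache
            .build();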
@ -145,7 +145,7 @@ public class IndicesMemoryCleaner extends AbstractComponent {
        }

        @Override public String toString() {
            return "cleaned[" + cleaned + "], cleanedShards[" + cleanedShards + "], totalShards[" + totalShards + "]";
            return "cleaned[" + cleaned + "], cleaned_shards[" + cleanedShards + "], total_shards[" + totalShards + "]";
        }
    }

@ -179,7 +179,7 @@ public class IndicesMemoryCleaner extends AbstractComponent {
        }

        @Override public String toString() {
            return "cleaned[" + cleaned + "], estimatedFlushableSize[" + estimatedFlushableSize + "], cleanedShards[" + cleanedShards + "], totalShards[" + totalShards + "]";
            return "cleaned[" + cleaned + "], estimated_flushable_size[" + estimatedFlushableSize + "], cleaned_shards[" + cleanedShards + "], total_shards[" + totalShards + "]";
        }
    }
}

@ -64,7 +64,7 @@ public class DumpMonitorService extends AbstractComponent {
        contSettings = settings.getGroups("monitor.dump");
        workFile = environment.workWithClusterFile();

        this.dumpLocation = settings.get("dumpLocation");
        this.dumpLocation = settings.get("dump_location");

        File dumpLocationFile;
        if (dumpLocation != null) {
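DumpMonitorService reads its key straight off the node settings, so the absolute name is simply dump_location. A sketch (the path and the ImmutableSettings builder are illustrative assumptions):

    Settings settings = ImmutableSettings.settingsBuilder()
            .put("dump_location", "/var/log/elasticsearch/dumps") // was dumpLocation
            .build();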
@ -39,7 +39,8 @@ import static org.elasticsearch.util.TimeValue.*;
/**
 * @author kimchy (shay.banon)
 */
public class AlphaMemoryMonitor extends AbstractLifecycleComponent<MemoryMonitor> implements MemoryMonitor {
public class
AlphaMemoryMonitor extends AbstractLifecycleComponent<MemoryMonitor> implements MemoryMonitor {

    private final double upperMemoryThreshold;

@ -75,15 +76,15 @@ public class AlphaMemoryMonitor extends AbstractLifecycleComponent<MemoryMonitor
        this.threadPool = threadPool;
        this.indicesMemoryCleaner = indicesMemoryCleaner;

        this.upperMemoryThreshold = componentSettings.getAsDouble("upperMemoryThreshold", 0.8);
        this.lowerMemoryThreshold = componentSettings.getAsDouble("lowerMemoryThreshold", 0.5);
        this.upperMemoryThreshold = componentSettings.getAsDouble("upper_memory_threshold", 0.8);
        this.lowerMemoryThreshold = componentSettings.getAsDouble("lower_memory_threshold", 0.5);
        this.interval = componentSettings.getAsTime("interval", timeValueMillis(500));
        this.gcThreshold = componentSettings.getAsInt("gcThreshold", 5);
        this.cleanThreshold = componentSettings.getAsInt("cleanThreshold", 10);
        this.minimumFlushableSizeToClean = componentSettings.getAsSize("minimumFlushableSizeToClean", new SizeValue(5, SizeUnit.MB));
        this.translogNumberOfOperationsThreshold = componentSettings.getAsInt("translogNumberOfOperationsThreshold", 5000);
        this.gcThreshold = componentSettings.getAsInt("gc_threshold", 5);
        this.cleanThreshold = componentSettings.getAsInt("clean_threshold", 10);
        this.minimumFlushableSizeToClean = componentSettings.getAsSize("minimum_flushable_size_to_clean", new SizeValue(5, SizeUnit.MB));
        this.translogNumberOfOperationsThreshold = componentSettings.getAsInt("translog_number_of_operations_threshold", 5000);

        logger.debug("Interval[" + interval + "], upperMemoryThreshold[" + upperMemoryThreshold + "], lowerMemoryThreshold[" + lowerMemoryThreshold + "], translogNumberOfOperationsThreshold[" + translogNumberOfOperationsThreshold + "]");
        logger.debug("interval[" + interval + "], upper_memory_threshold[" + upperMemoryThreshold + "], lower_memory_threshold[" + lowerMemoryThreshold + "], translog_number_of_operations_threshold[" + translogNumberOfOperationsThreshold + "]");

        this.runtime = Runtime.getRuntime();
        this.maxMemory = new SizeValue(runtime.maxMemory());
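A sketch of tuning the renamed monitor thresholds. The monitor.memory.alpha prefix is a guess at this component's settings namespace; the relative keys and their defaults are the ones shown above:

    Settings settings = ImmutableSettings.settingsBuilder()
            .put("monitor.memory.alpha.upper_memory_threshold", 0.8)
            .put("monitor.memory.alpha.lower_memory_threshold", 0.5)
            .put("monitor.memory.alpha.clean_threshold", 10)
            .build();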
@ -42,8 +42,8 @@ public class JsonThrowableRestResponse extends JsonRestResponse {
    private static JsonBuilder convert(RestRequest request, Throwable t) throws IOException {
        JsonBuilder builder = binaryJsonBuilder().prettyPrint()
                .startObject().field("error", detailedMessage(t));
        if (t != null && request.paramAsBoolean("errorTrace", false)) {
            builder.startObject("errorTrace");
        if (t != null && request.paramAsBoolean("error_trace", false)) {
            builder.startObject("error_trace");
            boolean first = true;
            while (t != null) {
                if (!first) {
@ -66,13 +66,13 @@ public class JsonThrowableRestResponse extends JsonRestResponse {
            builder.field("message", t.getMessage());
            for (StackTraceElement stElement : t.getStackTrace()) {
                builder.startObject("at")
                        .field("className", stElement.getClassName())
                        .field("methodName", stElement.getMethodName());
                        .field("class", stElement.getClassName())
                        .field("method", stElement.getMethodName());
                if (stElement.getFileName() != null) {
                    builder.field("fileName", stElement.getFileName());
                    builder.field("file", stElement.getFileName());
                }
                if (stElement.getLineNumber() >= 0) {
                    builder.field("lineNumber", stElement.getLineNumber());
                    builder.field("line", stElement.getLineNumber());
                }
                builder.endObject();
            }
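The renamed builder fields imply an error response shaped roughly like the sketch below, hand-written from the calls above rather than captured from a running node:

    // {
    //   "error" : "...",
    //   "error_trace" : {
    //     "message" : "...",
    //     "at" : { "class" : "...", "method" : "...", "file" : "...", "line" : 42 }
    //   }
    // }
    // Clients opt in via the renamed query parameter, e.g. (hypothetical request):
    // curl 'localhost:9200/_search?error_trace=true'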
@ -51,11 +51,11 @@ public class RestClusterHealthAction extends BaseRestHandler {
        int level = 0;
        try {
            clusterHealthRequest.timeout(request.paramAsTime("timeout", clusterHealthRequest.timeout()));
            String waitForStatus = request.param("waitForStatus");
            String waitForStatus = request.param("wait_for_status");
            if (waitForStatus != null) {
                clusterHealthRequest.waitForStatus(ClusterHealthStatus.valueOf(waitForStatus.toUpperCase()));
            }
            clusterHealthRequest.waitForRelocatingShards(request.paramAsInt("waitForRelocatingShards", clusterHealthRequest.waitForRelocatingShards()));
            clusterHealthRequest.waitForRelocatingShards(request.paramAsInt("wait_for_relocating_shards", clusterHealthRequest.waitForRelocatingShards()));
            String sLevel = request.param("level");
            if (sLevel != null) {
                if ("cluster".equals("sLevel")) {
@ -83,13 +83,13 @@ public class RestClusterHealthAction extends BaseRestHandler {
                builder.startObject();

                builder.field("status", response.status().name().toLowerCase());
                builder.field("timedOut", response.timedOut());
                builder.field("activePrimaryShards", response.activePrimaryShards());
                builder.field("activeShards", response.activeShards());
                builder.field("relocatingShards", response.relocatingShards());
                builder.field("timed_out", response.timedOut());
                builder.field("active_primary_shards", response.activePrimaryShards());
                builder.field("active_shards", response.activeShards());
                builder.field("relocating_shards", response.relocatingShards());

                if (!response.validationFailures().isEmpty()) {
                    builder.startArray("validationFailures");
                    builder.startArray("validation_failures");
                    for (String validationFailure : response.validationFailures()) {
                        builder.value(validationFailure);
                    }
@ -100,7 +100,7 @@ public class RestClusterHealthAction extends BaseRestHandler {
                    builder.startObject(indexHealth.index());

                    if (!indexHealth.validationFailures().isEmpty()) {
                        builder.startArray("validationFailures");
                        builder.startArray("validation_failures");
                        for (String validationFailure : indexHealth.validationFailures()) {
                            builder.value(validationFailure);
                        }
@ -119,14 +119,14 @@ public class RestClusterHealthAction extends BaseRestHandler {
                    builder.startObject(indexHealth.index());

                    builder.field("status", indexHealth.status().name().toLowerCase());
                    builder.field("numberOfShards", indexHealth.numberOfShards());
                    builder.field("numberOfReplicas", indexHealth.numberOfReplicas());
                    builder.field("activePrimaryShards", indexHealth.activePrimaryShards());
                    builder.field("activeShards", indexHealth.activeShards());
                    builder.field("relocatingShards", indexHealth.relocatingShards());
                    builder.field("number_of_shards", indexHealth.numberOfShards());
                    builder.field("number_of_replicas", indexHealth.numberOfReplicas());
                    builder.field("active_primary_shards", indexHealth.activePrimaryShards());
                    builder.field("active_shards", indexHealth.activeShards());
                    builder.field("relocating_shards", indexHealth.relocatingShards());

                    if (!indexHealth.validationFailures().isEmpty()) {
                        builder.startArray("validationFailures");
                        builder.startArray("validation_failures");
                        for (String validationFailure : indexHealth.validationFailures()) {
                            builder.value(validationFailure);
                        }
@ -140,9 +140,9 @@ public class RestClusterHealthAction extends BaseRestHandler {
                        builder.startObject(Integer.toString(shardHealth.id()));

                        builder.field("status", shardHealth.status().name().toLowerCase());
                        builder.field("primaryActive", shardHealth.primaryActive());
                        builder.field("activeShards", shardHealth.activeShards());
                        builder.field("relocatingShards", shardHealth.relocatingShards());
                        builder.field("primary_active", shardHealth.primaryActive());
                        builder.field("active_shards", shardHealth.activeShards());
                        builder.field("relocating_shards", shardHealth.relocatingShards());

                        builder.endObject();
                    }
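Pulling the renames together, a hedged sketch of a cluster health call after this commit; the host and values are hypothetical, while the parameter and field names come from the handler above:

    // curl 'localhost:9200/_cluster/health?wait_for_status=green&wait_for_relocating_shards=0&timeout=30s'
    //
    // { "status" : "green", "timed_out" : false,
    //   "active_primary_shards" : 5, "active_shards" : 10,
    //   "relocating_shards" : 0, ... }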
Some files were not shown because too many files have changed in this diff.