Merge branch 'master' into feature/rank-eval
commit 2fde4b2883
@@ -238,7 +238,6 @@
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]IncompatibleClusterStateVersionException.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]InternalClusterInfoService.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]LocalNodeMasterListener.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]action[/\\]index[/\\]NodeIndexDeletedAction.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]action[/\\]index[/\\]NodeMappingRefreshAction.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]action[/\\]shard[/\\]ShardStateAction.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]block[/\\]ClusterBlock.java" checks="LineLength" />

@@ -415,7 +414,6 @@
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]mapper[/\\]RootObjectMapper.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]merge[/\\]MergeStats.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]AbstractQueryBuilder.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]MatchQueryParser.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]QueryBuilders.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]QueryValidationException.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]index[/\\]query[/\\]support[/\\]InnerHitsQueryParserHelper.java" checks="LineLength" />

@@ -488,15 +486,12 @@
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]cat[/\\]RestPendingClusterTasksAction.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]cat[/\\]RestShardsAction.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]rest[/\\]action[/\\]cat[/\\]RestThreadPoolAction.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]AbstractScriptParser.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]ScriptContextRegistry.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]ScriptModes.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]ScriptParameterParser.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]ScriptService.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]ScriptSettings.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]script[/\\]Template.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]MultiValueMode.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]SearchService.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]AggregatorFactories.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]InternalMultiBucketAggregation.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]ValuesSourceAggregationBuilder.java" checks="LineLength" />

@@ -555,29 +550,19 @@
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]support[/\\]ValuesSourceParser.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]support[/\\]format[/\\]ValueFormat.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]aggregations[/\\]support[/\\]format[/\\]ValueParser.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]builder[/\\]SearchSourceBuilder.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]controller[/\\]SearchPhaseController.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]dfs[/\\]AggregatedDfs.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]dfs[/\\]DfsSearchResult.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]fetch[/\\]FetchPhase.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]fetch[/\\]FetchSearchResult.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]fetch[/\\]FetchSubPhase.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]fetch[/\\]FetchSubPhaseParseElement.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]internal[/\\]DefaultSearchContext.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]internal[/\\]FilteredSearchContext.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]internal[/\\]InternalSearchHit.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]internal[/\\]SearchContext.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]internal[/\\]ShardSearchTransportRequest.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]lookup[/\\]FieldLookup.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]lookup[/\\]LeafDocLookup.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]lookup[/\\]LeafFieldsLookup.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]query[/\\]QueryPhase.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]rescore[/\\]QueryRescorer.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]rescore[/\\]RescoreParseElement.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]sort[/\\]GeoDistanceSortParser.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]sort[/\\]ScriptSortParser.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]sort[/\\]SortParseElement.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]SuggestContextParser.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]SuggestUtils.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]CompletionSuggestParser.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]context[/\\]CategoryContextMapping.java" checks="LineLength" />

@@ -586,9 +571,7 @@
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]completion[/\\]context[/\\]GeoQueryContext.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]CandidateScorer.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]NoisyChannelSpellChecker.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]PhraseSuggestParser.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]phrase[/\\]WordScorer.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]suggest[/\\]term[/\\]TermSuggestParser.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]RestoreService.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]SnapshotShardFailure.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]snapshots[/\\]SnapshotShardsService.java" checks="LineLength" />

@@ -952,7 +935,6 @@
  <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]basic[/\\]TransportTwoNodesSearchIT.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]child[/\\]ChildQuerySearchIT.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]child[/\\]ParentFieldLoadingIT.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]fetch[/\\]FetchSubPhasePluginIT.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]geo[/\\]GeoBoundingBoxIT.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]geo[/\\]GeoFilterIT.java" checks="LineLength" />
  <suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]geo[/\\]GeoShapeQueryTests.java" checks="LineLength" />

@@ -1007,7 +989,6 @@
  <suppress files="modules[/\\]percolator[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]percolator[/\\]MultiPercolateRequestBuilder.java" checks="LineLength" />
  <suppress files="modules[/\\]percolator[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]percolator[/\\]PercolateShardResponse.java" checks="LineLength" />
  <suppress files="modules[/\\]percolator[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]percolator[/\\]TransportMultiPercolateAction.java" checks="LineLength" />
  <suppress files="modules[/\\]percolator[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]percolator[/\\]TransportPercolateAction.java" checks="LineLength" />
  <suppress files="modules[/\\]percolator[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]percolator[/\\]TransportShardMultiPercolateAction.java" checks="LineLength" />
  <suppress files="modules[/\\]percolator[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]percolator[/\\]MultiPercolatorIT.java" checks="LineLength" />
  <suppress files="modules[/\\]percolator[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]percolator[/\\]PercolatorIT.java" checks="LineLength" />

@@ -1084,7 +1065,6 @@
  <suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]IndexSettingsModule.java" checks="LineLength" />
  <suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]InternalTestCluster.java" checks="LineLength" />
  <suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]MockIndexEventListener.java" checks="LineLength" />
  <suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]TestSearchContext.java" checks="LineLength" />
  <suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]cluster[/\\]NoopClusterService.java" checks="LineLength" />
  <suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]cluster[/\\]TestClusterService.java" checks="LineLength" />
  <suppress files="test[/\\]framework[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]test[/\\]discovery[/\\]ClusterDiscoveryConfiguration.java" checks="LineLength" />

@@ -1,4 +1,4 @@
-elasticsearch = 5.0.0-alpha6
+elasticsearch = 6.0.0-alpha1
 lucene = 6.2.0

 # optional dependencies

@@ -0,0 +1,392 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache license, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the license for the specific language governing permissions and
 * limitations under the license.
 */
package org.apache.logging.log4j.core.jmx;

import java.lang.management.ManagementFactory;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import javax.management.InstanceAlreadyExistsException;
import javax.management.MBeanRegistrationException;
import javax.management.MBeanServer;
import javax.management.NotCompliantMBeanException;
import javax.management.ObjectName;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.core.Appender;
import org.apache.logging.log4j.core.LoggerContext;
import org.apache.logging.log4j.core.appender.AsyncAppender;
import org.apache.logging.log4j.core.async.AsyncLoggerConfig;
import org.apache.logging.log4j.core.async.AsyncLoggerContext;
import org.apache.logging.log4j.core.async.DaemonThreadFactory;
import org.apache.logging.log4j.core.config.LoggerConfig;
import org.apache.logging.log4j.core.impl.Log4jContextFactory;
import org.apache.logging.log4j.core.selector.ContextSelector;
import org.apache.logging.log4j.core.util.Constants;
import org.apache.logging.log4j.spi.LoggerContextFactory;
import org.apache.logging.log4j.status.StatusLogger;
import org.apache.logging.log4j.util.PropertiesUtil;
import org.elasticsearch.common.SuppressForbidden;

/**
 * Creates MBeans to instrument various classes in the log4j class hierarchy.
 * <p>
 * All instrumentation for Log4j 2 classes can be disabled by setting system property {@code -Dlog4j2.disable.jmx=true}.
 * </p>
 */
@SuppressForbidden(reason = "copied class to hack around Log4j bug")
public final class Server {

    /**
     * The domain part, or prefix ({@value}) of the {@code ObjectName} of all MBeans that instrument Log4J2 components.
     */
    public static final String DOMAIN = "org.apache.logging.log4j2";
    private static final String PROPERTY_DISABLE_JMX = "log4j2.disable.jmx";
    private static final String PROPERTY_ASYNC_NOTIF = "log4j2.jmx.notify.async";
    private static final String THREAD_NAME_PREFIX = "log4j2.jmx.notif";
    private static final StatusLogger LOGGER = StatusLogger.getLogger();
    static final Executor executor = isJmxDisabled() ? null : createExecutor();

    private Server() {
    }

    /**
     * Returns either a {@code null} Executor (causing JMX notifications to be sent from the caller thread) or a daemon
     * background thread Executor, depending on the value of system property "log4j2.jmx.notify.async". If this
     * property is not set, use a {@code null} Executor for web apps to avoid memory leaks and other issues when the
     * web app is restarted.
     * @see <a href="https://issues.apache.org/jira/browse/LOG4J2-938">LOG4J2-938</a>
     */
    private static ExecutorService createExecutor() {
        final boolean defaultAsync = !Constants.IS_WEB_APP;
        final boolean async = PropertiesUtil.getProperties().getBooleanProperty(PROPERTY_ASYNC_NOTIF, defaultAsync);
        return async ? Executors.newFixedThreadPool(1, new DaemonThreadFactory(THREAD_NAME_PREFIX)) : null;
    }

    /**
     * Either returns the specified name as is, or returns a quoted value containing the specified name with the special
     * characters (comma, equals, colon, quote, asterisk, or question mark) preceded with a backslash.
     *
     * @param name the name to escape so it can be used as a value in an {@link ObjectName}.
     * @return the escaped name
     */
    public static String escape(final String name) {
        final StringBuilder sb = new StringBuilder(name.length() * 2);
        boolean needsQuotes = false;
        for (int i = 0; i < name.length(); i++) {
            final char c = name.charAt(i);
            switch (c) {
            case '\\':
            case '*':
            case '?':
            case '\"':
                // quote, star, question & backslash must be escaped
                sb.append('\\');
                needsQuotes = true; // ... and can only appear in quoted value
                break;
            case ',':
            case '=':
            case ':':
                // no need to escape these, but value must be quoted
                needsQuotes = true;
                break;
            case '\r':
                // drop \r characters: \\r gives "invalid escape sequence"
                continue;
            case '\n':
                // replace \n characters with \\n sequence
                sb.append("\\n");
                needsQuotes = true;
                continue;
            }
            sb.append(c);
        }
        if (needsQuotes) {
            sb.insert(0, '\"');
            sb.append('\"');
        }
        return sb.toString();
    }

    private static boolean isJmxDisabled() {
        return PropertiesUtil.getProperties().getBooleanProperty(PROPERTY_DISABLE_JMX);
    }

    public static void reregisterMBeansAfterReconfigure() {
        // avoid creating Platform MBean Server if JMX disabled
        if (isJmxDisabled()) {
            LOGGER.debug("JMX disabled for log4j2. Not registering MBeans.");
            return;
        }
        final MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
        reregisterMBeansAfterReconfigure(mbs);
    }

    public static void reregisterMBeansAfterReconfigure(final MBeanServer mbs) {
        if (isJmxDisabled()) {
            LOGGER.debug("JMX disabled for log4j2. Not registering MBeans.");
            return;
        }

        // now provide instrumentation for the newly configured
        // LoggerConfigs and Appenders
        try {
            final ContextSelector selector = getContextSelector();
            if (selector == null) {
                LOGGER.debug("Could not register MBeans: no ContextSelector found.");
                return;
            }
            LOGGER.trace("Reregistering MBeans after reconfigure. Selector={}", selector);
            final List<LoggerContext> contexts = selector.getLoggerContexts();
            int i = 0;
            for (final LoggerContext ctx : contexts) {
                LOGGER.trace("Reregistering context ({}/{}): '{}' {}", ++i, contexts.size(), ctx.getName(), ctx);
                // first unregister the context and all nested loggers,
                // appenders, statusLogger, contextSelector, ringbuffers...
                unregisterLoggerContext(ctx.getName(), mbs);

                final LoggerContextAdmin mbean = new LoggerContextAdmin(ctx, executor);
                register(mbs, mbean, mbean.getObjectName());

                if (ctx instanceof AsyncLoggerContext) {
                    final RingBufferAdmin rbmbean = ((AsyncLoggerContext) ctx).createRingBufferAdmin();
                    if (rbmbean.getBufferSize() > 0) {
                        // don't register if Disruptor not started (DefaultConfiguration: config not found)
                        register(mbs, rbmbean, rbmbean.getObjectName());
                    }
                }

                // register the status logger and the context selector
                // repeatedly
                // for each known context: if one context is unregistered,
                // these MBeans should still be available for the other
                // contexts.
                registerStatusLogger(ctx.getName(), mbs, executor);
                registerContextSelector(ctx.getName(), selector, mbs, executor);

                registerLoggerConfigs(ctx, mbs, executor);
                registerAppenders(ctx, mbs, executor);
            }
        } catch (final Exception ex) {
            LOGGER.error("Could not register mbeans", ex);
        }
    }

    /**
     * Unregister all log4j MBeans from the platform MBean server.
     */
    public static void unregisterMBeans() {
        if (isJmxDisabled()) {
            LOGGER.debug("JMX disabled for Log4j2. Not unregistering MBeans.");
            return;
        }
        final MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
        unregisterMBeans(mbs);
    }

    /**
     * Unregister all log4j MBeans from the specified MBean server.
     *
     * @param mbs the MBean server to unregister from.
     */
    public static void unregisterMBeans(final MBeanServer mbs) {
        unregisterStatusLogger("*", mbs);
        unregisterContextSelector("*", mbs);
        unregisterContexts(mbs);
        unregisterLoggerConfigs("*", mbs);
        unregisterAsyncLoggerRingBufferAdmins("*", mbs);
        unregisterAsyncLoggerConfigRingBufferAdmins("*", mbs);
        unregisterAppenders("*", mbs);
        unregisterAsyncAppenders("*", mbs);
    }

    /**
     * Returns the {@code ContextSelector} of the current {@code Log4jContextFactory}.
     *
     * @return the {@code ContextSelector} of the current {@code Log4jContextFactory}
     */
    private static ContextSelector getContextSelector() {
        final LoggerContextFactory factory = LogManager.getFactory();
        if (factory instanceof Log4jContextFactory) {
            final ContextSelector selector = ((Log4jContextFactory) factory).getSelector();
            return selector;
        }
        return null;
    }

    /**
     * Unregisters all MBeans associated with the specified logger context (including MBeans for {@code LoggerConfig}s
     * and {@code Appender}s from the platform MBean server.
     *
     * @param loggerContextName name of the logger context to unregister
     */
    public static void unregisterLoggerContext(final String loggerContextName) {
        if (isJmxDisabled()) {
            LOGGER.debug("JMX disabled for Log4j2. Not unregistering MBeans.");
            return;
        }
        final MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
        unregisterLoggerContext(loggerContextName, mbs);
    }

    /**
     * Unregisters all MBeans associated with the specified logger context (including MBeans for {@code LoggerConfig}s
     * and {@code Appender}s from the platform MBean server.
     *
     * @param contextName name of the logger context to unregister
     * @param mbs the MBean Server to unregister the instrumented objects from
     */
    public static void unregisterLoggerContext(final String contextName, final MBeanServer mbs) {
        final String pattern = LoggerContextAdminMBean.PATTERN;
        final String search = String.format(pattern, escape(contextName), "*");
        unregisterAllMatching(search, mbs); // unregister context mbean

        // now unregister all MBeans associated with this logger context
        unregisterStatusLogger(contextName, mbs);
        unregisterContextSelector(contextName, mbs);
        unregisterLoggerConfigs(contextName, mbs);
        unregisterAppenders(contextName, mbs);
        unregisterAsyncAppenders(contextName, mbs);
        unregisterAsyncLoggerRingBufferAdmins(contextName, mbs);
        unregisterAsyncLoggerConfigRingBufferAdmins(contextName, mbs);
    }

    private static void registerStatusLogger(final String contextName, final MBeanServer mbs, final Executor executor)
            throws InstanceAlreadyExistsException, MBeanRegistrationException, NotCompliantMBeanException {

        final StatusLoggerAdmin mbean = new StatusLoggerAdmin(contextName, executor);
        register(mbs, mbean, mbean.getObjectName());
    }

    private static void registerContextSelector(final String contextName, final ContextSelector selector,
            final MBeanServer mbs, final Executor executor) throws InstanceAlreadyExistsException,
            MBeanRegistrationException, NotCompliantMBeanException {

        final ContextSelectorAdmin mbean = new ContextSelectorAdmin(contextName, selector);
        register(mbs, mbean, mbean.getObjectName());
    }

    private static void unregisterStatusLogger(final String contextName, final MBeanServer mbs) {
        final String pattern = StatusLoggerAdminMBean.PATTERN;
        final String search = String.format(pattern, escape(contextName), "*");
        unregisterAllMatching(search, mbs);
    }

    private static void unregisterContextSelector(final String contextName, final MBeanServer mbs) {
        final String pattern = ContextSelectorAdminMBean.PATTERN;
        final String search = String.format(pattern, escape(contextName), "*");
        unregisterAllMatching(search, mbs);
    }

    private static void unregisterLoggerConfigs(final String contextName, final MBeanServer mbs) {
        final String pattern = LoggerConfigAdminMBean.PATTERN;
        final String search = String.format(pattern, escape(contextName), "*");
        unregisterAllMatching(search, mbs);
    }

    private static void unregisterContexts(final MBeanServer mbs) {
        final String pattern = LoggerContextAdminMBean.PATTERN;
        final String search = String.format(pattern, "*");
        unregisterAllMatching(search, mbs);
    }

    private static void unregisterAppenders(final String contextName, final MBeanServer mbs) {
        final String pattern = AppenderAdminMBean.PATTERN;
        final String search = String.format(pattern, escape(contextName), "*");
        unregisterAllMatching(search, mbs);
    }

    private static void unregisterAsyncAppenders(final String contextName, final MBeanServer mbs) {
        final String pattern = AsyncAppenderAdminMBean.PATTERN;
        final String search = String.format(pattern, escape(contextName), "*");
        unregisterAllMatching(search, mbs);
    }

    private static void unregisterAsyncLoggerRingBufferAdmins(final String contextName, final MBeanServer mbs) {
        final String pattern1 = RingBufferAdminMBean.PATTERN_ASYNC_LOGGER;
        final String search1 = String.format(pattern1, escape(contextName));
        unregisterAllMatching(search1, mbs);
    }

    private static void unregisterAsyncLoggerConfigRingBufferAdmins(final String contextName, final MBeanServer mbs) {
        final String pattern2 = RingBufferAdminMBean.PATTERN_ASYNC_LOGGER_CONFIG;
        final String search2 = String.format(pattern2, escape(contextName), "*");
        unregisterAllMatching(search2, mbs);
    }

    private static void unregisterAllMatching(final String search, final MBeanServer mbs) {
        try {
            final ObjectName pattern = new ObjectName(search);
            final Set<ObjectName> found = mbs.queryNames(pattern, null);
            if (found.isEmpty()) {
                LOGGER.trace("Unregistering but no MBeans found matching '{}'", search);
            } else {
                LOGGER.trace("Unregistering {} MBeans: {}", found.size(), found);
            }
            for (final ObjectName objectName : found) {
                mbs.unregisterMBean(objectName);
            }
        } catch (final Exception ex) {
            LOGGER.error("Could not unregister MBeans for " + search, ex);
        }
    }

    private static void registerLoggerConfigs(final LoggerContext ctx, final MBeanServer mbs, final Executor executor)
            throws InstanceAlreadyExistsException, MBeanRegistrationException, NotCompliantMBeanException {

        final Map<String, LoggerConfig> map = ctx.getConfiguration().getLoggers();
        for (final String name : map.keySet()) {
            final LoggerConfig cfg = map.get(name);
            final LoggerConfigAdmin mbean = new LoggerConfigAdmin(ctx, cfg);
            register(mbs, mbean, mbean.getObjectName());

            if (cfg instanceof AsyncLoggerConfig) {
                final AsyncLoggerConfig async = (AsyncLoggerConfig) cfg;
                final RingBufferAdmin rbmbean = async.createRingBufferAdmin(ctx.getName());
                register(mbs, rbmbean, rbmbean.getObjectName());
            }
        }
    }

    private static void registerAppenders(final LoggerContext ctx, final MBeanServer mbs, final Executor executor)
            throws InstanceAlreadyExistsException, MBeanRegistrationException, NotCompliantMBeanException {

        final Map<String, Appender> map = ctx.getConfiguration().getAppenders();
        for (final String name : map.keySet()) {
            final Appender appender = map.get(name);

            if (appender instanceof AsyncAppender) {
                final AsyncAppender async = ((AsyncAppender) appender);
                final AsyncAppenderAdmin mbean = new AsyncAppenderAdmin(ctx.getName(), async);
                register(mbs, mbean, mbean.getObjectName());
            } else {
                final AppenderAdmin mbean = new AppenderAdmin(ctx.getName(), appender);
                register(mbs, mbean, mbean.getObjectName());
            }
        }
    }

    private static void register(final MBeanServer mbs, final Object mbean, final ObjectName objectName)
            throws InstanceAlreadyExistsException, MBeanRegistrationException, NotCompliantMBeanException {
        LOGGER.debug("Registering MBean {}", objectName);
        mbs.registerMBean(mbean, objectName);
    }
}
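
The escape helper above exists because JMX treats comma, equals, colon, quote, asterisk, and question mark as ObjectName metacharacters. A minimal usage sketch (the "type" key and the context name are illustrative, not taken from this commit):

    // "app=web,env=prod" contains ',' and '=', so escape() returns it quoted:
    // "\"app=web,env=prod\"" -- a legal ObjectName value.
    String safe = Server.escape("app=web,env=prod");
    ObjectName name = new ObjectName("org.apache.logging.log4j2:type=" + safe);
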
@@ -87,7 +87,9 @@ public class Version {
     public static final Version V_5_0_0_alpha5 = new Version(V_5_0_0_alpha5_ID, org.apache.lucene.util.Version.LUCENE_6_1_0);
     public static final int V_5_0_0_alpha6_ID = 5000006;
     public static final Version V_5_0_0_alpha6 = new Version(V_5_0_0_alpha6_ID, org.apache.lucene.util.Version.LUCENE_6_2_0);
-    public static final Version CURRENT = V_5_0_0_alpha6;
+    public static final int V_6_0_0_alpha1_ID = 6000001;
+    public static final Version V_6_0_0_alpha1 = new Version(V_6_0_0_alpha1_ID, org.apache.lucene.util.Version.LUCENE_6_2_0);
+    public static final Version CURRENT = V_6_0_0_alpha1;

     static {
         assert CURRENT.luceneVersion.equals(org.apache.lucene.util.Version.LATEST) : "Version must be upgraded to ["

@@ -100,6 +102,8 @@ public class Version {

     public static Version fromId(int id) {
         switch (id) {
+            case V_6_0_0_alpha1_ID:
+                return V_6_0_0_alpha1;
             case V_5_0_0_alpha6_ID:
                 return V_5_0_0_alpha6;
             case V_5_0_0_alpha5_ID:

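The numeric IDs above follow Elasticsearch's packed-integer convention: major, minor, revision, and a pre-release build share one int, roughly major * 1_000_000 + minor * 10_000 + revision * 100 + build, with builds below 25 read as alphas. A rough decoding sketch under that assumption (not code from this commit):

    int id = 6000001;                  // V_6_0_0_alpha1_ID
    int major = id / 1_000_000;        // 6
    int minor = (id / 10_000) % 100;   // 0
    int revision = (id / 100) % 100;   // 0
    int build = id % 100;              // 1 -> alpha1 under the build-below-25 convention
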
@@ -211,30 +211,16 @@ public class NodeStats extends BaseNodeResponse implements ToXContent {
         if (in.readBoolean()) {
             indices = NodeIndicesStats.readIndicesStats(in);
         }
-        if (in.readBoolean()) {
-            os = new OsStats(in);
-        }
-        if (in.readBoolean()) {
-            process = ProcessStats.readProcessStats(in);
-        }
-        if (in.readBoolean()) {
-            jvm = JvmStats.readJvmStats(in);
-        }
-        if (in.readBoolean()) {
-            threadPool = ThreadPoolStats.readThreadPoolStats(in);
-        }
-        if (in.readBoolean()) {
-            fs = new FsInfo(in);
-        }
-        if (in.readBoolean()) {
-            transport = TransportStats.readTransportStats(in);
-        }
-        if (in.readBoolean()) {
-            http = HttpStats.readHttpStats(in);
-        }
-        breaker = AllCircuitBreakerStats.readOptionalAllCircuitBreakerStats(in);
-        scriptStats = in.readOptionalStreamable(ScriptStats::new);
-        discoveryStats = in.readOptionalStreamable(() -> new DiscoveryStats(null));
+        os = in.readOptionalWriteable(OsStats::new);
+        process = in.readOptionalWriteable(ProcessStats::new);
+        jvm = in.readOptionalWriteable(JvmStats::new);
+        threadPool = in.readOptionalWriteable(ThreadPoolStats::new);
+        fs = in.readOptionalWriteable(FsInfo::new);
+        transport = in.readOptionalWriteable(TransportStats::new);
+        http = in.readOptionalWriteable(HttpStats::new);
+        breaker = in.readOptionalWriteable(AllCircuitBreakerStats::new);
+        scriptStats = in.readOptionalWriteable(ScriptStats::new);
+        discoveryStats = in.readOptionalWriteable(DiscoveryStats::new);
+        ingestStats = in.readOptionalWriteable(IngestStats::new);
     }

@@ -248,51 +234,16 @@ public class NodeStats extends BaseNodeResponse implements ToXContent {
             out.writeBoolean(true);
             indices.writeTo(out);
         }
-        if (os == null) {
-            out.writeBoolean(false);
-        } else {
-            out.writeBoolean(true);
-            os.writeTo(out);
-        }
-        if (process == null) {
-            out.writeBoolean(false);
-        } else {
-            out.writeBoolean(true);
-            process.writeTo(out);
-        }
-        if (jvm == null) {
-            out.writeBoolean(false);
-        } else {
-            out.writeBoolean(true);
-            jvm.writeTo(out);
-        }
-        if (threadPool == null) {
-            out.writeBoolean(false);
-        } else {
-            out.writeBoolean(true);
-            threadPool.writeTo(out);
-        }
-        if (fs == null) {
-            out.writeBoolean(false);
-        } else {
-            out.writeBoolean(true);
-            fs.writeTo(out);
-        }
-        if (transport == null) {
-            out.writeBoolean(false);
-        } else {
-            out.writeBoolean(true);
-            transport.writeTo(out);
-        }
-        if (http == null) {
-            out.writeBoolean(false);
-        } else {
-            out.writeBoolean(true);
-            http.writeTo(out);
-        }
-        out.writeOptionalStreamable(breaker);
-        out.writeOptionalStreamable(scriptStats);
-        out.writeOptionalStreamable(discoveryStats);
+        out.writeOptionalWriteable(os);
+        out.writeOptionalWriteable(process);
+        out.writeOptionalWriteable(jvm);
+        out.writeOptionalWriteable(threadPool);
+        out.writeOptionalWriteable(fs);
+        out.writeOptionalWriteable(transport);
+        out.writeOptionalWriteable(http);
+        out.writeOptionalWriteable(breaker);
+        out.writeOptionalWriteable(scriptStats);
+        out.writeOptionalWriteable(discoveryStats);
+        out.writeOptionalWriteable(ingestStats);
     }

@@ -318,11 +269,9 @@ public class NodeStats extends BaseNodeResponse implements ToXContent {
                 builder.endObject();
             }
         }

         if (getIndices() != null) {
             getIndices().toXContent(builder, params);
         }

         if (getOs() != null) {
             getOs().toXContent(builder, params);
         }

@@ -350,15 +299,12 @@ public class NodeStats extends BaseNodeResponse implements ToXContent {
         if (getScriptStats() != null) {
             getScriptStats().toXContent(builder, params);
         }

         if (getDiscoveryStats() != null) {
             getDiscoveryStats().toXContent(builder, params);
         }

         if (getIngestStats() != null) {
             getIngestStats().toXContent(builder, params);
         }

         return builder;
     }
 }

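The rewritten readFrom and writeTo above collapse the hand-rolled presence booleans into the readOptionalWriteable/writeOptionalWriteable helpers. Both shapes put the same bytes on the wire -- one boolean flag followed by the payload when present -- which is what makes the mechanical rewrite safe; a schematic sketch (OsStats standing in for any Writeable):

    // Old Streamable-era shape: explicit presence flag, then the payload.
    if (in.readBoolean()) {
        os = new OsStats(in);
    }
    // New Writeable shape: the helper reads the same flag internally.
    os = in.readOptionalWriteable(OsStats::new);
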
@@ -268,7 +268,7 @@ public class NodesStatsRequest extends BaseNodesRequest<NodesStatsRequest> {
     @Override
     public void readFrom(StreamInput in) throws IOException {
         super.readFrom(in);
-        indices = CommonStatsFlags.readCommonStatsFlags(in);
+        indices = new CommonStatsFlags(in);
         os = in.readBoolean();
         process = in.readBoolean();
         jvm = in.readBoolean();

@@ -298,5 +298,4 @@ public class NodesStatsRequest extends BaseNodesRequest<NodesStatsRequest> {
         out.writeBoolean(discovery);
         out.writeBoolean(ingest);
     }
-
 }

@@ -83,7 +83,7 @@ public class ClusterStatsNodes implements ToXContent {
                 continue;
             }
             if (nodeResponse.nodeStats().getFs() != null) {
-                this.fs.add(nodeResponse.nodeStats().getFs().total());
+                this.fs.add(nodeResponse.nodeStats().getFs().getTotal());
             }
         }
         this.counts = new Counts(nodeInfos);

@@ -292,7 +292,7 @@ public class DetailAnalyzeResponse implements Streamable, ToXContent {
         public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
             builder.startObject();
             builder.field(Fields.NAME, name);
-            builder.field(Fields.FILTERED_TEXT, texts);
+            builder.array(Fields.FILTERED_TEXT, texts);
             builder.endObject();
             return builder;
         }

@@ -22,7 +22,7 @@ package org.elasticsearch.action.admin.indices.stats;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;

@@ -32,13 +32,13 @@ import org.elasticsearch.index.engine.SegmentsStats;
 import org.elasticsearch.index.fielddata.FieldDataStats;
 import org.elasticsearch.index.flush.FlushStats;
 import org.elasticsearch.index.get.GetStats;
-import org.elasticsearch.index.shard.IndexingStats;
 import org.elasticsearch.index.merge.MergeStats;
 import org.elasticsearch.index.recovery.RecoveryStats;
 import org.elasticsearch.index.refresh.RefreshStats;
 import org.elasticsearch.index.search.stats.SearchStats;
 import org.elasticsearch.index.shard.DocsStats;
 import org.elasticsearch.index.shard.IndexShard;
+import org.elasticsearch.index.shard.IndexingStats;
 import org.elasticsearch.index.store.StoreStats;
 import org.elasticsearch.index.translog.TranslogStats;
 import org.elasticsearch.index.warmer.WarmerStats;

@@ -47,9 +47,55 @@ import org.elasticsearch.search.suggest.completion.CompletionStats;

 import java.io.IOException;

 /**
  */
-public class CommonStats implements Streamable, ToXContent {
+public class CommonStats implements Writeable, ToXContent {
+
+    @Nullable
+    public DocsStats docs;
+
+    @Nullable
+    public StoreStats store;
+
+    @Nullable
+    public IndexingStats indexing;
+
+    @Nullable
+    public GetStats get;
+
+    @Nullable
+    public SearchStats search;
+
+    @Nullable
+    public MergeStats merge;
+
+    @Nullable
+    public RefreshStats refresh;
+
+    @Nullable
+    public FlushStats flush;
+
+    @Nullable
+    public WarmerStats warmer;
+
+    @Nullable
+    public QueryCacheStats queryCache;
+
+    @Nullable
+    public FieldDataStats fieldData;
+
+    @Nullable
+    public CompletionStats completion;
+
+    @Nullable
+    public SegmentsStats segments;
+
+    @Nullable
+    public TranslogStats translog;
+
+    @Nullable
+    public RequestCacheStats requestCache;
+
+    @Nullable
+    public RecoveryStats recoveryStats;

     public CommonStats() {
         this(CommonStatsFlags.NONE);

@@ -117,11 +163,8 @@ public class CommonStats implements Streamable, ToXContent {
         }
     }

-
     public CommonStats(IndicesQueryCache indicesQueryCache, IndexShard indexShard, CommonStatsFlags flags) {
-
         CommonStatsFlags.Flag[] setFlags = flags.getFlags();
-
         for (CommonStatsFlags.Flag flag : setFlags) {
             switch (flag) {
                 case Docs:

@@ -181,53 +224,135 @@ public class CommonStats implements Streamable, ToXContent {
         }
     }

-    @Nullable
-    public DocsStats docs;
-
-    @Nullable
-    public StoreStats store;
-
-    @Nullable
-    public IndexingStats indexing;
-
-    @Nullable
-    public GetStats get;
-
-    @Nullable
-    public SearchStats search;
-
-    @Nullable
-    public MergeStats merge;
-
-    @Nullable
-    public RefreshStats refresh;
-
-    @Nullable
-    public FlushStats flush;
-
-    @Nullable
-    public WarmerStats warmer;
-
-    @Nullable
-    public QueryCacheStats queryCache;
-
-    @Nullable
-    public FieldDataStats fieldData;
-
-    @Nullable
-    public CompletionStats completion;
-
-    @Nullable
-    public SegmentsStats segments;
-
-    @Nullable
-    public TranslogStats translog;
-
-    @Nullable
-    public RequestCacheStats requestCache;
-
-    @Nullable
-    public RecoveryStats recoveryStats;
+    public CommonStats(StreamInput in) throws IOException {
+        if (in.readBoolean()) {
+            docs = DocsStats.readDocStats(in);
+        }
+        if (in.readBoolean()) {
+            store = StoreStats.readStoreStats(in);
+        }
+        if (in.readBoolean()) {
+            indexing = IndexingStats.readIndexingStats(in);
+        }
+        if (in.readBoolean()) {
+            get = GetStats.readGetStats(in);
+        }
+        if (in.readBoolean()) {
+            search = SearchStats.readSearchStats(in);
+        }
+        if (in.readBoolean()) {
+            merge = MergeStats.readMergeStats(in);
+        }
+        if (in.readBoolean()) {
+            refresh = RefreshStats.readRefreshStats(in);
+        }
+        if (in.readBoolean()) {
+            flush = FlushStats.readFlushStats(in);
+        }
+        if (in.readBoolean()) {
+            warmer = WarmerStats.readWarmerStats(in);
+        }
+        if (in.readBoolean()) {
+            queryCache = QueryCacheStats.readQueryCacheStats(in);
+        }
+        if (in.readBoolean()) {
+            fieldData = FieldDataStats.readFieldDataStats(in);
+        }
+        if (in.readBoolean()) {
+            completion = CompletionStats.readCompletionStats(in);
+        }
+        if (in.readBoolean()) {
+            segments = SegmentsStats.readSegmentsStats(in);
+        }
+        translog = in.readOptionalStreamable(TranslogStats::new);
+        requestCache = in.readOptionalStreamable(RequestCacheStats::new);
+        recoveryStats = in.readOptionalStreamable(RecoveryStats::new);
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        if (docs == null) {
+            out.writeBoolean(false);
+        } else {
+            out.writeBoolean(true);
+            docs.writeTo(out);
+        }
+        if (store == null) {
+            out.writeBoolean(false);
+        } else {
+            out.writeBoolean(true);
+            store.writeTo(out);
+        }
+        if (indexing == null) {
+            out.writeBoolean(false);
+        } else {
+            out.writeBoolean(true);
+            indexing.writeTo(out);
+        }
+        if (get == null) {
+            out.writeBoolean(false);
+        } else {
+            out.writeBoolean(true);
+            get.writeTo(out);
+        }
+        if (search == null) {
+            out.writeBoolean(false);
+        } else {
+            out.writeBoolean(true);
+            search.writeTo(out);
+        }
+        if (merge == null) {
+            out.writeBoolean(false);
+        } else {
+            out.writeBoolean(true);
+            merge.writeTo(out);
+        }
+        if (refresh == null) {
+            out.writeBoolean(false);
+        } else {
+            out.writeBoolean(true);
+            refresh.writeTo(out);
+        }
+        if (flush == null) {
+            out.writeBoolean(false);
+        } else {
+            out.writeBoolean(true);
+            flush.writeTo(out);
+        }
+        if (warmer == null) {
+            out.writeBoolean(false);
+        } else {
+            out.writeBoolean(true);
+            warmer.writeTo(out);
+        }
+        if (queryCache == null) {
+            out.writeBoolean(false);
+        } else {
+            out.writeBoolean(true);
+            queryCache.writeTo(out);
+        }
+        if (fieldData == null) {
+            out.writeBoolean(false);
+        } else {
+            out.writeBoolean(true);
+            fieldData.writeTo(out);
+        }
+        if (completion == null) {
+            out.writeBoolean(false);
+        } else {
+            out.writeBoolean(true);
+            completion.writeTo(out);
+        }
+        if (segments == null) {
+            out.writeBoolean(false);
+        } else {
+            out.writeBoolean(true);
+            segments.writeTo(out);
+        }
+        out.writeOptionalStreamable(translog);
+        out.writeOptionalStreamable(requestCache);
+        out.writeOptionalStreamable(recoveryStats);
+    }

     public void add(CommonStats stats) {
         if (docs == null) {

@@ -441,12 +566,6 @@ public class CommonStats implements Streamable, ToXContent {
         return recoveryStats;
     }

-    public static CommonStats readCommonStats(StreamInput in) throws IOException {
-        CommonStats stats = new CommonStats();
-        stats.readFrom(in);
-        return stats;
-    }
-
     /**
      * Utility method which computes total memory by adding
      * FieldData, PercolatorCache, Segments (memory, index writer, version map)

@@ -468,137 +587,6 @@ public class CommonStats implements Streamable, ToXContent {
         return new ByteSizeValue(size);
     }

-    @Override
-    public void readFrom(StreamInput in) throws IOException {
-        if (in.readBoolean()) {
-            docs = DocsStats.readDocStats(in);
-        }
-        if (in.readBoolean()) {
-            store = StoreStats.readStoreStats(in);
-        }
-        if (in.readBoolean()) {
-            indexing = IndexingStats.readIndexingStats(in);
-        }
-        if (in.readBoolean()) {
-            get = GetStats.readGetStats(in);
-        }
-        if (in.readBoolean()) {
-            search = SearchStats.readSearchStats(in);
-        }
-        if (in.readBoolean()) {
-            merge = MergeStats.readMergeStats(in);
-        }
-        if (in.readBoolean()) {
-            refresh = RefreshStats.readRefreshStats(in);
-        }
-        if (in.readBoolean()) {
-            flush = FlushStats.readFlushStats(in);
-        }
-        if (in.readBoolean()) {
-            warmer = WarmerStats.readWarmerStats(in);
-        }
-        if (in.readBoolean()) {
-            queryCache = QueryCacheStats.readQueryCacheStats(in);
-        }
-        if (in.readBoolean()) {
-            fieldData = FieldDataStats.readFieldDataStats(in);
-        }
-        if (in.readBoolean()) {
-            completion = CompletionStats.readCompletionStats(in);
-        }
-        if (in.readBoolean()) {
-            segments = SegmentsStats.readSegmentsStats(in);
-        }
-        translog = in.readOptionalStreamable(TranslogStats::new);
-        requestCache = in.readOptionalStreamable(RequestCacheStats::new);
-        recoveryStats = in.readOptionalStreamable(RecoveryStats::new);
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        if (docs == null) {
-            out.writeBoolean(false);
-        } else {
-            out.writeBoolean(true);
-            docs.writeTo(out);
-        }
-        if (store == null) {
-            out.writeBoolean(false);
-        } else {
-            out.writeBoolean(true);
-            store.writeTo(out);
-        }
-        if (indexing == null) {
-            out.writeBoolean(false);
-        } else {
-            out.writeBoolean(true);
-            indexing.writeTo(out);
-        }
-        if (get == null) {
-            out.writeBoolean(false);
-        } else {
-            out.writeBoolean(true);
-            get.writeTo(out);
-        }
-        if (search == null) {
-            out.writeBoolean(false);
-        } else {
-            out.writeBoolean(true);
-            search.writeTo(out);
-        }
-        if (merge == null) {
-            out.writeBoolean(false);
-        } else {
-            out.writeBoolean(true);
-            merge.writeTo(out);
-        }
-        if (refresh == null) {
-            out.writeBoolean(false);
-        } else {
-            out.writeBoolean(true);
-            refresh.writeTo(out);
-        }
-        if (flush == null) {
-            out.writeBoolean(false);
-        } else {
-            out.writeBoolean(true);
-            flush.writeTo(out);
-        }
-        if (warmer == null) {
-            out.writeBoolean(false);
-        } else {
-            out.writeBoolean(true);
-            warmer.writeTo(out);
-        }
-        if (queryCache == null) {
-            out.writeBoolean(false);
-        } else {
-            out.writeBoolean(true);
-            queryCache.writeTo(out);
-        }
-        if (fieldData == null) {
-            out.writeBoolean(false);
-        } else {
-            out.writeBoolean(true);
-            fieldData.writeTo(out);
-        }
-        if (completion == null) {
-            out.writeBoolean(false);
-        } else {
-            out.writeBoolean(true);
-            completion.writeTo(out);
-        }
-        if (segments == null) {
-            out.writeBoolean(false);
-        } else {
-            out.writeBoolean(true);
-            segments.writeTo(out);
-        }
-        out.writeOptionalStreamable(translog);
-        out.writeOptionalStreamable(requestCache);
-        out.writeOptionalStreamable(recoveryStats);
-    }
-
     // note, requires a wrapping object
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {

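CommonStats follows the pattern running through this whole commit: Streamable's no-arg constructor plus readFrom is replaced by a StreamInput constructor paired with Writeable.writeTo, so a deserialized instance is fully initialized at construction. A minimal sketch of that contract with a hypothetical stats class (ExampleStats is illustrative, not from the commit):

    public class ExampleStats implements Writeable {
        private final long count;

        public ExampleStats(StreamInput in) throws IOException {
            count = in.readVLong();   // all reads happen at construction time
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeVLong(count);    // must mirror the constructor's read order exactly
        }
    }
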
@@ -19,17 +19,15 @@

 package org.elasticsearch.action.admin.indices.stats;

-import org.elasticsearch.Version;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.io.stream.Writeable;

 import java.io.IOException;
 import java.util.Collections;
 import java.util.EnumSet;

 /**
  */
-public class CommonStatsFlags implements Streamable, Cloneable {
+public class CommonStatsFlags implements Writeable, Cloneable {

     public static final CommonStatsFlags ALL = new CommonStatsFlags().all();
     public static final CommonStatsFlags NONE = new CommonStatsFlags().clear();

@@ -41,19 +39,45 @@ public class CommonStatsFlags implements Streamable, Cloneable {
     private String[] completionDataFields = null;
     private boolean includeSegmentFileSizes = false;

-
     /**
      * @param flags flags to set. If no flags are supplied, default flags will be set.
      */
     public CommonStatsFlags(Flag... flags) {
         if (flags.length > 0) {
             clear();
-            for (Flag f : flags) {
-                this.flags.add(f);
-            }
+            Collections.addAll(this.flags, flags);
         }
     }

+    public CommonStatsFlags(StreamInput in) throws IOException {
+        final long longFlags = in.readLong();
+        flags.clear();
+        for (Flag flag : Flag.values()) {
+            if ((longFlags & (1 << flag.ordinal())) != 0) {
+                flags.add(flag);
+            }
+        }
+        types = in.readStringArray();
+        groups = in.readStringArray();
+        fieldDataFields = in.readStringArray();
+        completionDataFields = in.readStringArray();
+        includeSegmentFileSizes = in.readBoolean();
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        long longFlags = 0;
+        for (Flag flag : flags) {
+            longFlags |= (1 << flag.ordinal());
+        }
+        out.writeLong(longFlags);
+
+        out.writeStringArrayNullable(types);
+        out.writeStringArrayNullable(groups);
+        out.writeStringArrayNullable(fieldDataFields);
+        out.writeStringArrayNullable(completionDataFields);
+        out.writeBoolean(includeSegmentFileSizes);
+    }
+
     /**
      * Sets all flags to return all stats.

@@ -162,7 +186,6 @@ public class CommonStatsFlags implements Streamable, Cloneable {
         flags.add(flag);
     }

-
     public CommonStatsFlags set(Flag flag, boolean add) {
         if (add) {
             set(flag);

@@ -172,49 +195,6 @@ public class CommonStatsFlags implements Streamable, Cloneable {
         return this;
     }

-    public static CommonStatsFlags readCommonStatsFlags(StreamInput in) throws IOException {
-        CommonStatsFlags flags = new CommonStatsFlags();
-        flags.readFrom(in);
-        return flags;
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        long longFlags = 0;
-        for (Flag flag : flags) {
-            longFlags |= (1 << flag.ordinal());
-        }
-        out.writeLong(longFlags);
-
-        out.writeStringArrayNullable(types);
-        out.writeStringArrayNullable(groups);
-        out.writeStringArrayNullable(fieldDataFields);
-        out.writeStringArrayNullable(completionDataFields);
-        if (out.getVersion().onOrAfter(Version.V_5_0_0_alpha1)) {
-            out.writeBoolean(includeSegmentFileSizes);
-        }
-    }
-
-    @Override
-    public void readFrom(StreamInput in) throws IOException {
-        final long longFlags = in.readLong();
-        flags.clear();
-        for (Flag flag : Flag.values()) {
-            if ((longFlags & (1 << flag.ordinal())) != 0) {
-                flags.add(flag);
-            }
-        }
-        types = in.readStringArray();
-        groups = in.readStringArray();
-        fieldDataFields = in.readStringArray();
-        completionDataFields = in.readStringArray();
-        if (in.getVersion().onOrAfter(Version.V_5_0_0_alpha1)) {
-            includeSegmentFileSizes = in.readBoolean();
-        } else {
-            includeSegmentFileSizes = false;
-        }
-    }
-
     @Override
     public CommonStatsFlags clone() {
         try {

@@ -226,7 +206,7 @@ public class CommonStatsFlags implements Streamable, Cloneable {
         }
     }

-    public static enum Flag {
+    public enum Flag {
         // Do not change the order of these flags we use
         // the ordinal for encoding! Only append to the end!
         Store("store"),

@@ -247,7 +227,6 @@ public class CommonStatsFlags implements Streamable, Cloneable {
         RequestCache("request_cache"),
         Recovery("recovery");

-
         private final String restName;

         Flag(String restName) {

@@ -257,6 +236,5 @@ public class CommonStatsFlags implements Streamable, Cloneable {
         public String getRestName() {
             return restName;
         }
-
     }
 }

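CommonStatsFlags serializes its EnumSet as one long with a bit per Flag.ordinal(), which is why the enum comment forbids reordering and only allows appending. A self-contained round-trip sketch of that encoding (a generic enum, not the ES class; note the 1L literal -- the plain int shift in the diff is safe only while there are at most 31 flags):

    import java.util.EnumSet;

    enum Flag { STORE, INDEXING, GET }

    static long encode(EnumSet<Flag> set) {
        long bits = 0;
        for (Flag f : set) {
            bits |= 1L << f.ordinal();          // one stable bit per ordinal
        }
        return bits;
    }

    static EnumSet<Flag> decode(long bits) {
        EnumSet<Flag> set = EnumSet.noneOf(Flag.class);
        for (Flag f : Flag.values()) {
            if ((bits & (1L << f.ordinal())) != 0) {
                set.add(f);
            }
        }
        return set;
    }
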
@@ -274,6 +274,6 @@ public class IndicesStatsRequest extends BroadcastRequest<IndicesStatsRequest> {
     @Override
     public void readFrom(StreamInput in) throws IOException {
         super.readFrom(in);
-        flags = CommonStatsFlags.readCommonStatsFlags(in);
+        flags = new CommonStatsFlags(in);
     }
 }

@@ -90,7 +90,7 @@ public class ShardStats implements Streamable, ToXContent {
     @Override
     public void readFrom(StreamInput in) throws IOException {
         shardRouting = new ShardRouting(in);
-        commonStats = CommonStats.readCommonStats(in);
+        commonStats = new CommonStats(in);
         commitStats = CommitStats.readOptionalCommitStatsFrom(in);
         statePath = in.readString();
         dataPath = in.readString();

@@ -27,9 +27,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static java.util.Collections.singletonMap;

@@ -74,6 +74,8 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
    private Map<String, IndexMetaData.Custom> customs = new HashMap<>();

    private Integer version;

    public PutIndexTemplateRequest() {
    }

@@ -129,6 +131,15 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
        return this.order;
    }

    public PutIndexTemplateRequest version(Integer version) {
        this.version = version;
        return this;
    }

    public Integer version() {
        return this.version;
    }

    /**
     * Set to <tt>true</tt> to force only creation, not an update of an index template. If it already
     * exists, it will fail with an {@link org.elasticsearch.indices.IndexTemplateAlreadyExistsException}.

@@ -278,16 +289,23 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
                template(entry.getValue().toString());
            } else if (name.equals("order")) {
                order(XContentMapValues.nodeIntegerValue(entry.getValue(), order()));
            } else if ("version".equals(name)) {
                if ((entry.getValue() instanceof Integer) == false) {
                    throw new IllegalArgumentException("Malformed [version] value, should be an integer");
                }
                version((Integer) entry.getValue());
            } else if (name.equals("settings")) {
                if (!(entry.getValue() instanceof Map)) {
                    throw new IllegalArgumentException("Malformed settings section, should include an inner object");
                    throw new IllegalArgumentException("Malformed [settings] section, should include an inner object");
                }
                settings((Map<String, Object>) entry.getValue());
            } else if (name.equals("mappings")) {
                Map<String, Object> mappings = (Map<String, Object>) entry.getValue();
                for (Map.Entry<String, Object> entry1 : mappings.entrySet()) {
                    if (!(entry1.getValue() instanceof Map)) {
                        throw new IllegalArgumentException("Malformed mappings section for type [" + entry1.getKey() + "], should include an inner object describing the mapping");
                        throw new IllegalArgumentException(
                            "Malformed [mappings] section for type [" + entry1.getKey() +
                                "], should include an inner object describing the mapping");
                    }
                    mapping(entry1.getKey(), (Map<String, Object>) entry1.getValue());
                }

@@ -449,6 +467,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
        for (int i = 0; i < aliasesSize; i++) {
            aliases.add(Alias.read(in));
        }
        version = in.readOptionalVInt();
    }

    @Override

@@ -474,5 +493,6 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
        for (Alias alias : aliases) {
            alias.writeTo(out);
        }
        out.writeOptionalVInt(version);
    }
}
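For reference, the stricter parser above accepts a body shaped like the following and rejects a non-integer version (for example "version": "1") with the new Malformed [version] message; the template name, pattern, and settings here are illustrative only:

PUT /_template/my_template
{
  "template": "logs-*",
  "order": 0,
  "version": 2,
  "settings": { "number_of_shards": 1 },
  "mappings": { "log": { "properties": { "message": { "type": "text" } } } }
}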
@@ -30,7 +30,8 @@ import java.util.Map;
/**
 *
 */
public class PutIndexTemplateRequestBuilder extends MasterNodeOperationRequestBuilder<PutIndexTemplateRequest, PutIndexTemplateResponse, PutIndexTemplateRequestBuilder> {
public class PutIndexTemplateRequestBuilder
        extends MasterNodeOperationRequestBuilder<PutIndexTemplateRequest, PutIndexTemplateResponse, PutIndexTemplateRequestBuilder> {

    public PutIndexTemplateRequestBuilder(ElasticsearchClient client, PutIndexTemplateAction action) {
        super(client, action, new PutIndexTemplateRequest());

@@ -56,6 +57,14 @@ public class PutIndexTemplateRequestBu
        return this;
    }

    /**
     * Sets the optional version of this template.
     */
    public PutIndexTemplateRequestBuilder setVersion(Integer version) {
        request.version(version);
        return this;
    }

    /**
     * Set to <tt>true</tt> to force only creation, not an update of an index template. If it already
     * exists, it will fail with an {@link org.elasticsearch.indices.IndexTemplateAlreadyExistsException}.
@@ -86,7 +86,8 @@ public class TransportPutIndexTemplateAction extends TransportMasterNodeAction<P
                .aliases(request.aliases())
                .customs(request.customs())
                .create(request.create())
                .masterTimeout(request.masterNodeTimeout()),
                .masterTimeout(request.masterNodeTimeout())
                .version(request.version()),

            new MetaDataIndexTemplateService.PutListener() {
                @Override
@@ -139,12 +139,7 @@ public class UpdateHelper extends AbstractComponent {
            return new Result(indexRequest, DocWriteResponse.Result.CREATED, null, null);
        }

        long updateVersion = getResult.getVersion();

        if (request.versionType() != VersionType.INTERNAL) {
            assert request.versionType() == VersionType.FORCE;
            updateVersion = request.version(); // remember, match_any is excluded by the conflict test
        }
        final long updateVersion = getResult.getVersion();

        if (getResult.internalSourceRef() == null) {
            // no source, we can't do anything, throw a failure...
@@ -106,8 +106,9 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
            validationException = addValidationError("id is missing", validationException);
        }

        if (!(versionType == VersionType.INTERNAL || versionType == VersionType.FORCE)) {
            validationException = addValidationError("version type [" + versionType + "] is not supported by the update API", validationException);
        if (versionType != VersionType.INTERNAL) {
            validationException = addValidationError("version type [" + versionType + "] is not supported by the update API",
                    validationException);
        } else {

        if (version != Versions.MATCH_ANY && retryOnConflict > 0) {
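The tightened check keeps the accumulate-then-return validation idiom: each failure appends to a possibly-null exception holder instead of throwing immediately, so one response can report every problem. A minimal sketch of the idiom outside Elasticsearch's classes; ValidationErrors stands in for ActionRequestValidationException:

import java.util.ArrayList;
import java.util.List;

class ValidationErrors {
    final List<String> errors = new ArrayList<>();
}

class Validator {
    // Mirrors addValidationError: lazily create the container, append, return it.
    static ValidationErrors add(String error, ValidationErrors existing) {
        ValidationErrors result = existing == null ? new ValidationErrors() : existing;
        result.errors.add(error);
        return result;
    }

    static ValidationErrors validate(String id, String versionType) {
        ValidationErrors e = null;
        if (id == null) {
            e = add("id is missing", e);
        }
        if (!"internal".equals(versionType)) {
            e = add("version type [" + versionType + "] is not supported by the update API", e);
        }
        return e; // null means the request is valid
    }
}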
@@ -20,6 +20,8 @@
package org.elasticsearch.bootstrap;

import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.Appender;
import org.apache.logging.log4j.core.appender.ConsoleAppender;
import org.apache.lucene.util.Constants;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.StringHelper;

@@ -29,6 +31,7 @@ import org.elasticsearch.cli.Terminal;
import org.elasticsearch.common.PidFile;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.inject.CreationException;
import org.elasticsearch.common.logging.ESLoggerFactory;
import org.elasticsearch.common.logging.LogConfigurator;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;

@@ -38,12 +41,16 @@ import org.elasticsearch.monitor.jvm.JvmInfo;
import org.elasticsearch.monitor.os.OsProbe;
import org.elasticsearch.monitor.process.ProcessProbe;
import org.elasticsearch.node.Node;
import org.elasticsearch.node.NodeValidationException;
import org.elasticsearch.node.internal.InternalSettingsPreparer;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
import java.net.URISyntaxException;
import java.nio.file.Path;
import java.security.NoSuchAlgorithmException;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
@@ -142,7 +149,7 @@ final class Bootstrap {
        JvmInfo.jvmInfo();
    }

    private void setup(boolean addShutdownHook, Environment environment) throws Exception {
    private void setup(boolean addShutdownHook, Environment environment) throws BootstrapException {
        Settings settings = environment.settings();
        initializeNatives(
                environment.tmpFile(),

@@ -166,15 +173,25 @@ final class Bootstrap {
            });
        }

        // look for jar hell
        JarHell.checkJarHell();
        try {
            // look for jar hell
            JarHell.checkJarHell();
        } catch (IOException | URISyntaxException e) {
            throw new BootstrapException(e);
        }

        // install SM after natives, shutdown hooks, etc.
        Security.configure(environment, BootstrapSettings.SECURITY_FILTER_BAD_DEFAULTS_SETTING.get(settings));
        try {
            Security.configure(environment, BootstrapSettings.SECURITY_FILTER_BAD_DEFAULTS_SETTING.get(settings));
        } catch (IOException | NoSuchAlgorithmException e) {
            throw new BootstrapException(e);
        }

        node = new Node(environment) {
            @Override
            protected void validateNodeBeforeAcceptingRequests(Settings settings, BoundTransportAddress boundTransportAddress) {
            protected void validateNodeBeforeAcceptingRequests(
                final Settings settings,
                final BoundTransportAddress boundTransportAddress) throws NodeValidationException {
                BootstrapCheck.check(settings, boundTransportAddress);
            }
        };

@@ -189,7 +206,7 @@ final class Bootstrap {
        return InternalSettingsPreparer.prepareEnvironment(builder.build(), terminal, esSettings);
    }

    private void start() {
    private void start() throws NodeValidationException {
        node.start();
        keepAliveThread.start();
    }

@@ -216,7 +233,7 @@ final class Bootstrap {
    static void init(
        final boolean foreground,
        final Path pidFile,
        final Map<String, String> esSettings) throws Exception {
        final Map<String, String> esSettings) throws BootstrapException, NodeValidationException {
        // Set the system property before anything has a chance to trigger its use
        initLoggerPrefix();
@@ -227,16 +244,28 @@ final class Bootstrap {
        INSTANCE = new Bootstrap();

        Environment environment = initialEnvironment(foreground, pidFile, esSettings);
        LogConfigurator.configure(environment, true);
        try {
            LogConfigurator.configure(environment, true);
        } catch (IOException e) {
            throw new BootstrapException(e);
        }
        checkForCustomConfFile();

        if (environment.pidFile() != null) {
            PidFile.create(environment.pidFile(), true);
            try {
                PidFile.create(environment.pidFile(), true);
            } catch (IOException e) {
                throw new BootstrapException(e);
            }
        }

        try {
            if (!foreground) {
                Loggers.disableConsoleLogging();
                final Logger rootLogger = ESLoggerFactory.getRootLogger();
                final Appender maybeConsoleAppender = Loggers.findAppender(rootLogger, ConsoleAppender.class);
                if (maybeConsoleAppender != null) {
                    Loggers.removeAppender(rootLogger, maybeConsoleAppender);
                }
                closeSystOut();
            }

@@ -259,10 +288,12 @@ final class Bootstrap {
            if (!foreground) {
                closeSysError();
            }
        } catch (Exception e) {
        } catch (NodeValidationException | RuntimeException e) {
            // disable console logging, so user does not see the exception twice (jvm will show it already)
            if (foreground) {
                Loggers.disableConsoleLogging();
            final Logger rootLogger = ESLoggerFactory.getRootLogger();
            final Appender maybeConsoleAppender = Loggers.findAppender(rootLogger, ConsoleAppender.class);
            if (foreground && maybeConsoleAppender != null) {
                Loggers.removeAppender(rootLogger, maybeConsoleAppender);
            }
            Logger logger = Loggers.getLogger(Bootstrap.class);
            if (INSTANCE.node != null) {

@@ -272,17 +303,30 @@ final class Bootstrap {
            if (e instanceof CreationException) {
                // guice: log the shortened exc to the log file
                ByteArrayOutputStream os = new ByteArrayOutputStream();
                PrintStream ps = new PrintStream(os, false, "UTF-8");
                PrintStream ps = null;
                try {
                    ps = new PrintStream(os, false, "UTF-8");
                } catch (UnsupportedEncodingException uee) {
                    assert false;
                    e.addSuppressed(uee);
                }
                new StartupException(e).printStackTrace(ps);
                ps.flush();
                logger.error("Guice Exception: {}", os.toString("UTF-8"));
                try {
                    logger.error("Guice Exception: {}", os.toString("UTF-8"));
                } catch (UnsupportedEncodingException uee) {
                    assert false;
                    e.addSuppressed(uee);
                }
            } else if (e instanceof NodeValidationException) {
                logger.error("node validation exception\n{}", e.getMessage());
            } else {
                // full exception
                logger.error("Exception", e);
            }
            // re-enable it if appropriate, so they can see any logging during the shutdown process
            if (foreground) {
                Loggers.enableConsoleLogging();
            if (foreground && maybeConsoleAppender != null) {
                Loggers.addAppender(rootLogger, maybeConsoleAppender);
            }

            throw e;
@@ -23,6 +23,8 @@ import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.apache.lucene.util.Constants;
import org.elasticsearch.cli.ExitCodes;
import org.elasticsearch.cli.UserException;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.logging.Loggers;

@@ -32,6 +34,7 @@ import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.monitor.jvm.JvmInfo;
import org.elasticsearch.monitor.process.ProcessProbe;
import org.elasticsearch.node.Node;
import org.elasticsearch.node.NodeValidationException;

import java.io.BufferedReader;
import java.io.IOException;

@@ -62,7 +65,7 @@ final class BootstrapCheck {
     * @param settings the current node settings
     * @param boundTransportAddress the node network bindings
     */
    static void check(final Settings settings, final BoundTransportAddress boundTransportAddress) {
    static void check(final Settings settings, final BoundTransportAddress boundTransportAddress) throws NodeValidationException {
        check(
            enforceLimits(boundTransportAddress),
            BootstrapSettings.IGNORE_SYSTEM_BOOTSTRAP_CHECKS.get(settings),

@@ -82,7 +85,11 @@ final class BootstrapCheck {
     * @param nodeName the node name to be used as a logging prefix
     */
    // visible for testing
    static void check(final boolean enforceLimits, final boolean ignoreSystemChecks, final List<Check> checks, final String nodeName) {
    static void check(
        final boolean enforceLimits,
        final boolean ignoreSystemChecks,
        final List<Check> checks,
        final String nodeName) throws NodeValidationException {
        check(enforceLimits, ignoreSystemChecks, checks, Loggers.getLogger(BootstrapCheck.class, nodeName));
    }

@@ -101,7 +108,7 @@ final class BootstrapCheck {
        final boolean enforceLimits,
        final boolean ignoreSystemChecks,
        final List<Check> checks,
        final Logger logger) {
        final Logger logger) throws NodeValidationException {
        final List<String> errors = new ArrayList<>();
        final List<String> ignoredErrors = new ArrayList<>();

@@ -130,9 +137,9 @@ final class BootstrapCheck {
            final List<String> messages = new ArrayList<>(1 + errors.size());
            messages.add("bootstrap checks failed");
            messages.addAll(errors);
            final RuntimeException re = new RuntimeException(String.join("\n", messages));
            errors.stream().map(IllegalStateException::new).forEach(re::addSuppressed);
            throw re;
            final NodeValidationException ne = new NodeValidationException(String.join("\n", messages));
            errors.stream().map(IllegalStateException::new).forEach(ne::addSuppressed);
            throw ne;
        }

    }
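The new failure path gathers every failed check before throwing once, and attaches each individual error as a suppressed exception so none disappears from the report. The same pattern in isolation; ValidationFailure stands in for NodeValidationException:

import java.util.ArrayList;
import java.util.List;

class ValidationFailure extends Exception {
    ValidationFailure(String message) {
        super(message);
    }
}

class Checker {
    static void failIfAnyErrors(List<String> errors) throws ValidationFailure {
        if (errors.isEmpty()) {
            return;
        }
        List<String> messages = new ArrayList<>(1 + errors.size());
        messages.add("bootstrap checks failed");
        messages.addAll(errors);
        ValidationFailure failure = new ValidationFailure(String.join("\n", messages));
        // keep each error individually visible in stack traces
        errors.stream().map(IllegalStateException::new).forEach(failure::addSuppressed);
        throw failure;
    }
}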
@@ -0,0 +1,43 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.bootstrap;

import java.nio.file.Path;
import java.util.Map;

/**
 * Wrapper exception for checked exceptions thrown during the bootstrap process. Methods invoked
 * during bootstrap should explicitly declare the checked exceptions that they can throw, rather
 * than declaring the top-level checked exception {@link Exception}. This exception exists to wrap
 * these checked exceptions so that {@link Bootstrap#init(boolean, Path, Map)} does not have to
 * declare all of these checked exceptions.
 */
class BootstrapException extends Exception {

    /**
     * Wraps an existing exception.
     *
     * @param cause the underlying cause of bootstrap failing
     */
    BootstrapException(final Exception cause) {
        super(cause);
    }

}
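At a call site, the pattern this javadoc describes looks roughly like the following: a narrowly declared checked exception is caught and rethrown wrapped, so the top-level method keeps a small throws clause. BootstrapFailure and loadConfig are stand-ins, not the real API:

import java.io.IOException;

class BootstrapFailure extends Exception { // stand-in for BootstrapException
    BootstrapFailure(final Exception cause) {
        super(cause);
    }
}

class InitDemo {
    static void loadConfig() throws IOException { // hypothetical checked step
        throw new IOException("config unreadable");
    }

    static void init() throws BootstrapFailure {
        try {
            loadConfig();
        } catch (IOException e) {
            throw new BootstrapFailure(e); // one wrapper instead of many throws clauses
        }
    }
}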
@@ -29,6 +29,7 @@ import org.elasticsearch.cli.SettingCommand;
import org.elasticsearch.cli.Terminal;
import org.elasticsearch.cli.UserException;
import org.elasticsearch.monitor.jvm.JvmInfo;
import org.elasticsearch.node.NodeValidationException;

import java.io.IOException;
import java.nio.file.Path;

@@ -75,7 +76,7 @@ class Elasticsearch extends SettingCommand {
    }

    @Override
    protected void execute(Terminal terminal, OptionSet options, Map<String, String> settings) throws Exception {
    protected void execute(Terminal terminal, OptionSet options, Map<String, String> settings) throws UserException {
        if (options.nonOptionArguments().isEmpty() == false) {
            throw new UserException(ExitCodes.USAGE, "Positional arguments not allowed, found " + options.nonOptionArguments());
        }

@@ -92,16 +93,20 @@ class Elasticsearch extends SettingCommand {
        final boolean daemonize = options.has(daemonizeOption);
        final Path pidFile = pidfileOption.value(options);

        init(daemonize, pidFile, settings);
        try {
            init(daemonize, pidFile, settings);
        } catch (NodeValidationException e) {
            throw new UserException(ExitCodes.CONFIG, e.getMessage());
        }
    }

    void init(final boolean daemonize, final Path pidFile, final Map<String, String> esSettings) {
    void init(final boolean daemonize, final Path pidFile, final Map<String, String> esSettings) throws NodeValidationException {
        try {
            Bootstrap.init(!daemonize, pidFile, esSettings);
        } catch (final Throwable t) {
        } catch (BootstrapException | RuntimeException e) {
            // format exceptions to the console in a special way
            // to avoid 2MB stacktraces from guice, etc.
            throw new StartupException(t);
            throw new StartupException(e);
        }
    }
@@ -27,6 +27,7 @@ import org.elasticsearch.common.logging.Loggers;

import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.file.FileVisitResult;

@@ -74,7 +75,7 @@ public class JarHell {
     * Checks the current classpath for duplicate classes
     * @throws IllegalStateException if jar hell was found
     */
    public static void checkJarHell() throws Exception {
    public static void checkJarHell() throws IOException, URISyntaxException {
        ClassLoader loader = JarHell.class.getClassLoader();
        Logger logger = Loggers.getLogger(JarHell.class);
        if (logger.isDebugEnabled()) {

@@ -149,7 +150,7 @@ public class JarHell {
     * @throws IllegalStateException if jar hell was found
     */
    @SuppressForbidden(reason = "needs JarFile for speed, just reading entries")
    public static void checkJarHell(URL urls[]) throws Exception {
    public static void checkJarHell(URL urls[]) throws URISyntaxException, IOException {
        Logger logger = Loggers.getLogger(JarHell.class);
        // we don't try to be sneaky and use deprecated/internal/not portable stuff
        // like sun.boot.class.path, and with jigsaw we don't yet have a way to get

@@ -278,6 +279,12 @@ public class JarHell {
             * cf. https://issues.apache.org/jira/browse/LOG4J2-1560
             */
            return;
        } else if (clazz.startsWith("org.apache.logging.log4j.core.jmx.Server")) {
            /*
             * deliberate to hack around a bug in Log4j
             * cf. https://issues.apache.org/jira/browse/LOG4J2-1506
             */
            return;
        }
        throw new IllegalStateException("jar hell!" + System.lineSeparator() +
                "class: " + clazz + System.lineSeparator() +
@@ -114,13 +114,13 @@ final class Security {
     * @param environment configuration for generating dynamic permissions
     * @param filterBadDefaults true if we should filter out bad java defaults in the system policy.
     */
    static void configure(Environment environment, boolean filterBadDefaults) throws Exception {
    static void configure(Environment environment, boolean filterBadDefaults) throws IOException, NoSuchAlgorithmException {

        // enable security policy: union of template and environment-based paths, and possibly plugin permissions
        Policy.setPolicy(new ESPolicy(createPermissions(environment), getPluginPermissions(environment), filterBadDefaults));

        // enable security manager
        System.setSecurityManager(new SecureSM(new String[] { "org.elasticsearch.bootstrap." }));
        System.setSecurityManager(new SecureSM(new String[] { "org.elasticsearch.bootstrap.", "org.elasticsearch.cli" }));

        // do some basic tests
        selfTest();

@@ -258,7 +258,7 @@ final class Security {
            addPath(policy, Environment.PATH_DATA_SETTING.getKey(), path, "read,readlink,write,delete");
        }
        // TODO: this should be removed in ES 6.0! We will no longer support data paths with the cluster as a folder
        assert Version.CURRENT.major < 6 : "cluster name is no longer used in data path";
        // https://github.com/elastic/elasticsearch/issues/20391
        for (Path path : environment.dataWithClusterFiles()) {
            addPathIfExists(policy, Environment.PATH_DATA_SETTING.getKey(), path, "read,readlink,write,delete");
        }
@@ -21,7 +21,9 @@ package org.elasticsearch.cluster.metadata;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;

import org.elasticsearch.Version;
import org.elasticsearch.cluster.AbstractDiffable;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.compress.CompressedXContent;

@@ -37,6 +39,7 @@ import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.Map;
import java.util.Objects;
import java.util.Set;

/**

@@ -50,6 +53,26 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat

    private final int order;

    /**
     * The version is an arbitrary number managed by the user so that they can easily and quickly verify the existence of a given template.
     * Expected usage:
     * <pre><code>
     * PUT /_template/my_template
     * {
     *   "template": "my_index-*",
     *   "mappings": { ... },
     *   "version": 1
     * }
     * </code></pre>
     * Then, some process from the user can occasionally verify that the template exists with the appropriate version without having to
     * check the template's content:
     * <pre><code>
     * GET /_template/my_template?filter_path=*.version
     * </code></pre>
     */
    @Nullable
    private final Integer version;

    private final String template;

    private final Settings settings;
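Under the usage the javadoc above sketches, the filtered GET would come back with only the version field, along these lines (response shape illustrative):

GET /_template/my_template?filter_path=*.version

{
  "my_template": {
    "version": 1
  }
}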
@@ -61,10 +84,14 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat

    private final ImmutableOpenMap<String, IndexMetaData.Custom> customs;

    public IndexTemplateMetaData(String name, int order, String template, Settings settings, ImmutableOpenMap<String, CompressedXContent> mappings,
                                 ImmutableOpenMap<String, AliasMetaData> aliases, ImmutableOpenMap<String, IndexMetaData.Custom> customs) {
    public IndexTemplateMetaData(String name, int order, Integer version,
                                 String template, Settings settings,
                                 ImmutableOpenMap<String, CompressedXContent> mappings,
                                 ImmutableOpenMap<String, AliasMetaData> aliases,
                                 ImmutableOpenMap<String, IndexMetaData.Custom> customs) {
        this.name = name;
        this.order = order;
        this.version = version;
        this.template = template;
        this.settings = settings;
        this.mappings = mappings;

@@ -84,6 +111,16 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat
        return order();
    }

    @Nullable
    public Integer getVersion() {
        return version();
    }

    @Nullable
    public Integer version() {
        return version;
    }

    public String getName() {
        return this.name;
    }

@@ -150,13 +187,14 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat
        if (!settings.equals(that.settings)) return false;
        if (!template.equals(that.template)) return false;

        return true;
        return Objects.equals(version, that.version);
    }

    @Override
    public int hashCode() {
        int result = name.hashCode();
        result = 31 * result + order;
        result = 31 * result + Objects.hashCode(version);
        result = 31 * result + template.hashCode();
        result = 31 * result + settings.hashCode();
        result = 31 * result + mappings.hashCode();

@@ -184,6 +222,9 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat
            IndexMetaData.Custom customIndexMetaData = IndexMetaData.lookupPrototypeSafe(type).readFrom(in);
            builder.putCustom(type, customIndexMetaData);
        }
        if (in.getVersion().onOrAfter(Version.V_5_0_0_alpha6)) {
            builder.version(in.readOptionalVInt());
        }
        return builder.build();
    }

@@ -207,6 +248,9 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat
            out.writeString(cursor.key);
            cursor.value.writeTo(out);
        }
        if (out.getVersion().onOrAfter(Version.V_5_0_0_alpha6)) {
            out.writeOptionalVInt(version);
        }
    }

    public static class Builder {
@@ -220,6 +264,8 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat

        private int order;

        private Integer version;

        private String template;

        private Settings settings = Settings.Builder.EMPTY_SETTINGS;

@@ -240,6 +286,7 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat
        public Builder(IndexTemplateMetaData indexTemplateMetaData) {
            this.name = indexTemplateMetaData.name();
            order(indexTemplateMetaData.order());
            version(indexTemplateMetaData.version());
            template(indexTemplateMetaData.template());
            settings(indexTemplateMetaData.settings());

@@ -253,6 +300,11 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat
            return this;
        }

        public Builder version(Integer version) {
            this.version = version;
            return this;
        }

        public Builder template(String template) {
            this.template = template;
            return this;

@@ -312,14 +364,18 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat
        }

        public IndexTemplateMetaData build() {
            return new IndexTemplateMetaData(name, order, template, settings, mappings.build(), aliases.build(), customs.build());
            return new IndexTemplateMetaData(name, order, version, template, settings, mappings.build(), aliases.build(), customs.build());
        }

        @SuppressWarnings("unchecked")
        public static void toXContent(IndexTemplateMetaData indexTemplateMetaData, XContentBuilder builder, ToXContent.Params params) throws IOException {
        public static void toXContent(IndexTemplateMetaData indexTemplateMetaData, XContentBuilder builder, ToXContent.Params params)
                throws IOException {
            builder.startObject(indexTemplateMetaData.name());

            builder.field("order", indexTemplateMetaData.order());
            if (indexTemplateMetaData.version() != null) {
                builder.field("version", indexTemplateMetaData.version());
            }
            builder.field("template", indexTemplateMetaData.template());

            builder.startObject("settings");

@@ -380,7 +436,9 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat
                } else if (token == XContentParser.Token.START_OBJECT) {
                    if ("settings".equals(currentFieldName)) {
                        Settings.Builder templateSettingsBuilder = Settings.builder();
                        templateSettingsBuilder.put(SettingsLoader.Helper.loadNestedFromMap(parser.mapOrdered())).normalizePrefix(IndexMetaData.INDEX_SETTING_PREFIX);
                        templateSettingsBuilder.put(
                            SettingsLoader.Helper.loadNestedFromMap(parser.mapOrdered()))
                            .normalizePrefix(IndexMetaData.INDEX_SETTING_PREFIX);
                        builder.settings(templateSettingsBuilder.build());
                    } else if ("mappings".equals(currentFieldName)) {
                        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {

@@ -388,7 +446,8 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat
                            currentFieldName = parser.currentName();
                        } else if (token == XContentParser.Token.START_OBJECT) {
                            String mappingType = currentFieldName;
                            Map<String, Object> mappingSource = MapBuilder.<String, Object>newMapBuilder().put(mappingType, parser.mapOrdered()).map();
                            Map<String, Object> mappingSource =
                                MapBuilder.<String, Object>newMapBuilder().put(mappingType, parser.mapOrdered()).map();
                            builder.putMapping(mappingType, XContentFactory.jsonBuilder().map(mappingSource).string());
                        }
                    }

@@ -428,6 +487,8 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat
                    builder.template(parser.text());
                } else if ("order".equals(currentFieldName)) {
                    builder.order(parser.intValue());
                } else if ("version".equals(currentFieldName)) {
                    builder.version(parser.intValue());
                }
            }
        }
@@ -204,6 +204,7 @@ public class MetaDataIndexTemplateService extends AbstractComponent {
            createdIndex = dummyIndexService.index();

            templateBuilder.order(request.order);
            templateBuilder.version(request.version);
            templateBuilder.template(request.template);
            templateBuilder.settings(request.settings);

@@ -288,6 +289,7 @@ public class MetaDataIndexTemplateService extends AbstractComponent {
        final String cause;
        boolean create;
        int order;
        Integer version;
        String template;
        Settings settings = Settings.Builder.EMPTY_SETTINGS;
        Map<String, String> mappings = new HashMap<>();

@@ -345,6 +347,11 @@ public class MetaDataIndexTemplateService extends AbstractComponent {
            this.masterTimeout = masterTimeout;
            return this;
        }

        public PutRequest version(Integer version) {
            this.version = version;
            return this;
        }
    }

    public static class PutResponse {
@@ -25,10 +25,6 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import org.elasticsearch.plugins.Plugin;

/**
 * A registry for {@link org.elasticsearch.common.io.stream.Writeable.Reader} readers of {@link NamedWriteable}.

@@ -47,7 +43,7 @@ public class NamedWriteableRegistry {
        /** A name for the writeable which is unique to the {@link #categoryClass}. */
        public final String name;

        /** A reader captability of reading*/
        /** A reader capability of reading*/
        public final Writeable.Reader<?> reader;

        /** Creates a new entry which can be stored by the registry. */
@@ -871,6 +871,16 @@ public abstract class StreamOutput extends OutputStream {
        }
    }

    /**
     * Writes a list of strings
     */
    public void writeStringList(List<String> list) throws IOException {
        writeVInt(list.size());
        for (String string : list) {
            this.writeString(string);
        }
    }

    /**
     * Writes a list of {@link NamedWriteable} objects.
     */
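The matching read side is the usual length-prefixed loop: read the count, then that many strings. Sketched against a minimal stand-in interface rather than the real StreamInput:

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

interface StringIn { // stand-in for StreamInput
    int readVInt() throws IOException;
    String readString() throws IOException;
}

class StreamReadHelper {
    // Inverse of writeStringList: read the count, then that many strings.
    static List<String> readStringList(StringIn in) throws IOException {
        int size = in.readVInt();
        List<String> list = new ArrayList<>(size);
        for (int i = 0; i < size; i++) {
            list.add(in.readString());
        }
        return list;
    }
}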
@@ -78,7 +78,6 @@ public class LogConfigurator {
            }
        });
        context.start(new CompositeConfiguration(configurations));
        warnIfOldConfigurationFilePresent(environment);
    }

    if (ESLoggerFactory.LOG_DEFAULT_LEVEL_SETTING.exists(settings)) {

@@ -92,30 +91,6 @@ public class LogConfigurator {
        }
    }

    private static void warnIfOldConfigurationFilePresent(final Environment environment) throws IOException {
        // TODO: the warning for unsupported logging configurations can be removed in 6.0.0
        assert Version.CURRENT.major < 6;
        final List<String> suffixes = Arrays.asList(".yml", ".yaml", ".json", ".properties");
        final Set<FileVisitOption> options = EnumSet.of(FileVisitOption.FOLLOW_LINKS);
        Files.walkFileTree(environment.configFile(), options, Integer.MAX_VALUE, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                final String fileName = file.getFileName().toString();
                if (fileName.startsWith("logging")) {
                    for (final String suffix : suffixes) {
                        if (fileName.endsWith(suffix)) {
                            Loggers.getLogger(LogConfigurator.class).warn(
                                "ignoring unsupported logging configuration file [{}], logging is configured via [{}]",
                                file.toString(),
                                file.getParent().resolve("log4j2.properties"));
                        }
                    }
                }
                return FileVisitResult.CONTINUE;
            }
        });
    }

    @SuppressForbidden(reason = "sets system property for logging configuration")
    private static void setLogConfigurationSystemProperty(final Environment environment, final Settings settings) {
        System.setProperty("es.logs", environment.logsFile().resolve(ClusterName.CLUSTER_NAME_SETTING.get(settings).value()).toString());
@@ -20,7 +20,9 @@
package org.elasticsearch.common.logging;

import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.Appender;
import org.apache.logging.log4j.core.LoggerContext;
import org.apache.logging.log4j.core.config.Configuration;
import org.apache.logging.log4j.core.config.Configurator;

@@ -34,6 +36,7 @@ import org.elasticsearch.node.Node;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import static java.util.Arrays.asList;
import static org.elasticsearch.common.util.CollectionUtils.asArrayList;

@@ -43,7 +46,7 @@ import static org.elasticsearch.common.util.CollectionUtils.asArrayList;
 */
public class Loggers {

    private static final String commonPrefix = System.getProperty("es.logger.prefix", "org.elasticsearch.");
    static final String commonPrefix = System.getProperty("es.logger.prefix", "org.elasticsearch.");

    public static final String SPACE = " ";

@@ -171,4 +174,41 @@ public class Loggers {
        return commonPrefix + name;
    }

    public static void addAppender(final Logger logger, final Appender appender) {
        final LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
        final Configuration config = ctx.getConfiguration();
        config.addAppender(appender);
        LoggerConfig loggerConfig = config.getLoggerConfig(logger.getName());
        if (!logger.getName().equals(loggerConfig.getName())) {
            loggerConfig = new LoggerConfig(logger.getName(), logger.getLevel(), true);
            config.addLogger(logger.getName(), loggerConfig);
        }
        loggerConfig.addAppender(appender, null, null);
        ctx.updateLoggers();
    }

    public static void removeAppender(final Logger logger, final Appender appender) {
        final LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
        final Configuration config = ctx.getConfiguration();
        LoggerConfig loggerConfig = config.getLoggerConfig(logger.getName());
        if (!logger.getName().equals(loggerConfig.getName())) {
            loggerConfig = new LoggerConfig(logger.getName(), logger.getLevel(), true);
            config.addLogger(logger.getName(), loggerConfig);
        }
        loggerConfig.removeAppender(appender.getName());
        ctx.updateLoggers();
    }

    public static Appender findAppender(final Logger logger, final Class<? extends Appender> clazz) {
        final LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
        final Configuration config = ctx.getConfiguration();
        final LoggerConfig loggerConfig = config.getLoggerConfig(logger.getName());
        for (final Map.Entry<String, Appender> entry : loggerConfig.getAppenders().entrySet()) {
            if (entry.getValue().getClass().equals(clazz)) {
                return entry.getValue();
            }
        }
        return null;
    }

}
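These three helpers are exactly what Bootstrap uses earlier in this commit to detach the console appender while daemonizing and to re-attach it when startup fails. A hedged usage sketch against the Log4j 2 API; the quiet-work body is a placeholder:

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.Appender;
import org.apache.logging.log4j.core.appender.ConsoleAppender;
import org.elasticsearch.common.logging.Loggers;

class ConsoleToggle {
    static void runQuietly(Runnable work) {
        Logger root = LogManager.getRootLogger();
        // detach the console appender, if one is configured
        Appender console = Loggers.findAppender(root, ConsoleAppender.class);
        if (console != null) {
            Loggers.removeAppender(root, console);
        }
        try {
            work.run();
        } finally {
            if (console != null) {
                Loggers.addAppender(root, console); // restore for shutdown logging
            }
        }
    }
}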
@@ -411,6 +411,12 @@ public class Setting<T> extends ToXContentToBytes {

            @Override
            public void apply(Tuple<A, B> value, Settings current, Settings previous) {
                if (aSettingUpdater.hasChanged(current, previous)) {
                    logger.info("updating [{}] from [{}] to [{}]", aSetting.key, aSetting.getRaw(previous), aSetting.getRaw(current));
                }
                if (bSettingUpdater.hasChanged(current, previous)) {
                    logger.info("updating [{}] from [{}] to [{}]", bSetting.key, bSetting.getRaw(previous), bSetting.getRaw(current));
                }
                consumer.accept(value.v1(), value.v2());
            }
@@ -591,9 +597,9 @@ public class Setting<T> extends ToXContentToBytes {
     * Creates a setting which specifies a memory size. This can either be
     * specified as an absolute bytes value or as a percentage of the heap
     * memory.
     *
     * @param key the key for the setting
     * @param defaultValue the default value for this setting
     * @param properties properties for this setting like scope, filtering...
     * @return the setting object
     */

@@ -606,9 +612,9 @@ public class Setting<T> extends ToXContentToBytes {
     * Creates a setting which specifies a memory size. This can either be
     * specified as an absolute bytes value or as a percentage of the heap
     * memory.
     *
     * @param key the key for the setting
     * @param defaultValue a function that supplies the default value for this setting
     * @param properties properties for this setting like scope, filtering...
     * @return the setting object
     */

@@ -620,7 +626,7 @@ public class Setting<T> extends ToXContentToBytes {
     * Creates a setting which specifies a memory size. This can either be
     * specified as an absolute bytes value or as a percentage of the heap
     * memory.
     *
     * @param key the key for the setting
     * @param defaultPercentage the default value of this setting as a percentage of the heap memory
     * @param properties properties for this setting like scope, filtering...
@@ -19,7 +19,6 @@

package org.elasticsearch.common.xcontent;

import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;

import java.io.IOException;

(File diff suppressed because it is too large.)
@@ -20,14 +20,13 @@
package org.elasticsearch.common.xcontent;

import org.elasticsearch.common.bytes.BytesReference;

import java.io.Closeable;
import java.io.Flushable;
import java.io.IOException;
import java.io.InputStream;

/**
 *
 */
public interface XContentGenerator extends Closeable {
public interface XContentGenerator extends Closeable, Flushable {

    XContentType contentType();

@@ -37,68 +36,62 @@ public interface XContentGenerator extends Closeable {

    void usePrintLineFeedAtEnd();

    void writeStartArray() throws IOException;

    void writeEndArray() throws IOException;

    void writeStartObject() throws IOException;

    void writeEndObject() throws IOException;

    void writeStartArray() throws IOException;

    void writeEndArray() throws IOException;

    void writeFieldName(String name) throws IOException;

    void writeString(String text) throws IOException;

    void writeString(char[] text, int offset, int len) throws IOException;

    void writeUTF8String(byte[] text, int offset, int length) throws IOException;

    void writeBinary(byte[] data, int offset, int len) throws IOException;

    void writeBinary(byte[] data) throws IOException;

    void writeNumber(int v) throws IOException;

    void writeNumber(long v) throws IOException;

    void writeNumber(double d) throws IOException;

    void writeNumber(float f) throws IOException;

    void writeBoolean(boolean state) throws IOException;

    void writeNull() throws IOException;

    void writeStringField(String fieldName, String value) throws IOException;
    void writeNullField(String name) throws IOException;

    void writeBooleanField(String fieldName, boolean value) throws IOException;
    void writeBooleanField(String name, boolean value) throws IOException;

    void writeNullField(String fieldName) throws IOException;
    void writeBoolean(boolean value) throws IOException;

    void writeNumberField(String fieldName, int value) throws IOException;
    void writeNumberField(String name, double value) throws IOException;

    void writeNumberField(String fieldName, long value) throws IOException;
    void writeNumber(double value) throws IOException;

    void writeNumberField(String fieldName, double value) throws IOException;
    void writeNumberField(String name, float value) throws IOException;

    void writeNumberField(String fieldName, float value) throws IOException;
    void writeNumber(float value) throws IOException;

    void writeBinaryField(String fieldName, byte[] data) throws IOException;
    void writeNumberField(String name, int value) throws IOException;

    void writeArrayFieldStart(String fieldName) throws IOException;
    void writeNumber(int value) throws IOException;

    void writeObjectFieldStart(String fieldName) throws IOException;
    void writeNumberField(String name, long value) throws IOException;

    void writeRawField(String fieldName, InputStream content) throws IOException;
    void writeNumber(long value) throws IOException;

    void writeRawField(String fieldName, BytesReference content) throws IOException;
    void writeNumber(short value) throws IOException;

    void writeRawValue(BytesReference content) throws IOException;
    void writeStringField(String name, String value) throws IOException;

    void writeString(String value) throws IOException;

    void writeString(char[] text, int offset, int len) throws IOException;

    void writeUTF8String(byte[] value, int offset, int length) throws IOException;

    void writeBinaryField(String name, byte[] value) throws IOException;

    void writeBinary(byte[] value) throws IOException;

    void writeBinary(byte[] value, int offset, int length) throws IOException;

    void writeRawField(String name, InputStream value) throws IOException;

    void writeRawField(String name, BytesReference value) throws IOException;

    void writeRawValue(BytesReference value) throws IOException;

    void copyCurrentStructure(XContentParser parser) throws IOException;

    void flush() throws IOException;

    @Override
    void close() throws IOException;
}
@@ -47,30 +47,10 @@ public class JsonXContent implements XContent {
    }

    private static final JsonFactory jsonFactory;
    public static final String JSON_ALLOW_UNQUOTED_FIELD_NAMES = "elasticsearch.json.allow_unquoted_field_names";
    public static final JsonXContent jsonXContent;
    public static final boolean unquotedFieldNamesSet;

    static {
        jsonFactory = new JsonFactory();
        // TODO: Remove the system property configuration for this in Elasticsearch 6.0.0
        String jsonUnquoteProp = System.getProperty(JSON_ALLOW_UNQUOTED_FIELD_NAMES);
        if (jsonUnquoteProp == null) {
            unquotedFieldNamesSet = false;
            jsonFactory.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, false);
        } else {
            unquotedFieldNamesSet = true;
            switch (jsonUnquoteProp) {
                case "true":
                    jsonFactory.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true);
                    break;
                case "false":
                    jsonFactory.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, false);
                    break;
                default:
                    throw new IllegalArgumentException("invalid value for [" + JSON_ALLOW_UNQUOTED_FIELD_NAMES + "]: " + jsonUnquoteProp);
            }
        }
        jsonFactory.configure(JsonGenerator.Feature.QUOTE_FIELD_NAMES, true);
        jsonFactory.configure(JsonParser.Feature.ALLOW_COMMENTS, true);
        jsonFactory.configure(JsonFactory.Feature.FAIL_ON_SYMBOL_HASH_OVERFLOW, false); // this trips on many mappings now...
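Until that TODO lands, the permissive parser can still be toggled from outside; one way to set it, assuming the usual convention of passing JVM flags through ES_JAVA_OPTS (any value other than exactly "true" or "false" trips the IllegalArgumentException above):

ES_JAVA_OPTS="-Delasticsearch.json.allow_unquoted_field_names=true" ./bin/elasticsearch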
@@ -47,9 +47,6 @@ import java.util.Collections;
import java.util.Objects;
import java.util.Set;

/**
 *
 */
public class JsonXContentGenerator implements XContentGenerator {

    /** Generator used to write content **/

@@ -130,16 +127,6 @@ public class JsonXContentGenerator implements XContentGenerator {
        writeLineFeedAtEnd = true;
    }

    @Override
    public void writeStartArray() throws IOException {
        generator.writeStartArray();
    }

    @Override
    public void writeEndArray() throws IOException {
        generator.writeEndArray();
    }

    private boolean isFiltered() {
        return filter != null;
    }
@@ -184,118 +171,124 @@ public class JsonXContentGenerator implements XContentGenerator {
        generator.writeEndObject();
    }

    @Override
    public void writeStartArray() throws IOException {
        generator.writeStartArray();
    }

    @Override
    public void writeEndArray() throws IOException {
        generator.writeEndArray();
    }

    @Override
    public void writeFieldName(String name) throws IOException {
        generator.writeFieldName(name);
    }

    @Override
    public void writeString(String text) throws IOException {
        generator.writeString(text);
    }

    @Override
    public void writeString(char[] text, int offset, int len) throws IOException {
        generator.writeString(text, offset, len);
    }

    @Override
    public void writeUTF8String(byte[] text, int offset, int length) throws IOException {
        generator.writeUTF8String(text, offset, length);
    }

    @Override
    public void writeBinary(byte[] data, int offset, int len) throws IOException {
        generator.writeBinary(data, offset, len);
    }

    @Override
    public void writeBinary(byte[] data) throws IOException {
        generator.writeBinary(data);
    }

    @Override
    public void writeNumber(int v) throws IOException {
        generator.writeNumber(v);
    }

    @Override
    public void writeNumber(long v) throws IOException {
        generator.writeNumber(v);
    }

    @Override
    public void writeNumber(double d) throws IOException {
        generator.writeNumber(d);
    }

    @Override
    public void writeNumber(float f) throws IOException {
        generator.writeNumber(f);
    }

    @Override
    public void writeBoolean(boolean state) throws IOException {
        generator.writeBoolean(state);
    }

    @Override
    public void writeNull() throws IOException {
        generator.writeNull();
    }

    @Override
    public void writeStringField(String fieldName, String value) throws IOException {
        generator.writeStringField(fieldName, value);
    public void writeNullField(String name) throws IOException {
        generator.writeNullField(name);
    }

    @Override
    public void writeBooleanField(String fieldName, boolean value) throws IOException {
        generator.writeBooleanField(fieldName, value);
    public void writeBooleanField(String name, boolean value) throws IOException {
        generator.writeBooleanField(name, value);
    }

    @Override
    public void writeNullField(String fieldName) throws IOException {
        generator.writeNullField(fieldName);
    public void writeBoolean(boolean value) throws IOException {
        generator.writeBoolean(value);
    }

    @Override
    public void writeNumberField(String fieldName, int value) throws IOException {
        generator.writeNumberField(fieldName, value);
    public void writeNumberField(String name, double value) throws IOException {
        generator.writeNumberField(name, value);
    }

    @Override
    public void writeNumberField(String fieldName, long value) throws IOException {
        generator.writeNumberField(fieldName, value);
    public void writeNumber(double value) throws IOException {
        generator.writeNumber(value);
    }

    @Override
    public void writeNumberField(String fieldName, double value) throws IOException {
        generator.writeNumberField(fieldName, value);
    public void writeNumberField(String name, float value) throws IOException {
        generator.writeNumberField(name, value);
    }

    @Override
    public void writeNumberField(String fieldName, float value) throws IOException {
        generator.writeNumberField(fieldName, value);
    public void writeNumber(float value) throws IOException {
        generator.writeNumber(value);
    }

    @Override
    public void writeBinaryField(String fieldName, byte[] data) throws IOException {
        generator.writeBinaryField(fieldName, data);
    public void writeNumberField(String name, int value) throws IOException {
        generator.writeNumberField(name, value);
    }

    @Override
    public void writeArrayFieldStart(String fieldName) throws IOException {
        generator.writeArrayFieldStart(fieldName);
    public void writeNumber(int value) throws IOException {
        generator.writeNumber(value);
    }

    @Override
    public void writeObjectFieldStart(String fieldName) throws IOException {
        generator.writeObjectFieldStart(fieldName);
    public void writeNumberField(String name, long value) throws IOException {
        generator.writeNumberField(name, value);
    }

    private void writeStartRaw(String fieldName) throws IOException {
        writeFieldName(fieldName);
    @Override
    public void writeNumber(long value) throws IOException {
        generator.writeNumber(value);
    }

    @Override
    public void writeNumber(short value) throws IOException {
        generator.writeNumber(value);
    }

    @Override
    public void writeStringField(String name, String value) throws IOException {
        generator.writeStringField(name, value);
    }

    @Override
    public void writeString(String value) throws IOException {
        generator.writeString(value);
    }

    @Override
    public void writeString(char[] value, int offset, int len) throws IOException {
        generator.writeString(value, offset, len);
    }

    @Override
    public void writeUTF8String(byte[] value, int offset, int length) throws IOException {
        generator.writeUTF8String(value, offset, length);
    }

    @Override
    public void writeBinaryField(String name, byte[] value) throws IOException {
        generator.writeBinaryField(name, value);
    }

    @Override
    public void writeBinary(byte[] value) throws IOException {
        generator.writeBinary(value);
    }

    @Override
    public void writeBinary(byte[] value, int offset, int len) throws IOException {
        generator.writeBinary(value, offset, len);
    }

    private void writeStartRaw(String name) throws IOException {
        writeFieldName(name);
        generator.writeRaw(':');
    }
@@ -309,7 +302,7 @@ public class JsonXContentGenerator implements XContentGenerator {
    }

    @Override
    public void writeRawField(String fieldName, InputStream content) throws IOException {
    public void writeRawField(String name, InputStream content) throws IOException {
        if (content.markSupported() == false) {
            // needed for the XContentFactory.xContentType call
            content = new BufferedInputStream(content);
@@ -321,11 +314,11 @@ public class JsonXContentGenerator implements XContentGenerator {
        if (mayWriteRawData(contentType) == false) {
            try (XContentParser parser = XContentFactory.xContent(contentType).createParser(content)) {
                parser.nextToken();
                writeFieldName(fieldName);
                writeFieldName(name);
                copyCurrentStructure(parser);
            }
        } else {
            writeStartRaw(fieldName);
            writeStartRaw(name);
            flush();
            Streams.copy(content, os);
            writeEndRaw();

@@ -333,16 +326,16 @@ public class JsonXContentGenerator implements XContentGenerator {
    }

    @Override
    public final void writeRawField(String fieldName, BytesReference content) throws IOException {
    public final void writeRawField(String name, BytesReference content) throws IOException {
        XContentType contentType = XContentFactory.xContentType(content);
        if (contentType == null) {
            throw new IllegalArgumentException("Can't write raw bytes whose xcontent-type can't be guessed");
        }
        if (mayWriteRawData(contentType) == false) {
            writeFieldName(fieldName);
            writeFieldName(name);
            copyRawValue(content, contentType.xContent());
        } else {
            writeStartRaw(fieldName);
            writeStartRaw(name);
            flush();
            content.writeTo(os);
            writeEndRaw();
@ -416,7 +409,7 @@ public class JsonXContentGenerator implements XContentGenerator {
|
|||
}
|
||||
JsonStreamContext context = generator.getOutputContext();
|
||||
if ((context != null) && (context.inRoot() == false)) {
|
||||
throw new IOException("unclosed object or array found");
|
||||
throw new IOException("Unclosed object or array found");
|
||||
}
|
||||
if (writeLineFeedAtEnd) {
|
||||
flush();
|
||||
|
|
|
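Note on the writeRawField hunks above: when raw bytes cannot be spliced into the output directly (mayWriteRawData returns false, e.g. when the generator is filtering or pretty-printing), the code falls back to re-parsing the raw content and copying it token by token, which keeps the output well-formed. A minimal, self-contained sketch of that fallback using Jackson directly, which this generator wraps; the class here is illustrative, not the Elasticsearch implementation:

    // Sketch: copy raw JSON through a parser when direct byte splicing is unsafe.
    import com.fasterxml.jackson.core.JsonFactory;
    import com.fasterxml.jackson.core.JsonGenerator;
    import com.fasterxml.jackson.core.JsonParser;

    import java.io.IOException;
    import java.io.StringWriter;

    public class RawCopyExample {
        public static void main(String[] args) throws IOException {
            JsonFactory factory = new JsonFactory();
            StringWriter out = new StringWriter();
            try (JsonGenerator gen = factory.createGenerator(out);
                 JsonParser parser = factory.createParser("{\"took\":5,\"hits\":[]}")) {
                gen.writeStartObject();
                gen.writeFieldName("raw");
                parser.nextToken();               // position on the first token
                gen.copyCurrentStructure(parser); // token-by-token copy re-validates the content
                gen.writeEndObject();
            }
            System.out.println(out); // {"raw":{"took":5,"hits":[]}}
        }
    }
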
@@ -22,26 +22,34 @@ package org.elasticsearch.discovery;

 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.discovery.zen.publish.PendingClusterStateStats;

 import java.io.IOException;

-public class DiscoveryStats implements Streamable, ToXContent {
+public class DiscoveryStats implements Writeable, ToXContent {

     @Nullable
-    private PendingClusterStateStats queueStats;
+    private final PendingClusterStateStats queueStats;

     public DiscoveryStats(PendingClusterStateStats queueStats) {
         this.queueStats = queueStats;
     }

+    public DiscoveryStats(StreamInput in) throws IOException {
+        queueStats = in.readOptionalWriteable(PendingClusterStateStats::new);
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeOptionalWriteable(queueStats);
+    }
+
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject(Fields.DISCOVERY);

         if (queueStats != null ){
             queueStats.toXContent(builder, params);
         }

@@ -49,24 +57,6 @@ public class DiscoveryStats implements Streamable, ToXContent {
         return builder;
     }

-    @Override
-    public void readFrom(StreamInput in) throws IOException {
-        if (in.readBoolean()) {
-            queueStats = new PendingClusterStateStats();
-            queueStats.readFrom(in);
-        }
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        if (queueStats != null ) {
-            out.writeBoolean(true);
-            queueStats.writeTo(out);
-        }else{
-            out.writeBoolean(false);
-        }
-    }
-
     static final class Fields {
         static final String DISCOVERY = "discovery";
     }

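The DiscoveryStats change above replaces a hand-rolled boolean-prefix protocol (writeBoolean(true) followed by the payload) with readOptionalWriteable/writeOptionalWriteable. The wire format is the same idea either way: one presence flag, then the payload if present. A minimal sketch of that pattern using plain java.io stand-ins so it runs outside Elasticsearch; the class and method names are illustrative:

    // Sketch of the optional-value wire pattern: one presence byte, then the payload.
    import java.io.*;

    public class OptionalWireExample {
        static void writeOptional(DataOutput out, Integer value) throws IOException {
            out.writeBoolean(value != null); // presence flag
            if (value != null) {
                out.writeInt(value);         // payload only when present
            }
        }

        static Integer readOptional(DataInput in) throws IOException {
            return in.readBoolean() ? in.readInt() : null;
        }

        public static void main(String[] args) throws IOException {
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            writeOptional(new DataOutputStream(bytes), 42);
            Integer roundTripped = readOptional(
                new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
            System.out.println(roundTripped); // 42
        }
    }
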
@@ -47,6 +47,7 @@ import org.elasticsearch.discovery.BlockingClusterStatePublishResponseHandler;
 import org.elasticsearch.discovery.Discovery;
 import org.elasticsearch.discovery.DiscoverySettings;
 import org.elasticsearch.discovery.DiscoveryStats;
+import org.elasticsearch.discovery.zen.publish.PendingClusterStateStats;

 import java.util.HashSet;
 import java.util.Optional;

@@ -281,7 +282,7 @@ public class LocalDiscovery extends AbstractLifecycleComponent implements Discov

     @Override
     public DiscoveryStats stats() {
-        return new DiscoveryStats(null);
+        return new DiscoveryStats((PendingClusterStateStats)null);
     }

     @Override

@@ -21,7 +21,7 @@ package org.elasticsearch.discovery.zen.publish;

 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;

@@ -30,15 +30,11 @@ import java.io.IOException;
 /**
  * Class encapsulating stats about the PendingClusterStatsQueue
  */
-public class PendingClusterStateStats implements Streamable, ToXContent {
+public class PendingClusterStateStats implements Writeable, ToXContent {

-    private int total;
-    private int pending;
-    private int committed;
-
-    public PendingClusterStateStats() {
-
-    }
+    private final int total;
+    private final int pending;
+    private final int committed;

     public PendingClusterStateStats(int total, int pending, int committed) {
         this.total = total;

@@ -46,6 +42,19 @@ public class PendingClusterStateStats implements Streamable, ToXContent {
         this.committed = committed;
     }

+    public PendingClusterStateStats(StreamInput in) throws IOException {
+        total = in.readVInt();
+        pending = in.readVInt();
+        committed = in.readVInt();
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeVInt(total);
+        out.writeVInt(pending);
+        out.writeVInt(committed);
+    }
+
     public int getCommitted() {
         return committed;
     }

@@ -68,20 +77,6 @@ public class PendingClusterStateStats implements Streamable, ToXContent {
         return builder;
     }

-    @Override
-    public void readFrom(StreamInput in) throws IOException {
-        total = in.readVInt();
-        pending = in.readVInt();
-        committed = in.readVInt();
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        out.writeVInt(total);
-        out.writeVInt(pending);
-        out.writeVInt(committed);
-    }
-
     static final class Fields {
         static final String QUEUE = "cluster_state_queue";
         static final String TOTAL = "total";

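PendingClusterStateStats serializes its counters with writeVInt/readVInt. Variable-length ints spend one byte per 7 bits of payload, so small non-negative counters (the common case for queue stats) usually fit in a single byte. A self-contained sketch of the idea, assuming the usual continuation-bit scheme; this mirrors the concept, not necessarily Elasticsearch's exact byte layout:

    // Sketch of 7-bits-per-byte varint encoding: the high bit marks "more bytes follow".
    import java.io.ByteArrayOutputStream;

    public class VIntExample {
        static byte[] writeVInt(int value) {
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            while ((value & ~0x7F) != 0) {        // more than 7 bits remain
                out.write((value & 0x7F) | 0x80); // low 7 bits, continuation bit set
                value >>>= 7;
            }
            out.write(value);                     // final byte, continuation bit clear
            return out.toByteArray();
        }

        public static void main(String[] args) {
            System.out.println(writeVInt(5).length);       // 1 byte
            System.out.println(writeVInt(300).length);     // 2 bytes
            System.out.println(writeVInt(1 << 28).length); // 5 bytes
        }
    }
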
@@ -21,42 +21,23 @@ package org.elasticsearch.http;

 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;

 import java.io.IOException;

-public class HttpStats implements Streamable, ToXContent {
+public class HttpStats implements Writeable, ToXContent {

-    private long serverOpen;
-    private long totalOpen;
-
-    HttpStats() {
-
-    }
+    private final long serverOpen;
+    private final long totalOpen;

     public HttpStats(long serverOpen, long totalOpen) {
         this.serverOpen = serverOpen;
         this.totalOpen = totalOpen;
     }

-    public long getServerOpen() {
-        return this.serverOpen;
-    }
-
-    public long getTotalOpen() {
-        return this.totalOpen;
-    }
-
-    public static HttpStats readHttpStats(StreamInput in) throws IOException {
-        HttpStats stats = new HttpStats();
-        stats.readFrom(in);
-        return stats;
-    }
-
-    @Override
-    public void readFrom(StreamInput in) throws IOException {
+    public HttpStats(StreamInput in) throws IOException {
         serverOpen = in.readVLong();
         totalOpen = in.readVLong();
     }

@@ -67,6 +48,14 @@ public class HttpStats implements Streamable, ToXContent {
         out.writeVLong(totalOpen);
     }

+    public long getServerOpen() {
+        return this.serverOpen;
+    }
+
+    public long getTotalOpen() {
+        return this.totalOpen;
+    }
+
     static final class Fields {
         static final String HTTP = "http";
         static final String CURRENT_OPEN = "current_open";

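HttpStats follows the same migration as the stats classes above: fields become final and a StreamInput constructor replaces readFrom. One practical benefit is that serialization can be exercised with a simple round-trip check. A sketch of such a test, assuming BytesStreamOutput from this codebase behaves as it does elsewhere in the tree; the assertion style here is illustrative:

    // Sketch: round-trip a Writeable through an in-memory stream and compare fields.
    import org.elasticsearch.common.io.stream.BytesStreamOutput;
    import org.elasticsearch.common.io.stream.StreamInput;
    import org.elasticsearch.http.HttpStats;

    import java.io.IOException;

    public class HttpStatsRoundTrip {
        public static void main(String[] args) throws IOException {
            HttpStats original = new HttpStats(3, 17);
            BytesStreamOutput out = new BytesStreamOutput();
            original.writeTo(out);
            StreamInput in = out.bytes().streamInput();
            HttpStats copy = new HttpStats(in);
            assert copy.getServerOpen() == original.getServerOpen();
            assert copy.getTotalOpen() == original.getTotalOpen();
        }
    }
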
@@ -263,8 +263,9 @@ public final class IndexSettings {
     scopedSettings.addSettingsUpdateConsumer(MergePolicyConfig.INDEX_MERGE_POLICY_MAX_MERGED_SEGMENT_SETTING, mergePolicyConfig::setMaxMergedSegment);
     scopedSettings.addSettingsUpdateConsumer(MergePolicyConfig.INDEX_MERGE_POLICY_SEGMENTS_PER_TIER_SETTING, mergePolicyConfig::setSegmentsPerTier);
     scopedSettings.addSettingsUpdateConsumer(MergePolicyConfig.INDEX_MERGE_POLICY_RECLAIM_DELETES_WEIGHT_SETTING, mergePolicyConfig::setReclaimDeletesWeight);
-    scopedSettings.addSettingsUpdateConsumer(MergeSchedulerConfig.MAX_THREAD_COUNT_SETTING, mergeSchedulerConfig::setMaxThreadCount);
-    scopedSettings.addSettingsUpdateConsumer(MergeSchedulerConfig.MAX_MERGE_COUNT_SETTING, mergeSchedulerConfig::setMaxMergeCount);
+    scopedSettings.addSettingsUpdateConsumer(MergeSchedulerConfig.MAX_THREAD_COUNT_SETTING, MergeSchedulerConfig.MAX_MERGE_COUNT_SETTING,
+        mergeSchedulerConfig::setMaxThreadAndMergeCount);
     scopedSettings.addSettingsUpdateConsumer(MergeSchedulerConfig.AUTO_THROTTLE_SETTING, mergeSchedulerConfig::setAutoThrottle);
     scopedSettings.addSettingsUpdateConsumer(INDEX_TRANSLOG_DURABILITY_SETTING, this::setTranslogDurability);
     scopedSettings.addSettingsUpdateConsumer(INDEX_TTL_DISABLE_PURGE_SETTING, this::setTTLPurgeDisabled);

@@ -69,13 +69,14 @@ public final class MergeSchedulerConfig {
     private volatile int maxMergeCount;

     MergeSchedulerConfig(IndexSettings indexSettings) {
-        maxThreadCount = indexSettings.getValue(MAX_THREAD_COUNT_SETTING);
-        maxMergeCount = indexSettings.getValue(MAX_MERGE_COUNT_SETTING);
+        setMaxThreadAndMergeCount(indexSettings.getValue(MAX_THREAD_COUNT_SETTING),
+            indexSettings.getValue(MAX_MERGE_COUNT_SETTING));
         this.autoThrottle = indexSettings.getValue(AUTO_THROTTLE_SETTING);
     }

     /**
      * Returns <code>true</code> iff auto throttle is enabled.
      *
      * @see ConcurrentMergeScheduler#enableAutoIOThrottle()
      */
     public boolean isAutoThrottle() {

@@ -100,8 +101,19 @@ public final class MergeSchedulerConfig {
      * Expert: directly set the maximum number of merge threads and
      * simultaneous merges allowed.
      */
-    void setMaxThreadCount(int maxThreadCount) {
+    void setMaxThreadAndMergeCount(int maxThreadCount, int maxMergeCount) {
+        if (maxThreadCount < 1) {
+            throw new IllegalArgumentException("maxThreadCount should be at least 1");
+        }
+        if (maxMergeCount < 1) {
+            throw new IllegalArgumentException("maxMergeCount should be at least 1");
+        }
+        if (maxThreadCount > maxMergeCount) {
+            throw new IllegalArgumentException("maxThreadCount (= " + maxThreadCount +
+                ") should be <= maxMergeCount (= " + maxMergeCount + ")");
+        }
         this.maxThreadCount = maxThreadCount;
+        this.maxMergeCount = maxMergeCount;
     }

     /**

@@ -110,12 +122,4 @@ public final class MergeSchedulerConfig {
     public int getMaxMergeCount() {
         return maxMergeCount;
     }
-
-    /**
-     *
-     * Expert: set the maximum number of simultaneous merges allowed.
-     */
-    void setMaxMergeCount(int maxMergeCount) {
-        this.maxMergeCount = maxMergeCount;
-    }
 }

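The IndexSettings and MergeSchedulerConfig hunks above register one update consumer for the pair of settings instead of one per setting, so the maxThreadCount <= maxMergeCount invariant is validated against both new values at once; validating them one at a time can reject a valid target state just because of the order the updates arrive in. A minimal sketch of the idea; the class and values here are illustrative:

    // Sketch: validate two interdependent values atomically instead of one at a time.
    public class PairValidationExample {
        private volatile int maxThreadCount = 1;
        private volatile int maxMergeCount = 6;

        // Single update path for both values, so the invariant is checked on the pair.
        synchronized void setMaxThreadAndMergeCount(int maxThreadCount, int maxMergeCount) {
            if (maxThreadCount < 1 || maxMergeCount < 1) {
                throw new IllegalArgumentException("both counts must be at least 1");
            }
            if (maxThreadCount > maxMergeCount) {
                throw new IllegalArgumentException("maxThreadCount (= " + maxThreadCount
                    + ") must be <= maxMergeCount (= " + maxMergeCount + ")");
            }
            this.maxThreadCount = maxThreadCount;
            this.maxMergeCount = maxMergeCount;
        }

        public static void main(String[] args) {
            PairValidationExample config = new PairValidationExample();
            config.setMaxThreadAndMergeCount(4, 8);  // fine: 4 <= 8
            // Setting the thread count to 8 first, while the merge count was still 6,
            // would have failed under per-setting validation even though (8, 16) is valid.
            config.setMaxThreadAndMergeCount(8, 16);
        }
    }
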
@@ -198,52 +198,6 @@ public enum VersionType implements Writeable {
             return version >= 0L || version == Versions.MATCH_ANY;
         }

-    },
-    /**
-     * Warning: this version type should be used with care. Concurrent indexing may result in loss of data on replicas
-     */
-    FORCE((byte) 3) {
-        @Override
-        public boolean isVersionConflictForWrites(long currentVersion, long expectedVersion, boolean deleted) {
-            if (currentVersion == Versions.NOT_FOUND) {
-                return false;
-            }
-            if (expectedVersion == Versions.MATCH_ANY) {
-                throw new IllegalStateException("you must specify a version when use VersionType.FORCE");
-            }
-            return false;
-        }
-
-        @Override
-        public String explainConflictForWrites(long currentVersion, long expectedVersion, boolean deleted) {
-            throw new AssertionError("VersionType.FORCE should never result in a write conflict");
-        }
-
-        @Override
-        public boolean isVersionConflictForReads(long currentVersion, long expectedVersion) {
-            return false;
-        }
-
-        @Override
-        public String explainConflictForReads(long currentVersion, long expectedVersion) {
-            throw new AssertionError("VersionType.FORCE should never result in a read conflict");
-        }
-
-        @Override
-        public long updateVersion(long currentVersion, long expectedVersion) {
-            return expectedVersion;
-        }
-
-        @Override
-        public boolean validateVersionForWrites(long version) {
-            return version >= 0L;
-        }
-
-        @Override
-        public boolean validateVersionForReads(long version) {
-            return version >= 0L || version == Versions.MATCH_ANY;
-        }
-
     };

     private final byte value;

@@ -337,8 +291,6 @@ public enum VersionType implements Writeable {
             return EXTERNAL;
         } else if ("external_gte".equals(versionType)) {
             return EXTERNAL_GTE;
-        } else if ("force".equals(versionType)) {
-            return FORCE;
         }
         throw new IllegalArgumentException("No version type match [" + versionType + "]");
     }

@@ -357,8 +309,6 @@ public enum VersionType implements Writeable {
             return EXTERNAL;
         } else if (value == 2) {
             return EXTERNAL_GTE;
-        } else if (value == 3) {
-            return FORCE;
         }
         throw new IllegalArgumentException("No version type match [" + value + "]");
     }

@@ -174,7 +174,7 @@ public final class AnalysisRegistry implements Closeable {
         Settings currentSettings = tokenizerSettings.get(tokenizer);
         return getAnalysisProvider("tokenizer", tokenizers, tokenizer, currentSettings.get("type"));
     } else {
-        return prebuiltAnalysis.tokenizerFactories.get(tokenizer);
+        return getTokenizerProvider(tokenizer);
     }
 }

@@ -202,7 +202,7 @@ public final class AnalysisRegistry implements Closeable {
             return getAnalysisProvider("tokenfilter", tokenFilters, tokenFilter, typeName);
         }
     } else {
-        return prebuiltAnalysis.tokenFilterFactories.get(tokenFilter);
+        return getTokenFilterProvider(tokenFilter);
     }
 }

@@ -220,7 +220,7 @@ public final class AnalysisRegistry implements Closeable {
         Settings currentSettings = tokenFilterSettings.get(charFilter);
         return getAnalysisProvider("charfilter", charFilters, charFilter, currentSettings.get("type"));
     } else {
-        return prebuiltAnalysis.charFilterFactories.get(charFilter);
+        return getCharFilterProvider(charFilter);
     }
 }

@@ -496,7 +496,10 @@ public class DateFieldMapper extends FieldMapper {
     }

     @Override
-    protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
+    protected void parseCreateField(ParseContext originalContext, List<Field> fields) throws IOException {
+        // Date fields, by default, will not be included in _all
+        final ParseContext context = originalContext.setIncludeInAllDefault(false);
+
         String dateAsString;
         if (context.externalValueSet()) {
             Object dateAsObject = context.externalValue();

@@ -110,7 +110,10 @@ public class GeoPointFieldMapper extends BaseGeoPointFieldMapper {
     }

     @Override
-    protected void parse(ParseContext context, GeoPoint point, String geoHash) throws IOException {
+    protected void parse(ParseContext originalContext, GeoPoint point, String geoHash) throws IOException {
+        // Geopoint fields, by default, will not be included in _all
+        final ParseContext context = originalContext.setIncludeInAllDefault(false);
+
         if (ignoreMalformed.value() == false) {
             if (point.lat() > 90.0 || point.lat() < -90.0) {
                 throw new IllegalArgumentException("illegal latitude value [" + point.lat() + "] for " + name());

@@ -430,7 +430,10 @@ public class GeoShapeFieldMapper extends FieldMapper {
     }

     @Override
-    public Mapper parse(ParseContext context) throws IOException {
+    public Mapper parse(ParseContext originalContext) throws IOException {
+        // Numeric fields, by default, will not be included in _all
+        final ParseContext context = originalContext.setIncludeInAllDefault(false);
+
         try {
             Shape shape = context.parseExternalValue(Shape.class);
             if (shape == null) {

@@ -285,7 +285,10 @@ public class IpFieldMapper extends FieldMapper {
     }

     @Override
-    protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
+    protected void parseCreateField(ParseContext originalContext, List<Field> fields) throws IOException {
+        // IP fields, by default, will not be included in _all
+        final ParseContext context = originalContext.setIncludeInAllDefault(false);
+
         Object addressAsObject;
         if (context.externalValueSet()) {
             addressAsObject = context.externalValue();

@@ -895,7 +895,9 @@ public class NumberFieldMapper extends FieldMapper {
     }

     @Override
-    protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
+    protected void parseCreateField(ParseContext originalContext, List<Field> fields) throws IOException {
+        // Numeric fields, by default, will not be included in _all
+        final ParseContext context = originalContext.setIncludeInAllDefault(false);
         final boolean includeInAll = context.includeInAll(this.includeInAll, this);

         XContentParser parser = context.parser();

@@ -364,7 +364,9 @@ public class ScaledFloatFieldMapper extends FieldMapper {
     }

     @Override
-    protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
+    protected void parseCreateField(ParseContext originalContext, List<Field> fields) throws IOException {
+        // Numeric fields, by default, will not be included in _all
+        final ParseContext context = originalContext.setIncludeInAllDefault(false);
        final boolean includeInAll = context.includeInAll(this.includeInAll, this);

         XContentParser parser = context.parser();

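All of the mapper hunks above apply one pattern: instead of special-casing each field type where _all is assembled, the parse context is wrapped once so that include_in_all defaults to false for these types, while an explicit mapping setting still wins. A reduced sketch of that three-valued resolution; the class and method names are illustrative, not the mapper API:

    // Sketch: explicit setting wins, otherwise a per-context default applies.
    // Mirrors setIncludeInAllDefault(false) in spirit.
    public class IncludeInAllExample {
        static boolean resolve(Boolean explicitSetting, boolean contextDefault) {
            return explicitSetting != null ? explicitSetting : contextDefault;
        }

        public static void main(String[] args) {
            // Date/geo/ip/numeric fields: context default flipped to false.
            System.out.println(resolve(null, false));  // false - not in _all by default
            System.out.println(resolve(true, false));  // true  - mapping opted in explicitly
            System.out.println(resolve(false, true));  // false - explicit opt-out
        }
    }
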
@@ -275,15 +275,15 @@ public class SourceFieldMapper extends MetadataFieldMapper {
     }

     if (includes != null) {
-        builder.field("includes", includes);
+        builder.array("includes", includes);
     } else if (includeDefaults) {
-        builder.field("includes", Strings.EMPTY_ARRAY);
+        builder.array("includes", Strings.EMPTY_ARRAY);
     }

     if (excludes != null) {
-        builder.field("excludes", excludes);
+        builder.array("excludes", excludes);
     } else if (includeDefaults) {
-        builder.field("excludes", Strings.EMPTY_ARRAY);
+        builder.array("excludes", Strings.EMPTY_ARRAY);
     }

     builder.endObject();

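This hunk (and the similar builder.array changes in the query builders and JvmInfo below) switches array-valued output to the explicit array(...) overload, so the intent is unambiguous where field(String, Object) could dispatch differently, and the output is always a JSON array even for zero or one element. A small sketch of explicitly writing an array field, using Jackson directly since it is already on the classpath; the field name is illustrative:

    // Sketch: explicitly writing a JSON array field with Jackson.
    import com.fasterxml.jackson.core.JsonFactory;
    import com.fasterxml.jackson.core.JsonGenerator;

    import java.io.IOException;
    import java.io.StringWriter;

    public class ArrayFieldExample {
        public static void main(String[] args) throws IOException {
            StringWriter out = new StringWriter();
            try (JsonGenerator gen = new JsonFactory().createGenerator(out)) {
                gen.writeStartObject();
                gen.writeArrayFieldStart("includes"); // always an array, even when empty
                for (String include : new String[] { "user.*" }) {
                    gen.writeString(include);
                }
                gen.writeEndArray();
                gen.writeEndObject();
            }
            System.out.println(out); // {"includes":["user.*"]}
        }
    }
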
@@ -132,7 +132,7 @@ public class IndicesQueryBuilder extends AbstractQueryBuilder<IndicesQueryBuilde
     @Override
     protected void doXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject(NAME);
-        builder.field(INDICES_FIELD.getPreferredName(), indices);
+        builder.array(INDICES_FIELD.getPreferredName(), indices);
         builder.field(QUERY_FIELD.getPreferredName());
         innerQuery.toXContent(builder, params);
         builder.field(NO_MATCH_QUERY.getPreferredName());

@@ -36,7 +36,6 @@ import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField;
 import org.elasticsearch.search.fetch.StoredFieldsContext;
 import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
-import org.elasticsearch.search.fetch.subphase.DocValueFieldsFetchSubPhase;
 import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
 import org.elasticsearch.search.fetch.subphase.InnerHitsContext;
 import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;

@@ -572,12 +571,7 @@ public final class InnerHitBuilder extends ToXContentToBytes implements Writeabl
         innerHitsContext.storedFieldsContext(storedFieldsContext);
     }
     if (docValueFields != null) {
-        DocValueFieldsContext docValueFieldsContext = innerHitsContext
-            .getFetchSubPhaseContext(DocValueFieldsFetchSubPhase.CONTEXT_FACTORY);
-        for (String field : docValueFields) {
-            docValueFieldsContext.add(new DocValueFieldsContext.DocValueField(field));
-        }
-        docValueFieldsContext.setHitExecutionNeeded(true);
+        innerHitsContext.docValueFieldsContext(new DocValueFieldsContext(docValueFields));
     }
     if (scriptFields != null) {
         for (ScriptField field : scriptFields) {

@@ -780,7 +780,7 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
     protected void doXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject(NAME);
         if (fields != null) {
-            builder.field(Field.FIELDS.getPreferredName(), fields);
+            builder.array(Field.FIELDS.getPreferredName(), fields);
         }
         buildLikeField(builder, Field.LIKE.getPreferredName(), likeTexts, likeItems);
         buildLikeField(builder, Field.UNLIKE.getPreferredName(), unlikeTexts, unlikeItems);

@@ -791,7 +791,7 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
         builder.field(Field.MIN_WORD_LENGTH.getPreferredName(), minWordLength);
         builder.field(Field.MAX_WORD_LENGTH.getPreferredName(), maxWordLength);
         if (stopWords != null) {
-            builder.field(Field.STOP_WORDS.getPreferredName(), stopWords);
+            builder.array(Field.STOP_WORDS.getPreferredName(), stopWords);
         }
         if (analyzer != null) {
             builder.field(Field.ANALYZER.getPreferredName(), analyzer);

@@ -48,7 +48,6 @@ public class SpanNearQueryBuilder extends AbstractQueryBuilder<SpanNearQueryBuil
     public static boolean DEFAULT_IN_ORDER = true;

     private static final ParseField SLOP_FIELD = new ParseField("slop");
-    private static final ParseField COLLECT_PAYLOADS_FIELD = new ParseField("collect_payloads").withAllDeprecated("no longer supported");
     private static final ParseField CLAUSES_FIELD = new ParseField("clauses");
     private static final ParseField IN_ORDER_FIELD = new ParseField("in_order");

@@ -175,8 +174,6 @@ public class SpanNearQueryBuilder extends AbstractQueryBuilder<SpanNearQueryBuil
             } else if (token.isValue()) {
                 if (parseContext.getParseFieldMatcher().match(currentFieldName, IN_ORDER_FIELD)) {
                     inOrder = parser.booleanValue();
-                } else if (parseContext.getParseFieldMatcher().match(currentFieldName, COLLECT_PAYLOADS_FIELD)) {
-                    // Deprecated in 3.0.0
                 } else if (parseContext.getParseFieldMatcher().match(currentFieldName, SLOP_FIELD)) {
                     slop = parser.intValue();
                 } else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {

@@ -35,12 +35,12 @@ import org.elasticsearch.index.engine.SegmentsStats;
 import org.elasticsearch.index.fielddata.FieldDataStats;
 import org.elasticsearch.index.flush.FlushStats;
 import org.elasticsearch.index.get.GetStats;
-import org.elasticsearch.index.shard.IndexingStats;
 import org.elasticsearch.index.merge.MergeStats;
 import org.elasticsearch.index.recovery.RecoveryStats;
 import org.elasticsearch.index.refresh.RefreshStats;
 import org.elasticsearch.index.search.stats.SearchStats;
 import org.elasticsearch.index.shard.DocsStats;
+import org.elasticsearch.index.shard.IndexingStats;
 import org.elasticsearch.index.store.StoreStats;
 import org.elasticsearch.search.suggest.completion.CompletionStats;

@@ -154,7 +154,7 @@ public class NodeIndicesStats implements Streamable, ToXContent {

     @Override
     public void readFrom(StreamInput in) throws IOException {
-        stats = CommonStats.readCommonStats(in);
+        stats = new CommonStats(in);
         if (in.readBoolean()) {
             int entries = in.readVInt();
             statsByShard = new HashMap<>();

@@ -21,7 +21,7 @@ package org.elasticsearch.indices.breaker;

 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;

@@ -30,18 +30,23 @@ import java.io.IOException;
 /**
  * Stats class encapsulating all of the different circuit breaker stats
  */
-public class AllCircuitBreakerStats implements Streamable, ToXContent {
+public class AllCircuitBreakerStats implements Writeable, ToXContent {

-    private CircuitBreakerStats[] allStats = new CircuitBreakerStats[0];
-
-    public AllCircuitBreakerStats() {
-
-    }
+    private final CircuitBreakerStats[] allStats;

     public AllCircuitBreakerStats(CircuitBreakerStats[] allStats) {
         this.allStats = allStats;
     }

+    public AllCircuitBreakerStats(StreamInput in) throws IOException {
+        allStats = in.readArray(CircuitBreakerStats::new, CircuitBreakerStats[]::new);
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeArray(allStats);
+    }
+
     public CircuitBreakerStats[] getAllStats() {
         return this.allStats;
     }

@@ -55,33 +60,6 @@ public class AllCircuitBreakerStats implements Streamable, ToXContent {
         return null;
     }

-    public static AllCircuitBreakerStats readOptionalAllCircuitBreakerStats(StreamInput in) throws IOException {
-        AllCircuitBreakerStats stats = in.readOptionalStreamable(AllCircuitBreakerStats::new);
-        return stats;
-    }
-
-    @Override
-    public void readFrom(StreamInput in) throws IOException {
-        int statCount = in.readVInt();
-        CircuitBreakerStats[] newStats = new CircuitBreakerStats[statCount];
-        for (int i = 0; i < statCount; i++) {
-            CircuitBreakerStats stats = new CircuitBreakerStats();
-            stats.readFrom(in);
-            newStats[i] = stats;
-        }
-        allStats = newStats;
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        out.writeVInt(allStats.length);
-        for (CircuitBreakerStats stats : allStats) {
-            if (stats != null) {
-                stats.writeTo(out);
-            }
-        }
-    }
-
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject(Fields.BREAKERS);

@@ -21,7 +21,7 @@ package org.elasticsearch.indices.breaker;

 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;

@@ -32,17 +32,13 @@ import java.util.Locale;
 /**
  * Class encapsulating stats about the circuit breaker
  */
-public class CircuitBreakerStats implements Streamable, ToXContent {
+public class CircuitBreakerStats implements Writeable, ToXContent {

-    private String name;
-    private long limit;
-    private long estimated;
-    private long trippedCount;
-    private double overhead;
-
-    CircuitBreakerStats() {
-
-    }
+    private final String name;
+    private final long limit;
+    private final long estimated;
+    private final long trippedCount;
+    private final double overhead;

     public CircuitBreakerStats(String name, long limit, long estimated, double overhead, long trippedCount) {
         this.name = name;

@@ -52,6 +48,23 @@ public class CircuitBreakerStats implements Streamable, ToXContent {
         this.overhead = overhead;
     }

+    public CircuitBreakerStats(StreamInput in) throws IOException {
+        limit = in.readLong();
+        estimated = in.readLong();
+        overhead = in.readDouble();
+        this.trippedCount = in.readLong();
+        this.name = in.readString();
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeLong(limit);
+        out.writeLong(estimated);
+        out.writeDouble(overhead);
+        out.writeLong(trippedCount);
+        out.writeString(name);
+    }
+
     public String getName() {
         return this.name;
     }

@@ -72,30 +85,6 @@ public class CircuitBreakerStats implements Streamable, ToXContent {
         return this.overhead;
     }

-    public static CircuitBreakerStats readOptionalCircuitBreakerStats(StreamInput in) throws IOException {
-        CircuitBreakerStats stats = in.readOptionalStreamable(CircuitBreakerStats::new);
-        return stats;
-    }
-
-    @Override
-    public void readFrom(StreamInput in) throws IOException {
-        // limit is the maximum from the old circuit breaker stats for backwards compatibility
-        limit = in.readLong();
-        estimated = in.readLong();
-        overhead = in.readDouble();
-        this.trippedCount = in.readLong();
-        this.name = in.readString();
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        out.writeLong(limit);
-        out.writeLong(estimated);
-        out.writeDouble(overhead);
-        out.writeLong(trippedCount);
-        out.writeString(name);
-    }
-
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject(name.toLowerCase(Locale.ROOT));

@@ -260,9 +260,9 @@ public class RecoveryState implements ToXContent, Streamable {
     builder.field(Fields.TYPE, recoverySource.getType());
     builder.field(Fields.STAGE, stage.toString());
     builder.field(Fields.PRIMARY, primary);
-    builder.dateValueField(Fields.START_TIME_IN_MILLIS, Fields.START_TIME, timer.startTime);
+    builder.dateField(Fields.START_TIME_IN_MILLIS, Fields.START_TIME, timer.startTime);
     if (timer.stopTime > 0) {
-        builder.dateValueField(Fields.STOP_TIME_IN_MILLIS, Fields.STOP_TIME, timer.stopTime);
+        builder.dateField(Fields.STOP_TIME_IN_MILLIS, Fields.STOP_TIME, timer.stopTime);
     }
     builder.timeValueField(Fields.TOTAL_TIME_IN_MILLIS, Fields.TOTAL_TIME, timer.time());

@@ -107,15 +107,14 @@ public final class ConfigurationUtils {
             value.getClass().getName() + "]");
     }

-
     /**
      * Returns and removes the specified property from the specified configuration map.
      *
      * If the property value isn't of type int a {@link ElasticsearchParseException} is thrown.
      * If the property is missing an {@link ElasticsearchParseException} is thrown
      */
-    public static int readIntProperty(String processorType, String processorTag, Map<String, Object> configuration,
-                                      String propertyName, int defaultValue) {
+    public static Integer readIntProperty(String processorType, String processorTag, Map<String, Object> configuration,
+                                          String propertyName, Integer defaultValue) {
         Object value = configuration.remove(propertyName);
         if (value == null) {
             return defaultValue;

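Widening readIntProperty from int to Integer is what lets Pipeline (next hunks) pass null as the default and distinguish "version absent" from any sentinel value. A compact illustration of why the boxed type matters, with the remove-on-read semantics the real helper also has; the names here are illustrative:

    // Sketch: a boxed Integer default can represent "absent" without a sentinel.
    import java.util.HashMap;
    import java.util.Map;

    public class NullableDefaultExample {
        static Integer readIntProperty(Map<String, Object> config, String key, Integer defaultValue) {
            Object value = config.remove(key); // property is consumed, as in the real helper
            if (value == null) {
                return defaultValue;           // may legitimately be null
            }
            return Integer.parseInt(value.toString());
        }

        public static void main(String[] args) {
            Map<String, Object> config = new HashMap<>();
            config.put("version", "7");
            System.out.println(readIntProperty(config, "version", null)); // 7
            System.out.println(readIntProperty(config, "version", null)); // null - key consumed
        }
    }
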
@@ -20,6 +20,7 @@
 package org.elasticsearch.ingest;

 import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.common.Nullable;

 import java.util.Arrays;
 import java.util.Collections;

@@ -33,16 +34,21 @@ public final class Pipeline {

     static final String DESCRIPTION_KEY = "description";
     static final String PROCESSORS_KEY = "processors";
+    static final String VERSION_KEY = "version";
     static final String ON_FAILURE_KEY = "on_failure";

     private final String id;
+    @Nullable
     private final String description;
+    @Nullable
+    private final Integer version;
     private final CompoundProcessor compoundProcessor;

-    public Pipeline(String id, String description, CompoundProcessor compoundProcessor) {
+    public Pipeline(String id, @Nullable String description, @Nullable Integer version, CompoundProcessor compoundProcessor) {
         this.id = id;
         this.description = description;
         this.compoundProcessor = compoundProcessor;
+        this.version = version;
     }

     /**

@@ -62,10 +68,21 @@ public final class Pipeline {
     /**
      * An optional description of what this pipeline is doing to the data gets processed by this pipeline.
      */
+    @Nullable
     public String getDescription() {
         return description;
     }

+    /**
+     * An optional version stored with the pipeline so that it can be used to determine if the pipeline should be updated / replaced.
+     *
+     * @return {@code null} if not supplied.
+     */
+    @Nullable
+    public Integer getVersion() {
+        return version;
+    }
+
     /**
      * Get the underlying {@link CompoundProcessor} containing the Pipeline's processors
      */

@@ -100,6 +117,7 @@ public final class Pipeline {

     public Pipeline create(String id, Map<String, Object> config, Map<String, Processor.Factory> processorFactories) throws Exception {
         String description = ConfigurationUtils.readOptionalStringProperty(null, null, config, DESCRIPTION_KEY);
+        Integer version = ConfigurationUtils.readIntProperty(null, null, config, VERSION_KEY, null);
         List<Map<String, Map<String, Object>>> processorConfigs = ConfigurationUtils.readList(null, null, config, PROCESSORS_KEY);
         List<Processor> processors = ConfigurationUtils.readProcessorConfigs(processorConfigs, processorFactories);
         List<Map<String, Map<String, Object>>> onFailureProcessorConfigs =

@@ -114,7 +132,7 @@ public final class Pipeline {
         }
         CompoundProcessor compoundProcessor = new CompoundProcessor(false, Collections.unmodifiableList(processors),
             Collections.unmodifiableList(onFailureProcessors));
-        return new Pipeline(id, description, compoundProcessor);
+        return new Pipeline(id, description, version, compoundProcessor);
     }

 }

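With the version key wired through Pipeline.create, a pipeline definition can now carry an optional integer version alongside its description. A sketch of the configuration shape the factory reads, built as a plain map the way a parsed JSON body would arrive; the processor list is left empty here purely for brevity:

    // Sketch: the map shape Pipeline.create(...) consumes - description and
    // version are optional, processors is required.
    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.Map;

    public class PipelineConfigExample {
        public static void main(String[] args) {
            Map<String, Object> config = new HashMap<>();
            config.put("description", "strip debug fields");
            config.put("version", 3);                    // read via readIntProperty(..., null)
            config.put("processors", new ArrayList<>()); // processor configs would go here
            System.out.println(config);
        }
    }
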
@@ -386,6 +386,30 @@ public class FsInfo implements Iterable<FsInfo.Path>, Writeable, ToXContent {
         out.writeLong(totalWriteKilobytes);
     }

+    public DeviceStats[] getDevicesStats() {
+        return devicesStats;
+    }
+
+    public long getTotalOperations() {
+        return totalOperations;
+    }
+
+    public long getTotalReadOperations() {
+        return totalReadOperations;
+    }
+
+    public long getTotalWriteOperations() {
+        return totalWriteOperations;
+    }
+
+    public long getTotalReadKilobytes() {
+        return totalReadKilobytes;
+    }
+
+    public long getTotalWriteKilobytes() {
+        return totalWriteKilobytes;
+    }
+
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         if (devicesStats.length > 0) {

@@ -410,16 +434,16 @@ public class FsInfo implements Iterable<FsInfo.Path>, Writeable, ToXContent {

     }

-    final long timestamp;
-    final Path[] paths;
-    final IoStats ioStats;
-    Path total;
+    private final long timestamp;
+    private final Path[] paths;
+    private final IoStats ioStats;
+    private final Path total;

     public FsInfo(long timestamp, IoStats ioStats, Path[] paths) {
         this.timestamp = timestamp;
         this.ioStats = ioStats;
         this.paths = paths;
-        this.total = null;
+        this.total = total();
     }

     /**

@@ -432,6 +456,7 @@ public class FsInfo implements Iterable<FsInfo.Path>, Writeable, ToXContent {
         for (int i = 0; i < paths.length; i++) {
             paths[i] = new Path(in);
         }
+        this.total = total();
     }

     @Override

@@ -445,13 +470,10 @@ public class FsInfo implements Iterable<FsInfo.Path>, Writeable, ToXContent {
     }

     public Path getTotal() {
-        return total();
+        return total;
     }

-    public Path total() {
-        if (total != null) {
-            return total;
-        }
+    private Path total() {
         Path res = new Path();
         Set<String> seenDevices = new HashSet<>(paths.length);
         for (Path subPath : paths) {

@@ -462,7 +484,6 @@ public class FsInfo implements Iterable<FsInfo.Path>, Writeable, ToXContent {
         }
         res.add(subPath);
     }
-    total = res;
     return res;
 }

@@ -506,5 +527,4 @@ public class FsInfo implements Iterable<FsInfo.Path>, Writeable, ToXContent {
         static final String TOTAL = "total";
         static final String IO_STATS = "io_stats";
     }
-
 }

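The FsInfo change computes total once in each constructor instead of lazily caching it on first access, which is what allows the field to be final and the object to be shared across threads without synchronization. A reduced sketch of that trade; the class and field names are illustrative:

    // Sketch: eager aggregation in the constructor makes the cached sum final,
    // removing the lazy-init race the old "compute on first call" code had.
    public class EagerTotalExample {
        private final long[] values;
        private final long total; // final: safe to publish across threads

        EagerTotalExample(long[] values) {
            this.values = values.clone();
            this.total = computeTotal(); // paid once, up front
        }

        private long computeTotal() {
            long sum = 0;
            for (long value : values) {
                sum += value;
            }
            return sum;
        }

        public long getTotal() {
            return total; // no null check, no mutation
        }

        public static void main(String[] args) {
            System.out.println(new EagerTotalExample(new long[] {100, 250, 650}).getTotal()); // 1000
        }
    }
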
@@ -431,7 +431,7 @@ public class JvmInfo implements Writeable, ToXContent {
     builder.field(Fields.VM_NAME, vmName);
     builder.field(Fields.VM_VERSION, vmVersion);
     builder.field(Fields.VM_VENDOR, vmVendor);
-    builder.dateValueField(Fields.START_TIME_IN_MILLIS, Fields.START_TIME, startTime);
+    builder.dateField(Fields.START_TIME_IN_MILLIS, Fields.START_TIME, startTime);

     builder.startObject(Fields.MEM);
     builder.byteSizeField(Fields.HEAP_INIT_IN_BYTES, Fields.HEAP_INIT, mem.heapInit);

@@ -441,8 +441,8 @@ public class JvmInfo implements Writeable, ToXContent {
     builder.byteSizeField(Fields.DIRECT_MAX_IN_BYTES, Fields.DIRECT_MAX, mem.directMemoryMax);
     builder.endObject();

-    builder.field(Fields.GC_COLLECTORS, gcCollectors);
-    builder.field(Fields.MEMORY_POOLS, memoryPools);
+    builder.array(Fields.GC_COLLECTORS, gcCollectors);
+    builder.array(Fields.MEMORY_POOLS, memoryPools);

     builder.field(Fields.USING_COMPRESSED_OOPS, useCompressedOops);

@@ -21,7 +21,7 @@ package org.elasticsearch.monitor.jvm;

 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.io.stream.Streamable;
+import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.ToXContent;
@@ -39,14 +39,12 @@ import java.lang.management.RuntimeMXBean;
 import java.lang.management.ThreadMXBean;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
 import java.util.concurrent.TimeUnit;

-/**
- *
- */
-public class JvmStats implements Streamable, ToXContent {
+public class JvmStats implements Writeable, ToXContent {

     private static final RuntimeMXBean runtimeMXBean;
     private static final MemoryMXBean memoryMXBean;
@@ -61,21 +59,17 @@ public class JvmStats implements Streamable, ToXContent {
     }

     public static JvmStats jvmStats() {
-        JvmStats stats = new JvmStats(System.currentTimeMillis(), runtimeMXBean.getUptime());
-        stats.mem = new Mem();
         MemoryUsage memUsage = memoryMXBean.getHeapMemoryUsage();
-        stats.mem.heapUsed = memUsage.getUsed() < 0 ? 0 : memUsage.getUsed();
-        stats.mem.heapCommitted = memUsage.getCommitted() < 0 ? 0 : memUsage.getCommitted();
-        stats.mem.heapMax = memUsage.getMax() < 0 ? 0 : memUsage.getMax();
+        long heapUsed = memUsage.getUsed() < 0 ? 0 : memUsage.getUsed();
+        long heapCommitted = memUsage.getCommitted() < 0 ? 0 : memUsage.getCommitted();
+        long heapMax = memUsage.getMax() < 0 ? 0 : memUsage.getMax();
         memUsage = memoryMXBean.getNonHeapMemoryUsage();
-        stats.mem.nonHeapUsed = memUsage.getUsed() < 0 ? 0 : memUsage.getUsed();
-        stats.mem.nonHeapCommitted = memUsage.getCommitted() < 0 ? 0 : memUsage.getCommitted();
-
+        long nonHeapUsed = memUsage.getUsed() < 0 ? 0 : memUsage.getUsed();
+        long nonHeapCommitted = memUsage.getCommitted() < 0 ? 0 : memUsage.getCommitted();
         List<MemoryPoolMXBean> memoryPoolMXBeans = ManagementFactory.getMemoryPoolMXBeans();
         List<MemoryPool> pools = new ArrayList<>();
-        for (int i = 0; i < memoryPoolMXBeans.size(); i++) {
+        for (MemoryPoolMXBean memoryPoolMXBean : memoryPoolMXBeans) {
             try {
-                MemoryPoolMXBean memoryPoolMXBean = memoryPoolMXBeans.get(i);
                 MemoryUsage usage = memoryPoolMXBean.getUsage();
                 MemoryUsage peakUsage = memoryPoolMXBean.getPeakUsage();
                 String name = GcNames.getByMemoryPoolName(memoryPoolMXBean.getName(), null);
@@ -94,55 +88,74 @@ public class JvmStats implements Streamable, ToXContent {
                  * we just omit the pool in that case!*/
             }
         }
-        stats.mem.pools = pools.toArray(new MemoryPool[pools.size()]);
-
-        stats.threads = new Threads();
-        stats.threads.count = threadMXBean.getThreadCount();
-        stats.threads.peakCount = threadMXBean.getPeakThreadCount();
+        Mem mem = new Mem(heapCommitted, heapUsed, heapMax, nonHeapCommitted, nonHeapUsed, Collections.unmodifiableList(pools));
+        Threads threads = new Threads(threadMXBean.getThreadCount(), threadMXBean.getPeakThreadCount());

         List<GarbageCollectorMXBean> gcMxBeans = ManagementFactory.getGarbageCollectorMXBeans();
-        stats.gc = new GarbageCollectors();
-        stats.gc.collectors = new GarbageCollector[gcMxBeans.size()];
-        for (int i = 0; i < stats.gc.collectors.length; i++) {
+        GarbageCollector[] collectors = new GarbageCollector[gcMxBeans.size()];
+        for (int i = 0; i < collectors.length; i++) {
             GarbageCollectorMXBean gcMxBean = gcMxBeans.get(i);
-            stats.gc.collectors[i] = new GarbageCollector();
-            stats.gc.collectors[i].name = GcNames.getByGcName(gcMxBean.getName(), gcMxBean.getName());
-            stats.gc.collectors[i].collectionCount = gcMxBean.getCollectionCount();
-            stats.gc.collectors[i].collectionTime = gcMxBean.getCollectionTime();
+            collectors[i] = new GarbageCollector(GcNames.getByGcName(gcMxBean.getName(), gcMxBean.getName()),
+                gcMxBean.getCollectionCount(), gcMxBean.getCollectionTime());
         }
-
+        GarbageCollectors garbageCollectors = new GarbageCollectors(collectors);
+        List<BufferPool> bufferPoolsList = Collections.emptyList();
         try {
             List<BufferPoolMXBean> bufferPools = ManagementFactory.getPlatformMXBeans(BufferPoolMXBean.class);
-            stats.bufferPools = new ArrayList<>(bufferPools.size());
+            bufferPoolsList = new ArrayList<>(bufferPools.size());
             for (BufferPoolMXBean bufferPool : bufferPools) {
-                stats.bufferPools.add(new BufferPool(bufferPool.getName(), bufferPool.getCount(), bufferPool.getTotalCapacity(), bufferPool.getMemoryUsed()));
+                bufferPoolsList.add(new BufferPool(bufferPool.getName(), bufferPool.getCount(),
+                    bufferPool.getTotalCapacity(), bufferPool.getMemoryUsed()));
             }
         } catch (Exception e) {
             // buffer pools are not available
         }

-        stats.classes = new Classes();
-        stats.classes.loadedClassCount = classLoadingMXBean.getLoadedClassCount();
-        stats.classes.totalLoadedClassCount = classLoadingMXBean.getTotalLoadedClassCount();
-        stats.classes.unloadedClassCount = classLoadingMXBean.getUnloadedClassCount();
+        Classes classes = new Classes(classLoadingMXBean.getLoadedClassCount(), classLoadingMXBean.getTotalLoadedClassCount(),
+            classLoadingMXBean.getUnloadedClassCount());

-        return stats;
+        return new JvmStats(System.currentTimeMillis(), runtimeMXBean.getUptime(), mem, threads,
+            garbageCollectors, bufferPoolsList, classes);
     }

-    long timestamp = -1;
-    long uptime;
-    Mem mem;
-    Threads threads;
-    GarbageCollectors gc;
-    List<BufferPool> bufferPools;
-    Classes classes;
+    private final long timestamp;
+    private final long uptime;
+    private final Mem mem;
+    private final Threads threads;
+    private final GarbageCollectors gc;
+    private final List<BufferPool> bufferPools;
+    private final Classes classes;

-    private JvmStats() {
-    }
-
-    public JvmStats(long timestamp, long uptime) {
+    public JvmStats(long timestamp, long uptime, Mem mem, Threads threads, GarbageCollectors gc,
+                    List<BufferPool> bufferPools, Classes classes) {
         this.timestamp = timestamp;
         this.uptime = uptime;
+        this.mem = mem;
+        this.threads = threads;
+        this.gc = gc;
+        this.bufferPools = bufferPools;
+        this.classes = classes;
+    }
+
+    public JvmStats(StreamInput in) throws IOException {
+        timestamp = in.readVLong();
+        uptime = in.readVLong();
+        mem = new Mem(in);
+        threads = new Threads(in);
+        gc = new GarbageCollectors(in);
+        bufferPools = in.readList(BufferPool::new);
+        classes = new Classes(in);
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeVLong(timestamp);
+        out.writeVLong(uptime);
+        mem.writeTo(out);
+        threads.writeTo(out);
+        gc.writeTo(out);
+        out.writeList(bufferPools);
+        classes.writeTo(out);
     }

     public long getTimestamp() {
@@ -178,53 +191,50 @@ public class JvmStats implements Streamable, ToXContent {
         builder.startObject(Fields.JVM);
         builder.field(Fields.TIMESTAMP, timestamp);
         builder.timeValueField(Fields.UPTIME_IN_MILLIS, Fields.UPTIME, uptime);
-        if (mem != null) {
-            builder.startObject(Fields.MEM);
-
-            builder.byteSizeField(Fields.HEAP_USED_IN_BYTES, Fields.HEAP_USED, mem.heapUsed);
-            if (mem.getHeapUsedPercent() >= 0) {
-                builder.field(Fields.HEAP_USED_PERCENT, mem.getHeapUsedPercent());
-            }
-            builder.byteSizeField(Fields.HEAP_COMMITTED_IN_BYTES, Fields.HEAP_COMMITTED, mem.heapCommitted);
-            builder.byteSizeField(Fields.HEAP_MAX_IN_BYTES, Fields.HEAP_MAX, mem.heapMax);
-            builder.byteSizeField(Fields.NON_HEAP_USED_IN_BYTES, Fields.NON_HEAP_USED, mem.nonHeapUsed);
-            builder.byteSizeField(Fields.NON_HEAP_COMMITTED_IN_BYTES, Fields.NON_HEAP_COMMITTED, mem.nonHeapCommitted);
-
-            builder.startObject(Fields.POOLS);
-            for (MemoryPool pool : mem) {
-                builder.startObject(pool.getName());
-                builder.byteSizeField(Fields.USED_IN_BYTES, Fields.USED, pool.used);
-                builder.byteSizeField(Fields.MAX_IN_BYTES, Fields.MAX, pool.max);
-
-                builder.byteSizeField(Fields.PEAK_USED_IN_BYTES, Fields.PEAK_USED, pool.peakUsed);
-                builder.byteSizeField(Fields.PEAK_MAX_IN_BYTES, Fields.PEAK_MAX, pool.peakMax);
-
-                builder.endObject();
-            }
-            builder.endObject();
-
-            builder.endObject();
-        }
-        if (threads != null) {
-            builder.startObject(Fields.THREADS);
-            builder.field(Fields.COUNT, threads.getCount());
-            builder.field(Fields.PEAK_COUNT, threads.getPeakCount());
-            builder.endObject();
-        }
-        if (gc != null) {
-            builder.startObject(Fields.GC);
-
-            builder.startObject(Fields.COLLECTORS);
-            for (GarbageCollector collector : gc) {
-                builder.startObject(collector.getName());
-                builder.field(Fields.COLLECTION_COUNT, collector.getCollectionCount());
-                builder.timeValueField(Fields.COLLECTION_TIME_IN_MILLIS, Fields.COLLECTION_TIME, collector.collectionTime);
-                builder.endObject();
-            }
-            builder.endObject();
-
-            builder.endObject();
-        }
+        builder.startObject(Fields.MEM);
+
+        builder.byteSizeField(Fields.HEAP_USED_IN_BYTES, Fields.HEAP_USED, mem.heapUsed);
+        if (mem.getHeapUsedPercent() >= 0) {
+            builder.field(Fields.HEAP_USED_PERCENT, mem.getHeapUsedPercent());
+        }
+        builder.byteSizeField(Fields.HEAP_COMMITTED_IN_BYTES, Fields.HEAP_COMMITTED, mem.heapCommitted);
+        builder.byteSizeField(Fields.HEAP_MAX_IN_BYTES, Fields.HEAP_MAX, mem.heapMax);
+        builder.byteSizeField(Fields.NON_HEAP_USED_IN_BYTES, Fields.NON_HEAP_USED, mem.nonHeapUsed);
+        builder.byteSizeField(Fields.NON_HEAP_COMMITTED_IN_BYTES, Fields.NON_HEAP_COMMITTED, mem.nonHeapCommitted);
+
+        builder.startObject(Fields.POOLS);
+        for (MemoryPool pool : mem) {
+            builder.startObject(pool.getName());
+            builder.byteSizeField(Fields.USED_IN_BYTES, Fields.USED, pool.used);
+            builder.byteSizeField(Fields.MAX_IN_BYTES, Fields.MAX, pool.max);
+
+            builder.byteSizeField(Fields.PEAK_USED_IN_BYTES, Fields.PEAK_USED, pool.peakUsed);
+            builder.byteSizeField(Fields.PEAK_MAX_IN_BYTES, Fields.PEAK_MAX, pool.peakMax);
+
+            builder.endObject();
+        }
+        builder.endObject();
+
+        builder.endObject();
+
+        builder.startObject(Fields.THREADS);
+        builder.field(Fields.COUNT, threads.getCount());
+        builder.field(Fields.PEAK_COUNT, threads.getPeakCount());
+        builder.endObject();
+
+        builder.startObject(Fields.GC);
+
+        builder.startObject(Fields.COLLECTORS);
+        for (GarbageCollector collector : gc) {
+            builder.startObject(collector.getName());
+            builder.field(Fields.COLLECTION_COUNT, collector.getCollectionCount());
+            builder.timeValueField(Fields.COLLECTION_TIME_IN_MILLIS, Fields.COLLECTION_TIME, collector.collectionTime);
+            builder.endObject();
+        }
+        builder.endObject();
+
+        builder.endObject();

         if (bufferPools != null) {
             builder.startObject(Fields.BUFFER_POOLS);
@@ -238,13 +248,11 @@ public class JvmStats implements Streamable, ToXContent {
             builder.endObject();
         }

-        if (classes != null) {
-            builder.startObject(Fields.CLASSES);
-            builder.field(Fields.CURRENT_LOADED_COUNT, classes.getLoadedClassCount());
-            builder.field(Fields.TOTAL_LOADED_COUNT, classes.getTotalLoadedClassCount());
-            builder.field(Fields.TOTAL_UNLOADED_COUNT, classes.getUnloadedClassCount());
-            builder.endObject();
-        }
+        builder.startObject(Fields.CLASSES);
+        builder.field(Fields.CURRENT_LOADED_COUNT, classes.getLoadedClassCount());
+        builder.field(Fields.TOTAL_LOADED_COUNT, classes.getTotalLoadedClassCount());
+        builder.field(Fields.TOTAL_UNLOADED_COUNT, classes.getUnloadedClassCount());
+        builder.endObject();

         builder.endObject();
         return builder;
@@ -291,7 +299,6 @@ public class JvmStats implements Streamable, ToXContent {
         static final String COLLECTION_TIME_IN_MILLIS = "collection_time_in_millis";

         static final String BUFFER_POOLS = "buffer_pools";
-        static final String NAME = "name";
         static final String TOTAL_CAPACITY = "total_capacity";
         static final String TOTAL_CAPACITY_IN_BYTES = "total_capacity_in_bytes";

@@ -301,80 +308,21 @@ public class JvmStats implements Streamable, ToXContent {
         static final String TOTAL_UNLOADED_COUNT = "total_unloaded_count";
     }

-    public static JvmStats readJvmStats(StreamInput in) throws IOException {
-        JvmStats jvmStats = new JvmStats();
-        jvmStats.readFrom(in);
-        return jvmStats;
-    }
-
-    @Override
-    public void readFrom(StreamInput in) throws IOException {
-        timestamp = in.readVLong();
-        uptime = in.readVLong();
-
-        mem = Mem.readMem(in);
-        threads = Threads.readThreads(in);
-        gc = GarbageCollectors.readGarbageCollectors(in);
-
-        if (in.readBoolean()) {
-            int size = in.readVInt();
-            bufferPools = new ArrayList<>(size);
-            for (int i = 0; i < size; i++) {
-                BufferPool bufferPool = new BufferPool();
-                bufferPool.readFrom(in);
-                bufferPools.add(bufferPool);
-            }
-        }
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        out.writeVLong(timestamp);
-        out.writeVLong(uptime);
-
-        mem.writeTo(out);
-        threads.writeTo(out);
-        gc.writeTo(out);
-
-        if (bufferPools == null) {
-            out.writeBoolean(false);
-        } else {
-            out.writeBoolean(true);
-            out.writeVInt(bufferPools.size());
-            for (BufferPool bufferPool : bufferPools) {
-                bufferPool.writeTo(out);
-            }
-        }
-    }
-
-    public static class GarbageCollectors implements Streamable, Iterable<GarbageCollector> {
+    public static class GarbageCollectors implements Writeable, Iterable<GarbageCollector> {

-        GarbageCollector[] collectors;
+        private final GarbageCollector[] collectors;

-        GarbageCollectors() {
+        public GarbageCollectors(GarbageCollector[] collectors) {
+            this.collectors = collectors;
         }

-        public static GarbageCollectors readGarbageCollectors(StreamInput in) throws IOException {
-            GarbageCollectors collectors = new GarbageCollectors();
-            collectors.readFrom(in);
-            return collectors;
-        }
-
-        @Override
-        public void readFrom(StreamInput in) throws IOException {
-            collectors = new GarbageCollector[in.readVInt()];
-            for (int i = 0; i < collectors.length; i++) {
-                collectors[i] = GarbageCollector.readGarbageCollector(in);
-            }
+        public GarbageCollectors(StreamInput in) throws IOException {
+            collectors = in.readArray(GarbageCollector::new, GarbageCollector[]::new);
         }

         @Override
         public void writeTo(StreamOutput out) throws IOException {
-            out.writeVInt(collectors.length);
-            for (GarbageCollector gc : collectors) {
-                gc.writeTo(out);
-            }
+            out.writeArray(collectors);
         }

         public GarbageCollector[] getCollectors() {
@@ -387,23 +335,19 @@ public class JvmStats implements Streamable, ToXContent {
         }
     }

-    public static class GarbageCollector implements Streamable {
+    public static class GarbageCollector implements Writeable {

-        String name;
-        long collectionCount;
-        long collectionTime;
+        private final String name;
+        private final long collectionCount;
+        private final long collectionTime;

-        GarbageCollector() {
+        public GarbageCollector(String name, long collectionCount, long collectionTime) {
+            this.name = name;
+            this.collectionCount = collectionCount;
+            this.collectionTime = collectionTime;
         }

-        public static GarbageCollector readGarbageCollector(StreamInput in) throws IOException {
-            GarbageCollector gc = new GarbageCollector();
-            gc.readFrom(in);
-            return gc;
-        }
-
-        @Override
-        public void readFrom(StreamInput in) throws IOException {
+        public GarbageCollector(StreamInput in) throws IOException {
             name = in.readString();
             collectionCount = in.readVLong();
             collectionTime = in.readVLong();
@ -429,30 +373,17 @@ public class JvmStats implements Streamable, ToXContent {
|
|||
}
|
||||
}
|
||||
|
||||
public static class Threads implements Streamable {
|
||||
public static class Threads implements Writeable {
|
||||
|
||||
int count;
|
||||
int peakCount;
|
||||
private final int count;
|
||||
private final int peakCount;
|
||||
|
||||
Threads() {
|
||||
public Threads(int count, int peakCount) {
|
||||
this.count = count;
|
||||
this.peakCount = peakCount;
|
||||
}
|
||||
|
||||
public int getCount() {
|
||||
return count;
|
||||
}
|
||||
|
||||
public int getPeakCount() {
|
||||
return peakCount;
|
||||
}
|
||||
|
||||
public static Threads readThreads(StreamInput in) throws IOException {
|
||||
Threads threads = new Threads();
|
||||
threads.readFrom(in);
|
||||
return threads;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void readFrom(StreamInput in) throws IOException {
|
||||
public Threads(StreamInput in) throws IOException {
|
||||
count = in.readVInt();
|
||||
peakCount = in.readVInt();
|
||||
}
|
||||
|
@ -462,20 +393,23 @@ public class JvmStats implements Streamable, ToXContent {
|
|||
out.writeVInt(count);
|
||||
out.writeVInt(peakCount);
|
||||
}
|
||||
|
||||
public int getCount() {
|
||||
return count;
|
||||
}
|
||||
|
||||
public int getPeakCount() {
|
||||
return peakCount;
|
||||
}
|
||||
}
|
||||
|
||||
public static class MemoryPool implements Streamable {
|
||||
public static class MemoryPool implements Writeable {
|
||||
|
||||
String name;
|
||||
long used;
|
||||
long max;
|
||||
|
||||
long peakUsed;
|
||||
long peakMax;
|
||||
|
||||
MemoryPool() {
|
||||
|
||||
}
|
||||
private final String name;
|
||||
private final long used;
|
||||
private final long max;
|
||||
private final long peakUsed;
|
||||
private final long peakMax;
|
||||
|
||||
public MemoryPool(String name, long used, long max, long peakUsed, long peakMax) {
|
||||
this.name = name;
|
||||
|
@ -485,10 +419,21 @@ public class JvmStats implements Streamable, ToXContent {
|
|||
this.peakMax = peakMax;
|
||||
}
|
||||
|
||||
public static MemoryPool readMemoryPool(StreamInput in) throws IOException {
|
||||
MemoryPool pool = new MemoryPool();
|
||||
pool.readFrom(in);
|
||||
return pool;
|
||||
public MemoryPool(StreamInput in) throws IOException {
|
||||
name = in.readString();
|
||||
used = in.readVLong();
|
||||
max = in.readVLong();
|
||||
peakUsed = in.readVLong();
|
||||
peakMax = in.readVLong();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeString(name);
|
||||
out.writeVLong(used);
|
||||
out.writeVLong(max);
|
||||
out.writeVLong(peakUsed);
|
||||
out.writeVLong(peakMax);
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
|
@ -510,61 +455,33 @@ public class JvmStats implements Streamable, ToXContent {
|
|||
public ByteSizeValue getPeakMax() {
|
||||
return new ByteSizeValue(peakMax);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void readFrom(StreamInput in) throws IOException {
|
||||
name = in.readString();
|
||||
used = in.readVLong();
|
||||
max = in.readVLong();
|
||||
peakUsed = in.readVLong();
|
||||
peakMax = in.readVLong();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeString(name);
|
||||
out.writeVLong(used);
|
||||
out.writeVLong(max);
|
||||
out.writeVLong(peakUsed);
|
||||
out.writeVLong(peakMax);
|
||||
}
|
||||
}
|
||||
|
||||
public static class Mem implements Streamable, Iterable<MemoryPool> {
|
||||
public static class Mem implements Writeable, Iterable<MemoryPool> {
|
||||
|
||||
long heapCommitted;
|
||||
long heapUsed;
|
||||
long heapMax;
|
||||
long nonHeapCommitted;
|
||||
long nonHeapUsed;
|
||||
private final long heapCommitted;
|
||||
private final long heapUsed;
|
||||
private final long heapMax;
|
||||
private final long nonHeapCommitted;
|
||||
private final long nonHeapUsed;
|
||||
private final List<MemoryPool> pools;
|
||||
|
||||
MemoryPool[] pools = new MemoryPool[0];
|
||||
|
||||
Mem() {
|
||||
public Mem(long heapCommitted, long heapUsed, long heapMax, long nonHeapCommitted, long nonHeapUsed, List<MemoryPool> pools) {
|
||||
this.heapCommitted = heapCommitted;
|
||||
this.heapUsed = heapUsed;
|
||||
this.heapMax = heapMax;
|
||||
this.nonHeapCommitted = nonHeapCommitted;
|
||||
this.nonHeapUsed = nonHeapUsed;
|
||||
this.pools = pools;
|
||||
}
|
||||
|
||||
public static Mem readMem(StreamInput in) throws IOException {
|
||||
Mem mem = new Mem();
|
||||
mem.readFrom(in);
|
||||
return mem;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterator<MemoryPool> iterator() {
|
||||
return Arrays.stream(pools).iterator();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void readFrom(StreamInput in) throws IOException {
|
||||
public Mem(StreamInput in) throws IOException {
|
||||
heapCommitted = in.readVLong();
|
||||
heapUsed = in.readVLong();
|
||||
nonHeapCommitted = in.readVLong();
|
||||
nonHeapUsed = in.readVLong();
|
||||
heapMax = in.readVLong();
|
||||
pools = new MemoryPool[in.readVInt()];
|
||||
for (int i = 0; i < pools.length; i++) {
|
||||
pools[i] = MemoryPool.readMemoryPool(in);
|
||||
}
|
||||
pools = in.readList(MemoryPool::new);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -574,10 +491,12 @@ public class JvmStats implements Streamable, ToXContent {
|
|||
out.writeVLong(nonHeapCommitted);
|
||||
out.writeVLong(nonHeapUsed);
|
||||
out.writeVLong(heapMax);
|
||||
out.writeVInt(pools.length);
|
||||
for (MemoryPool pool : pools) {
|
||||
pool.writeTo(out);
|
||||
}
|
||||
out.writeList(pools);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Iterator<MemoryPool> iterator() {
|
||||
return pools.iterator();
|
||||
}
|
||||
|
||||
public ByteSizeValue getHeapCommitted() {
|
||||
|
@ -614,15 +533,12 @@ public class JvmStats implements Streamable, ToXContent {
|
|||
}
|
||||
}
|
||||
|
||||
public static class BufferPool implements Streamable {
|
||||
public static class BufferPool implements Writeable {
|
||||
|
||||
String name;
|
||||
long count;
|
||||
long totalCapacity;
|
||||
long used;
|
||||
|
||||
BufferPool() {
|
||||
}
|
||||
private final String name;
|
||||
private final long count;
|
||||
private final long totalCapacity;
|
||||
private final long used;
|
||||
|
||||
public BufferPool(String name, long count, long totalCapacity, long used) {
|
||||
this.name = name;
|
||||
|
@ -631,6 +547,21 @@ public class JvmStats implements Streamable, ToXContent {
|
|||
this.used = used;
|
||||
}
|
||||
|
||||
public BufferPool(StreamInput in) throws IOException {
|
||||
name = in.readString();
|
||||
count = in.readLong();
|
||||
totalCapacity = in.readLong();
|
||||
used = in.readLong();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeString(name);
|
||||
out.writeLong(count);
|
||||
out.writeLong(totalCapacity);
|
||||
out.writeLong(used);
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return this.name;
|
||||
}
|
||||
|
@ -646,32 +577,13 @@ public class JvmStats implements Streamable, ToXContent {
|
|||
public ByteSizeValue getUsed() {
|
||||
return new ByteSizeValue(used);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void readFrom(StreamInput in) throws IOException {
|
||||
name = in.readString();
|
||||
count = in.readLong();
|
||||
totalCapacity = in.readLong();
|
||||
used = in.readLong();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeString(name);
|
||||
out.writeLong(count);
|
||||
out.writeLong(totalCapacity);
|
||||
out.writeLong(used);
|
||||
}
|
||||
}
|
||||
|
||||
public static class Classes implements Streamable {
|
||||
public static class Classes implements Writeable {
|
||||
|
||||
long loadedClassCount;
|
||||
long totalLoadedClassCount;
|
||||
long unloadedClassCount;
|
||||
|
||||
Classes() {
|
||||
}
|
||||
private final long loadedClassCount;
|
||||
private final long totalLoadedClassCount;
|
||||
private final long unloadedClassCount;
|
||||
|
||||
public Classes(long loadedClassCount, long totalLoadedClassCount, long unloadedClassCount) {
|
||||
this.loadedClassCount = loadedClassCount;
|
||||
|
@ -679,6 +591,19 @@ public class JvmStats implements Streamable, ToXContent {
|
|||
this.unloadedClassCount = unloadedClassCount;
|
||||
}
|
||||
|
||||
public Classes(StreamInput in) throws IOException {
|
||||
loadedClassCount = in.readLong();
|
||||
totalLoadedClassCount = in.readLong();
|
||||
unloadedClassCount = in.readLong();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeLong(loadedClassCount);
|
||||
out.writeLong(totalLoadedClassCount);
|
||||
out.writeLong(unloadedClassCount);
|
||||
}
|
||||
|
||||
public long getLoadedClassCount() {
|
||||
return loadedClassCount;
|
||||
}
|
||||
|
@ -690,19 +615,5 @@ public class JvmStats implements Streamable, ToXContent {
|
|||
public long getUnloadedClassCount() {
|
||||
return unloadedClassCount;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void readFrom(StreamInput in) throws IOException {
|
||||
loadedClassCount = in.readLong();
|
||||
totalLoadedClassCount = in.readLong();
|
||||
unloadedClassCount = in.readLong();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeLong(loadedClassCount);
|
||||
out.writeLong(totalLoadedClassCount);
|
||||
out.writeLong(unloadedClassCount);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
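The shape every nested class in this file converges on is the standard Streamable-to-Writeable migration: mutable fields plus a readFrom(StreamInput) called on a default-constructed instance become final fields plus a constructor that takes the StreamInput directly. A minimal sketch of that target shape, using a hypothetical DiskStats class rather than anything from this commit:

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;

import java.io.IOException;

public class DiskStats implements Writeable {

    private final String path;      // immutable once constructed
    private final long freeBytes;   // assumed non-negative, hence the vlong encoding

    public DiskStats(String path, long freeBytes) {
        this.path = path;
        this.freeBytes = freeBytes;
    }

    // Deserialization happens in a constructor instead of a readFrom() that
    // mutates a half-initialized instance.
    public DiskStats(StreamInput in) throws IOException {
        path = in.readString();
        freeBytes = in.readVLong();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        // Must mirror the StreamInput constructor field for field.
        out.writeString(path);
        out.writeVLong(freeBytes);
    }
}

The payoff is the same here as in JvmStats above: the static readXxx factory methods and no-arg constructors disappear, and a method reference like DiskStats::new can serve as the Writeable.Reader.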
@@ -131,21 +131,9 @@ public class ProcessProbe {
}
public ProcessStats processStats() {
ProcessStats stats = new ProcessStats();
stats.timestamp = System.currentTimeMillis();
stats.openFileDescriptors = getOpenFileDescriptorCount();
stats.maxFileDescriptors = getMaxFileDescriptorCount();
ProcessStats.Cpu cpu = new ProcessStats.Cpu();
cpu.percent = getProcessCpuPercent();
cpu.total = getProcessCpuTotalTime();
stats.cpu = cpu;
ProcessStats.Mem mem = new ProcessStats.Mem();
mem.totalVirtual = getTotalVirtualMemorySize();
stats.mem = mem;
return stats;
ProcessStats.Cpu cpu = new ProcessStats.Cpu(getProcessCpuPercent(), getProcessCpuTotalTime());
ProcessStats.Mem mem = new ProcessStats.Mem(getTotalVirtualMemorySize());
return new ProcessStats(System.currentTimeMillis(), getOpenFileDescriptorCount(), getMaxFileDescriptorCount(), cpu, mem);
}
/**
@@ -21,7 +21,7 @@ package org.elasticsearch.monitor.process;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContent;

@@ -29,18 +29,37 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
public class ProcessStats implements Streamable, ToXContent {
public class ProcessStats implements Writeable, ToXContent {
long timestamp = -1;
private final long timestamp;
private final long openFileDescriptors;
private final long maxFileDescriptors;
private final Cpu cpu;
private final Mem mem;
long openFileDescriptors = -1;
long maxFileDescriptors = -1;
public ProcessStats(long timestamp, long openFileDescriptors, long maxFileDescriptors, Cpu cpu, Mem mem) {
this.timestamp = timestamp;
this.openFileDescriptors = openFileDescriptors;
this.maxFileDescriptors = maxFileDescriptors;
this.cpu = cpu;
this.mem = mem;
}
Cpu cpu = null;
public ProcessStats(StreamInput in) throws IOException {
timestamp = in.readVLong();
openFileDescriptors = in.readLong();
maxFileDescriptors = in.readLong();
cpu = in.readOptionalWriteable(Cpu::new);
mem = in.readOptionalWriteable(Mem::new);
}
Mem mem = null;
ProcessStats() {
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVLong(timestamp);
out.writeLong(openFileDescriptors);
out.writeLong(maxFileDescriptors);
out.writeOptionalWriteable(cpu);
out.writeOptionalWriteable(mem);
}
public long getTimestamp() {

@@ -100,59 +119,15 @@ public class ProcessStats implements Streamable, ToXContent {
return builder;
}
public static ProcessStats readProcessStats(StreamInput in) throws IOException {
ProcessStats stats = new ProcessStats();
stats.readFrom(in);
return stats;
}
public static class Mem implements Writeable {
@Override
public void readFrom(StreamInput in) throws IOException {
timestamp = in.readVLong();
openFileDescriptors = in.readLong();
maxFileDescriptors = in.readLong();
if (in.readBoolean()) {
cpu = Cpu.readCpu(in);
}
if (in.readBoolean()) {
mem = Mem.readMem(in);
}
}
private final long totalVirtual;
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVLong(timestamp);
out.writeLong(openFileDescriptors);
out.writeLong(maxFileDescriptors);
if (cpu == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
cpu.writeTo(out);
}
if (mem == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
mem.writeTo(out);
}
}
public static class Mem implements Streamable {
long totalVirtual = -1;
Mem() {
public Mem(long totalVirtual) {
this.totalVirtual = totalVirtual;
}
public static Mem readMem(StreamInput in) throws IOException {
Mem mem = new Mem();
mem.readFrom(in);
return mem;
}
@Override
public void readFrom(StreamInput in) throws IOException {
public Mem(StreamInput in) throws IOException {
totalVirtual = in.readLong();
}

@@ -166,23 +141,17 @@ public class ProcessStats implements Streamable, ToXContent {
}
}
public static class Cpu implements Streamable {
public static class Cpu implements Writeable {
short percent = -1;
long total = -1;
Cpu() {
private final short percent;
private final long total;
public Cpu(short percent, long total) {
this.percent = percent;
this.total = total;
}
public static Cpu readCpu(StreamInput in) throws IOException {
Cpu cpu = new Cpu();
cpu.readFrom(in);
return cpu;
}
@Override
public void readFrom(StreamInput in) throws IOException {
public Cpu(StreamInput in) throws IOException {
percent = in.readShort();
total = in.readLong();
}
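Worth calling out in the ProcessStats hunks above: the hand-rolled "write a boolean, then maybe the value" protocol around the nullable cpu and mem sub-objects collapses into readOptionalWriteable/writeOptionalWriteable, which produce the same wire format in a single call. A sketch of the pair in isolation, using hypothetical NetInfo/NetStats classes invented for illustration:

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;

import java.io.IOException;

// Hypothetical classes, used only to illustrate the optional-writeable protocol.
public class NetInfo implements Writeable {

    public static class NetStats implements Writeable {
        private final long bytesSent;

        public NetStats(long bytesSent) {
            this.bytesSent = bytesSent;
        }

        public NetStats(StreamInput in) throws IOException {
            bytesSent = in.readVLong();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeVLong(bytesSent);
        }
    }

    private final NetStats stats; // may be null

    public NetInfo(NetStats stats) {
        this.stats = stats;
    }

    public NetInfo(StreamInput in) throws IOException {
        // Replaces the old "if (in.readBoolean()) { stats = NetStats.readNetStats(in); }" dance.
        stats = in.readOptionalWriteable(NetStats::new);
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        // Replaces the manual writeBoolean(stats != null) followed by stats.writeTo(out).
        out.writeOptionalWriteable(stats);
    }
}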
@@ -269,12 +269,6 @@ public class Node implements Closeable {
logger.debug("using config [{}], data [{}], logs [{}], plugins [{}]",
environment.configFile(), Arrays.toString(environment.dataFiles()), environment.logsFile(), environment.pluginsFile());
}
// TODO: Remove this in Elasticsearch 6.0.0
if (JsonXContent.unquotedFieldNamesSet) {
DeprecationLogger dLogger = new DeprecationLogger(logger);
dLogger.deprecated("[{}] has been set, but will be removed in Elasticsearch 6.0.0",
JsonXContent.JSON_ALLOW_UNQUOTED_FIELD_NAMES);
}
this.pluginsService = new PluginsService(tmpSettings, environment.modulesFile(), environment.pluginsFile(), classpathPlugins);
this.settings = pluginsService.updatedSettings();

@@ -455,7 +449,7 @@ public class Node implements Closeable {
/**
* Start the node. If the node is already started, this method is no-op.
*/
public Node start() {
public Node start() throws NodeValidationException {
if (!lifecycle.moveToStarted()) {
return this;
}

@@ -606,24 +600,6 @@ public class Node implements Closeable {
injector.getInstance(IndicesService.class).stop();
logger.info("stopped");
final String log4jShutdownEnabled = System.getProperty("es.log4j.shutdownEnabled", "true");
final boolean shutdownEnabled;
switch (log4jShutdownEnabled) {
case "true":
shutdownEnabled = true;
break;
case "false":
shutdownEnabled = false;
break;
default:
throw new IllegalArgumentException(
"invalid value for [es.log4j.shutdownEnabled], was [" + log4jShutdownEnabled + "] but must be [true] or [false]");
}
if (shutdownEnabled) {
LoggerContext context = (LoggerContext) LogManager.getContext(false);
Configurator.shutdown(context);
}
return this;
}

@@ -715,6 +691,24 @@ public class Node implements Closeable {
}
IOUtils.close(toClose);
logger.info("closed");
final String log4jShutdownEnabled = System.getProperty("es.log4j.shutdownEnabled", "true");
final boolean shutdownEnabled;
switch (log4jShutdownEnabled) {
case "true":
shutdownEnabled = true;
break;
case "false":
shutdownEnabled = false;
break;
default:
throw new IllegalArgumentException(
"invalid value for [es.log4j.shutdownEnabled], was [" + log4jShutdownEnabled + "] but must be [true] or [false]");
}
if (shutdownEnabled) {
LoggerContext context = (LoggerContext) LogManager.getContext(false);
Configurator.shutdown(context);
}
}

@@ -740,7 +734,9 @@ public class Node implements Closeable {
* bound and publishing to
*/
@SuppressWarnings("unused")
protected void validateNodeBeforeAcceptingRequests(Settings settings, BoundTransportAddress boundTransportAddress) {
protected void validateNodeBeforeAcceptingRequests(
final Settings settings,
final BoundTransportAddress boundTransportAddress) throws NodeValidationException {
}
/** Writes a file to the logs dir containing the ports for the given transport type */
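A small idiom hiding in the Node.java hunks above, where the log4j shutdown block moves from stop() to close(): es.log4j.shutdownEnabled is parsed with a switch rather than Boolean.parseBoolean, so a typo or a "yes"/"1" value fails loudly instead of silently coercing to false. The same idiom as a standalone helper, with names of our own choosing rather than anything from this commit:

// Hypothetical helper showing the strict-parse idiom used for es.log4j.shutdownEnabled.
public final class SystemProperties {

    public static boolean strictBooleanProperty(String key, boolean defaultValue) {
        final String value = System.getProperty(key, Boolean.toString(defaultValue));
        switch (value) {
            case "true":
                return true;
            case "false":
                return false;
            default:
                // Boolean.parseBoolean would map anything non-"true" to false; fail instead.
                throw new IllegalArgumentException(
                    "invalid value for [" + key + "], was [" + value + "] but must be [true] or [false]");
        }
    }
}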
@@ -0,0 +1,44 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.node;

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.BoundTransportAddress;

/**
* An exception thrown during node validation. Node validation runs immediately before a node
* begins accepting network requests in
* {@link Node#validateNodeBeforeAcceptingRequests(Settings, BoundTransportAddress)}. This
* exception is a checked exception that is declared as thrown from this method for the purpose
* of bubbling up to the user.
*/
public class NodeValidationException extends Exception {

/**
* Creates a node validation exception with the specified validation message to be displayed to
* the user.
*
* @param message the message to display to the user
*/
public NodeValidationException(final String message) {
super(message);
}

}
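The new checked exception only pays off through the protected hook it is declared on. A sketch of what a subclass hooking into validateNodeBeforeAcceptingRequests might look like; the StrictNode name and the TLS check are invented for illustration, not part of this commit:

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.BoundTransportAddress;
import org.elasticsearch.node.Node;
import org.elasticsearch.node.NodeValidationException;

// Hypothetical subclass: the validation rule itself is illustrative only.
public class StrictNode extends Node {

    public StrictNode(Settings settings) {
        super(settings);
    }

    @Override
    protected void validateNodeBeforeAcceptingRequests(
            final Settings settings,
            final BoundTransportAddress boundTransportAddress) throws NodeValidationException {
        // Runs after the node is bound but before it accepts network requests,
        // so a failure here surfaces to the user instead of leaving a half-started node.
        if (settings.getAsBoolean("hypothetical.require_tls", false)) {
            throw new NodeValidationException("TLS is required but is not configured");
        }
    }
}

Because start() now declares throws NodeValidationException, callers of Node#start() see the failure as an ordinary checked exception rather than a logged-and-swallowed error.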
@@ -30,6 +30,8 @@ import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParser;
import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder;
import org.elasticsearch.index.query.functionscore.ScoreFunctionParser;
import org.elasticsearch.search.SearchExtBuilder;
import org.elasticsearch.search.SearchExtParser;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.Aggregator;

@@ -82,6 +84,12 @@ public interface SearchPlugin {
default List<FetchSubPhase> getFetchSubPhases(FetchPhaseConstructionContext context) {
return emptyList();
}
/**
* The new {@link SearchExtParser}s defined by this plugin.
*/
default List<SearchExtSpec<?>> getSearchExts() {
return emptyList();
}
/**
* Get the {@link Highlighter}s defined by this plugin.
*/

@@ -160,7 +168,7 @@ public interface SearchPlugin {
/**
* Specification for an {@link Aggregation}.
*/
public static class AggregationSpec extends SearchExtensionSpec<AggregationBuilder, Aggregator.Parser> {
class AggregationSpec extends SearchExtensionSpec<AggregationBuilder, Aggregator.Parser> {
private final Map<String, Writeable.Reader<? extends InternalAggregation>> resultReaders = new TreeMap<>();
/**

@@ -217,7 +225,7 @@ public interface SearchPlugin {
/**
* Specification for a {@link PipelineAggregator}.
*/
public static class PipelineAggregationSpec extends SearchExtensionSpec<PipelineAggregationBuilder, PipelineAggregator.Parser> {
class PipelineAggregationSpec extends SearchExtensionSpec<PipelineAggregationBuilder, PipelineAggregator.Parser> {
private final Map<String, Writeable.Reader<? extends InternalAggregation>> resultReaders = new TreeMap<>();
private final Writeable.Reader<? extends PipelineAggregator> aggregatorReader;

@@ -290,6 +298,19 @@ public interface SearchPlugin {
}
}
/**
* Specification for a {@link SearchExtBuilder} which represents an additional section that can be
* parsed in a search request (within the ext element).
*/
class SearchExtSpec<T extends SearchExtBuilder> extends SearchExtensionSpec<T, SearchExtParser<T>> {
public SearchExtSpec(ParseField name, Writeable.Reader<? extends T> reader, SearchExtParser<T> parser) {
super(name, reader, parser);
}
public SearchExtSpec(String name, Writeable.Reader<? extends T> reader, SearchExtParser<T> parser) {
super(name, reader, parser);
}
}
/**
* Specification of search time behavior extension like a custom {@link MovAvgModel} or {@link ScoreFunction}.
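Tying the new extension point together, a plugin overrides getSearchExts() to hand the module a name, a stream reader, and a parser in one spec. A minimal sketch, assuming a hypothetical MyAnnotationsExtBuilder like the one sketched further down after the new SearchExtBuilder file ("my_annotations" is an invented section name, not from this commit):

import java.util.List;

import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.SearchPlugin;

import static java.util.Collections.singletonList;

// Hypothetical plugin wiring an ext section named "my_annotations".
public class MyAnnotationsPlugin extends Plugin implements SearchPlugin {

    @Override
    public List<SearchExtSpec<?>> getSearchExts() {
        // One spec binds the section name inside "ext", the wire reader,
        // and the XContent parser for the REST layer.
        return singletonList(new SearchExtSpec<>("my_annotations",
                MyAnnotationsExtBuilder::new, MyAnnotationsExtBuilder::fromXContent));
    }
}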
@@ -19,7 +19,6 @@
package org.elasticsearch.rest.action.admin.indices;
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest;
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;

@@ -41,7 +40,6 @@ public class RestPutIndexTemplateAction extends BaseRestHandler {
controller.registerHandler(RestRequest.Method.POST, "/_template/{name}", this);
}
@SuppressWarnings({"unchecked"})
@Override
public void handleRequest(final RestRequest request, final RestChannel channel, final NodeClient client) {
PutIndexTemplateRequest putRequest = new PutIndexTemplateRequest(request.param("name"));

@@ -51,6 +49,6 @@ public class RestPutIndexTemplateAction extends BaseRestHandler {
putRequest.create(request.paramAsBoolean("create", false));
putRequest.cause(request.param("cause", ""));
putRequest.source(request.content());
client.admin().indices().putTemplate(putRequest, new AcknowledgedRestListener<PutIndexTemplateResponse>(channel));
client.admin().indices().putTemplate(putRequest, new AcknowledgedRestListener<>(channel));
}
}
@@ -19,10 +19,6 @@
package org.elasticsearch.rest.action.search;
import java.io.IOException;
import java.util.Map;
import java.util.function.BiConsumer;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.search.MultiSearchRequest;
import org.elasticsearch.action.search.SearchRequest;

@@ -40,12 +36,16 @@ import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestChannel;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.action.RestActions;
import org.elasticsearch.rest.action.RestToXContentListener;
import org.elasticsearch.search.SearchRequestParsers;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import java.io.IOException;
import java.util.Map;
import java.util.function.BiConsumer;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.lenientNodeBooleanValue;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeStringArrayValue;
import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeStringValue;

@@ -97,7 +97,7 @@ public class RestMultiSearchAction extends BaseRestHandler {
final QueryParseContext queryParseContext = new QueryParseContext(searchRequestParsers.queryParsers,
requestParser, parseFieldMatcher);
searchRequest.source(SearchSourceBuilder.fromXContent(queryParseContext,
searchRequestParsers.aggParsers, searchRequestParsers.suggesters));
searchRequestParsers.aggParsers, searchRequestParsers.suggesters, searchRequestParsers.searchExtParsers));
multiRequest.add(searchRequest);
} catch (IOException e) {
throw new ElasticsearchParseException("Exception when parsing search request", e);
@@ -102,7 +102,8 @@ public class RestSearchAction extends BaseRestHandler {
if (restContent != null) {
try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) {
QueryParseContext context = new QueryParseContext(searchRequestParsers.queryParsers, parser, parseFieldMatcher);
searchRequest.source().parseXContent(context, searchRequestParsers.aggParsers, searchRequestParsers.suggesters);
searchRequest.source().parseXContent(context, searchRequestParsers.aggParsers, searchRequestParsers.suggesters,
searchRequestParsers.searchExtParsers);
}
}
@@ -24,6 +24,7 @@ import org.elasticsearch.cluster.Diff;
import org.elasticsearch.cluster.DiffableUtils;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

@@ -65,7 +66,7 @@ public final class ScriptMetaData implements MetaData.Custom {
if (scriptAsBytes == null) {
return null;
}
return parseStoredScript(scriptAsBytes);
return scriptAsBytes.utf8ToString();
}
public static String parseStoredScript(BytesReference scriptAsBytes) {

@@ -78,6 +79,9 @@ public final class ScriptMetaData implements MetaData.Custom {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) {
parser.nextToken();
parser.nextToken();
if (parser.currentToken() == Token.END_OBJECT) {
throw new IllegalArgumentException("Empty script");
}
switch (parser.currentName()) {
case "script":
case "template":

@@ -115,10 +119,8 @@ public final class ScriptMetaData implements MetaData.Custom {
case FIELD_NAME:
key = parser.currentName();
break;
case START_OBJECT:
XContentBuilder contentBuilder = XContentBuilder.builder(parser.contentType().xContent());
contentBuilder.copyCurrentStructure(parser);
scripts.put(key, new ScriptAsBytes(contentBuilder.bytes()));
case VALUE_STRING:
scripts.put(key, new ScriptAsBytes(new BytesArray(parser.text())));
break;
default:
throw new ParsingException(parser.getTokenLocation(), "Unexpected token [" + token + "]");

@@ -147,7 +149,7 @@ public final class ScriptMetaData implements MetaData.Custom {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
for (Map.Entry<String, ScriptAsBytes> entry : scripts.entrySet()) {
builder.rawField(entry.getKey(), entry.getValue().script);
builder.field(entry.getKey(), entry.getValue().script.utf8ToString());
}
return builder;
}

@@ -188,8 +190,8 @@ public final class ScriptMetaData implements MetaData.Custom {
@Override
public String toString() {
return "ScriptMetaData{" +
"scripts=" + scripts +
'}';
"scripts=" + scripts +
'}';
}
static String toKey(String language, String id) {

@@ -216,7 +218,8 @@ public final class ScriptMetaData implements MetaData.Custom {
}
public Builder storeScript(String lang, String id, BytesReference script) {
scripts.put(toKey(lang, id), new ScriptAsBytes(script));
BytesReference scriptBytes = new BytesArray(parseStoredScript(script));
scripts.put(toKey(lang, id), new ScriptAsBytes(scriptBytes));
return this;
}
@@ -21,39 +21,22 @@ package org.elasticsearch.script;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
public class ScriptStats implements Streamable, ToXContent {
private long compilations;
private long cacheEvictions;
public ScriptStats() {
}
public class ScriptStats implements Writeable, ToXContent {
private final long compilations;
private final long cacheEvictions;
public ScriptStats(long compilations, long cacheEvictions) {
this.compilations = compilations;
this.cacheEvictions = cacheEvictions;
}
public void add(ScriptStats stats) {
this.compilations += stats.compilations;
this.cacheEvictions += stats.cacheEvictions;
}
public long getCompilations() {
return compilations;
}
public long getCacheEvictions() {
return cacheEvictions;
}
@Override
public void readFrom(StreamInput in) throws IOException {
public ScriptStats(StreamInput in) throws IOException {
compilations = in.readVLong();
cacheEvictions = in.readVLong();
}

@@ -64,6 +47,14 @@ public class ScriptStats implements Streamable, ToXContent {
out.writeVLong(cacheEvictions);
}
public long getCompilations() {
return compilations;
}
public long getCacheEvictions() {
return cacheEvictions;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(Fields.SCRIPT_STATS);
@@ -0,0 +1,51 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.search;

import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.plugins.SearchPlugin;
import org.elasticsearch.plugins.SearchPlugin.SearchExtSpec;

/**
* Intermediate serializable representation of a search ext section. To be subclassed by plugins that support
* a custom section as part of a search request, which will be provided within the ext element.
* Any state needs to be serialized as part of the {@link Writeable#writeTo(StreamOutput)} method and
* read from the incoming stream, usually done adding a constructor that takes {@link StreamInput} as
* an argument.
*
* Registration happens through {@link SearchPlugin#getSearchExts()}, which also needs a {@link SearchExtParser} that's able to parse
* the incoming request from the REST layer into the proper {@link SearchExtBuilder} subclass.
*
* {@link #getWriteableName()} must return the same name as the one used for the registration
* of the {@link SearchExtSpec}.
*
* @see SearchExtParser
* @see SearchExtSpec
*/
public abstract class SearchExtBuilder implements NamedWriteable, ToXContent {

public abstract int hashCode();

public abstract boolean equals(Object obj);
}
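A concrete subclass following the contract the javadoc above spells out might look like the sketch below. Everything here is hypothetical (the "my_annotations" section carrying a single "tag" field), and the parser assumes it is handed the object positioned at its opening brace; it is a shape illustration, not code from this commit:

import java.io.IOException;
import java.util.Objects;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.SearchExtBuilder;

// Hypothetical ext section: { "ext": { "my_annotations": { "tag": "..." } } }
public class MyAnnotationsExtBuilder extends SearchExtBuilder {

    public static final String NAME = "my_annotations";

    private final String tag;

    public MyAnnotationsExtBuilder(String tag) {
        this.tag = tag;
    }

    // State travels over the wire via the StreamInput constructor / writeTo pair.
    public MyAnnotationsExtBuilder(StreamInput in) throws IOException {
        tag = in.readString();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(tag);
    }

    // Must match the name the SearchExtSpec was registered under.
    @Override
    public String getWriteableName() {
        return NAME;
    }

    public static MyAnnotationsExtBuilder fromXContent(XContentParser parser) throws IOException {
        String tag = null;
        String fieldName = null;
        XContentParser.Token token;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                fieldName = parser.currentName();
            } else if (token.isValue() && "tag".equals(fieldName)) {
                tag = parser.text();
            }
        }
        return new MyAnnotationsExtBuilder(tag);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        return builder.startObject(NAME).field("tag", tag).endObject();
    }

    @Override
    public int hashCode() {
        return Objects.hash(tag);
    }

    @Override
    public boolean equals(Object obj) {
        return obj instanceof MyAnnotationsExtBuilder
                && Objects.equals(tag, ((MyAnnotationsExtBuilder) obj).tag);
    }
}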
@@ -0,0 +1,43 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.search;

import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;

/**
* Defines a parser that is able to parse {@link org.elasticsearch.search.SearchExtBuilder}s
* from {@link org.elasticsearch.common.xcontent.XContent}.
*
* Registration happens through {@link org.elasticsearch.plugins.SearchPlugin#getSearchExts()}, which also needs a {@link SearchExtBuilder}
* implementation which is the object that this parser returns when reading an incoming request from the REST layer.
*
* @see SearchExtBuilder
* @see org.elasticsearch.plugins.SearchPlugin.SearchExtSpec
*/
@FunctionalInterface
public interface SearchExtParser<T extends SearchExtBuilder> {

/**
* Parses the supported element placed within the ext section of a search request
*/
T fromXContent(XContentParser parser) throws IOException;
}
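Because the interface is a @FunctionalInterface with a single fromXContent method, a static factory method reference satisfies it directly; a tiny sketch, reusing the hypothetical builder from above:

import org.elasticsearch.search.SearchExtParser;

// Hypothetical holder: MyAnnotationsExtBuilder is the sketch shown earlier.
public class ParserHolder {
    // A method reference is a complete SearchExtParser implementation.
    static final SearchExtParser<MyAnnotationsExtBuilder> PARSER =
            MyAnnotationsExtBuilder::fromXContent;
}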
@@ -19,13 +19,14 @@
package org.elasticsearch.search;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.common.xcontent.ParseFieldRegistry;
/**
*
* Extensions to ParseFieldRegistry to make Guice happy.
*/
public interface SearchParseElement {
public class SearchExtRegistry extends ParseFieldRegistry<SearchExtParser> {
void parse(XContentParser parser, SearchContext context) throws Exception;
public SearchExtRegistry() {
super("ext");
}
}
@@ -93,6 +93,7 @@ import org.elasticsearch.plugins.SearchPlugin.FetchPhaseConstructionContext;
import org.elasticsearch.plugins.SearchPlugin.PipelineAggregationSpec;
import org.elasticsearch.plugins.SearchPlugin.QuerySpec;
import org.elasticsearch.plugins.SearchPlugin.ScoreFunctionSpec;
import org.elasticsearch.plugins.SearchPlugin.SearchExtSpec;
import org.elasticsearch.plugins.SearchPlugin.SearchExtensionSpec;
import org.elasticsearch.search.action.SearchTransportService;
import org.elasticsearch.search.aggregations.AggregationBuilder;

@@ -306,6 +307,7 @@ public class SearchModule extends AbstractModule {
"moving_avg_model");
private final List<FetchSubPhase> fetchSubPhases = new ArrayList<>();
private final SearchExtRegistry searchExtParserRegistry = new SearchExtRegistry();
private final Settings settings;
private final List<Entry> namedWriteables = new ArrayList<>();

@@ -326,8 +328,9 @@ public class SearchModule extends AbstractModule {
registerAggregations(plugins);
registerPipelineAggregations(plugins);
registerFetchSubPhases(plugins);
registerSearchExts(plugins);
registerShapes();
searchRequestParsers = new SearchRequestParsers(queryParserRegistry, aggregatorParsers, getSuggesters());
searchRequestParsers = new SearchRequestParsers(queryParserRegistry, aggregatorParsers, getSuggesters(), searchExtParserRegistry);
}
public List<Entry> getNamedWriteables() {

@@ -380,6 +383,7 @@ public class SearchModule extends AbstractModule {
if (false == transportClient) {
bind(IndicesQueriesRegistry.class).toInstance(queryParserRegistry);
bind(SearchRequestParsers.class).toInstance(searchRequestParsers);
bind(SearchExtRegistry.class).toInstance(searchExtParserRegistry);
configureSearch();
}
}

@@ -725,6 +729,15 @@ public class SearchModule extends AbstractModule {
registerFromPlugin(plugins, p -> p.getFetchSubPhases(context), this::registerFetchSubPhase);
}
private void registerSearchExts(List<SearchPlugin> plugins) {
registerFromPlugin(plugins, SearchPlugin::getSearchExts, this::registerSearchExt);
}
private void registerSearchExt(SearchExtSpec<?> spec) {
searchExtParserRegistry.register(spec.getParser(), spec.getName());
namedWriteables.add(new Entry(SearchExtBuilder.class, spec.getName().getPreferredName(), spec.getReader()));
}
private void registerFetchSubPhase(FetchSubPhase subPhase) {
Class<?> subPhaseClass = subPhase.getClass();
if (fetchSubPhases.stream().anyMatch(p -> p.getClass().equals(subPhaseClass))) {
@@ -21,22 +21,18 @@ package org.elasticsearch.search;
import org.elasticsearch.search.internal.SearchContext;
import java.util.Collections;
import java.util.Map;
/**
*
* Represents a phase of a search request e.g. query, fetch etc.
*/
public interface SearchPhase {
default Map<String, ? extends SearchParseElement> parseElements() {
return Collections.emptyMap();
}
/**
* Performs pre processing of the search context before the execute.
*/
void preProcess(SearchContext context);
/**
* Executes the search phase
*/
void execute(SearchContext context);
}
@@ -37,7 +37,8 @@ public class SearchRequestParsers {
/**
* Query parsers that may be used in search requests.
* @see org.elasticsearch.index.query.QueryParseContext
* @see org.elasticsearch.search.builder.SearchSourceBuilder#fromXContent(QueryParseContext, AggregatorParsers, Suggesters)
* @see org.elasticsearch.search.builder.SearchSourceBuilder#fromXContent(QueryParseContext, AggregatorParsers,
* Suggesters, SearchExtRegistry)
*/
public final IndicesQueriesRegistry queryParsers;

@@ -45,20 +46,29 @@ public class SearchRequestParsers {
// and pipeline agg parsers should be here
/**
* Agg and pipeline agg parsers that may be used in search requests.
* @see org.elasticsearch.search.builder.SearchSourceBuilder#fromXContent(QueryParseContext, AggregatorParsers, Suggesters)
* @see org.elasticsearch.search.builder.SearchSourceBuilder#fromXContent(QueryParseContext, AggregatorParsers,
* Suggesters, SearchExtRegistry)
*/
public final AggregatorParsers aggParsers;
// TODO: Suggesters should be removed and the underlying map moved here
/**
* Suggesters that may be used in search requests.
* @see org.elasticsearch.search.builder.SearchSourceBuilder#fromXContent(QueryParseContext, AggregatorParsers, Suggesters)
* @see org.elasticsearch.search.builder.SearchSourceBuilder#fromXContent(QueryParseContext, AggregatorParsers,
* Suggesters, SearchExtRegistry)
*/
public final Suggesters suggesters;
public SearchRequestParsers(IndicesQueriesRegistry queryParsers, AggregatorParsers aggParsers, Suggesters suggesters) {
/**
* Pluggable section that can be parsed out of a search section, within the ext element
*/
public final SearchExtRegistry searchExtParsers;
public SearchRequestParsers(IndicesQueriesRegistry queryParsers, AggregatorParsers aggParsers, Suggesters suggesters,
SearchExtRegistry searchExtParsers) {
this.queryParsers = queryParsers;
this.aggParsers = aggParsers;
this.suggesters = suggesters;
this.searchExtParsers = searchExtParsers;
}
}
@@ -39,9 +39,6 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.common.util.concurrent.ConcurrentMapLong;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentLocation;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.engine.Engine;

@@ -67,8 +64,6 @@ import org.elasticsearch.search.fetch.QueryFetchSearchResult;
import org.elasticsearch.search.fetch.ScrollQueryFetchSearchResult;
import org.elasticsearch.search.fetch.ShardFetchRequest;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.DocValueField;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsFetchSubPhase;
import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext.ScriptField;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.internal.DefaultSearchContext;

@@ -102,7 +97,6 @@ import java.util.Optional;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicLong;
import static java.util.Collections.unmodifiableMap;
import static org.elasticsearch.common.unit.TimeValue.timeValueMillis;
import static org.elasticsearch.common.unit.TimeValue.timeValueMinutes;

@@ -145,8 +139,6 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
private final ConcurrentMapLong<SearchContext> activeContexts = ConcurrentCollections.newConcurrentMapLongWithAggressiveConcurrency();
private final Map<String, SearchParseElement> elementParsers;
private final ParseFieldMatcher parseFieldMatcher;
@Inject

@@ -165,12 +157,6 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
TimeValue keepAliveInterval = KEEPALIVE_INTERVAL_SETTING.get(settings);
this.defaultKeepAlive = DEFAULT_KEEPALIVE_SETTING.get(settings).millis();
Map<String, SearchParseElement> elementParsers = new HashMap<>();
elementParsers.putAll(dfsPhase.parseElements());
elementParsers.putAll(queryPhase.parseElements());
elementParsers.putAll(fetchPhase.parseElements());
this.elementParsers = unmodifiableMap(elementParsers);
this.keepAliveReaper = threadPool.scheduleWithFixedDelay(new Reaper(), keepAliveInterval, Names.SAME);
defaultSearchTimeout = DEFAULT_SEARCH_TIMEOUT_SETTING.get(settings);

@@ -465,7 +451,8 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
throw ExceptionsHelper.convertToRuntime(e);
}
operationListener.onFetchPhase(context, System.nanoTime() - time2);
return new ScrollQueryFetchSearchResult(new QueryFetchSearchResult(context.queryResult(), context.fetchResult()), context.shardTarget());
return new ScrollQueryFetchSearchResult(new QueryFetchSearchResult(context.queryResult(), context.fetchResult()),
context.shardTarget());
} catch (Exception e) {
logger.trace("Fetch phase failed", e);
processFailure(context, e);

@@ -736,11 +723,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
context.fetchSourceContext(source.fetchSource());
}
if (source.docValueFields() != null) {
DocValueFieldsContext docValuesFieldsContext = context.getFetchSubPhaseContext(DocValueFieldsFetchSubPhase.CONTEXT_FACTORY);
for (String field : source.docValueFields()) {
docValuesFieldsContext.add(new DocValueField(field));
}
docValuesFieldsContext.setHitExecutionNeeded(true);
context.docValueFieldsContext(new DocValueFieldsContext(source.docValueFields()));
}
if (source.highlighter() != null) {
HighlightBuilder highlightBuilder = source.highlighter();

@@ -758,43 +741,8 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
}
}
if (source.ext() != null) {
XContentParser extParser = null;
try {
extParser = XContentFactory.xContent(source.ext()).createParser(source.ext());
XContentParser.Token token = extParser.nextToken();
String currentFieldName = null;
while ((token = extParser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = extParser.currentName();
} else {
SearchParseElement parseElement = this.elementParsers.get(currentFieldName);
if (parseElement == null) {
if (currentFieldName != null && currentFieldName.equals("suggest")) {
throw new SearchParseException(context,
"suggest is not supported in [ext], please use SearchSourceBuilder#suggest(SuggestBuilder) instead",
extParser.getTokenLocation());
}
throw new SearchParseException(context, "Unknown element [" + currentFieldName + "] in [ext]",
extParser.getTokenLocation());
} else {
parseElement.parse(extParser, context);
}
}
}
} catch (Exception e) {
String sSource = "_na_";
try {
sSource = source.toString();
} catch (Exception inner) {
e.addSuppressed(inner);
// ignore
}
XContentLocation location = extParser != null ? extParser.getTokenLocation() : null;
throw new SearchParseException(context, "failed to parse ext source [" + sSource + "]", location, e);
} finally {
if (extParser != null) {
extParser.close();
}
for (SearchExtBuilder searchExtBuilder : source.ext()) {
context.addSearchExt(searchExtBuilder);
}
}
if (source.version() != null) {

@@ -910,7 +858,8 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
continue;
}
if ((time - lastAccessTime > context.keepAlive())) {
logger.debug("freeing search context [{}], time [{}], lastAccessTime [{}], keepAlive [{}]", context.id(), time, lastAccessTime, context.keepAlive());
logger.debug("freeing search context [{}], time [{}], lastAccessTime [{}], keepAlive [{}]", context.id(), time,
lastAccessTime, context.keepAlive());
freeContext(context.id());
}
}
@@ -40,7 +40,7 @@ import java.util.Collections;
import java.util.List;
/**
*
* Aggregation phase of a search request, used to collect aggregations
*/
public class AggregationPhase implements SearchPhase {
@@ -180,7 +180,7 @@ public class PercentileRanksAggregationBuilder extends LeafOnly<ValuesSource.Num
@Override
protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
builder.field(PercentileRanksParser.VALUES_FIELD.getPreferredName(), values);
builder.array(PercentileRanksParser.VALUES_FIELD.getPreferredName(), values);
builder.field(AbstractPercentilesParser.KEYED_FIELD.getPreferredName(), keyed);
builder.startObject(method.getName());
if (method == PercentilesMethod.TDIGEST) {
@@ -180,7 +180,7 @@ public class PercentilesAggregationBuilder extends LeafOnly<ValuesSource.Numeric
@Override
protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
builder.field(PercentilesParser.PERCENTS_FIELD.getPreferredName(), percents);
builder.array(PercentilesParser.PERCENTS_FIELD.getPreferredName(), percents);
builder.field(AbstractPercentilesParser.KEYED_FIELD.getPreferredName(), keyed);
builder.startObject(method.getName());
if (method == PercentilesMethod.TDIGEST) {
@@ -19,7 +19,6 @@
package org.elasticsearch.search.aggregations.metrics.tophits;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.aggregations.Aggregator;

@@ -29,9 +28,8 @@ import org.elasticsearch.search.aggregations.InternalAggregation.Type;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField;
import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext.DocValueField;
import org.elasticsearch.search.fetch.subphase.DocValueFieldsFetchSubPhase;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.internal.SubSearchContext;

@@ -98,12 +96,7 @@ public class TopHitsAggregatorFactory extends AggregatorFactory<TopHitsAggregato
subSearchContext.storedFieldsContext(storedFieldsContext);
}
if (docValueFields != null) {
DocValueFieldsContext docValueFieldsContext = subSearchContext
.getFetchSubPhaseContext(DocValueFieldsFetchSubPhase.CONTEXT_FACTORY);
for (String field : docValueFields) {
docValueFieldsContext.add(new DocValueField(field));
}
docValueFieldsContext.setHitExecutionNeeded(true);
subSearchContext.docValueFieldsContext(new DocValueFieldsContext(docValueFields));
}
if (scriptFields != null) {
for (ScriptField field : scriptFields) {
@@ -113,7 +113,7 @@ public class PercentilesBucketPipelineAggregationBuilder
@Override
protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
if (percents != null) {
builder.field(PERCENTS_FIELD.getPreferredName(), percents);
builder.array(PERCENTS_FIELD.getPreferredName(), percents);
}
return builder;
}
@@ -25,7 +25,6 @@ import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;

@@ -33,13 +32,14 @@ import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.QueryParseContext;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.script.Script;
+import org.elasticsearch.search.SearchExtBuilder;
+import org.elasticsearch.search.SearchExtParser;
+import org.elasticsearch.search.SearchExtRegistry;
 import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.AggregatorParsers;

@@ -108,9 +108,9 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writeable
     public static final ParseField SLICE = new ParseField("slice");

     public static SearchSourceBuilder fromXContent(QueryParseContext context, AggregatorParsers aggParsers,
-            Suggesters suggesters) throws IOException {
+            Suggesters suggesters, SearchExtRegistry searchExtRegistry) throws IOException {
         SearchSourceBuilder builder = new SearchSourceBuilder();
-        builder.parseXContent(context, aggParsers, suggesters);
+        builder.parseXContent(context, aggParsers, suggesters, searchExtRegistry);
         return builder;
     }

@@ -164,13 +164,13 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writeable
     private SuggestBuilder suggestBuilder;

-    private List<RescoreBuilder<?>> rescoreBuilders;
+    private List<RescoreBuilder> rescoreBuilders;

     private ObjectFloatHashMap<String> indexBoost = null;

     private List<String> stats;

-    private BytesReference ext = null;
+    private List<SearchExtBuilder> extBuilders = Collections.emptyList();

     private boolean profile = false;

@@ -203,18 +203,10 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writeable
         postQueryBuilder = in.readOptionalNamedWriteable(QueryBuilder.class);
         queryBuilder = in.readOptionalNamedWriteable(QueryBuilder.class);
         if (in.readBoolean()) {
-            int size = in.readVInt();
-            rescoreBuilders = new ArrayList<>();
-            for (int i = 0; i < size; i++) {
-                rescoreBuilders.add(in.readNamedWriteable(RescoreBuilder.class));
-            }
+            rescoreBuilders = in.readNamedWriteableList(RescoreBuilder.class);
         }
         if (in.readBoolean()) {
-            int size = in.readVInt();
-            scriptFields = new ArrayList<>(size);
-            for (int i = 0; i < size; i++) {
-                scriptFields.add(new ScriptField(in));
-            }
+            scriptFields = in.readList(ScriptField::new);
         }
         size = in.readVInt();
         if (in.readBoolean()) {

@@ -225,18 +217,14 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writeable
             }
         }
         if (in.readBoolean()) {
-            int size = in.readVInt();
-            stats = new ArrayList<>();
-            for (int i = 0; i < size; i++) {
-                stats.add(in.readString());
-            }
+            stats = in.readList(StreamInput::readString);
         }
         suggestBuilder = in.readOptionalWriteable(SuggestBuilder::new);
         terminateAfter = in.readVInt();
         timeout = in.readOptionalWriteable(TimeValue::new);
         trackScores = in.readBoolean();
         version = in.readOptionalBoolean();
-        ext = in.readOptionalBytesReference();
+        extBuilders = in.readNamedWriteableList(SearchExtBuilder.class);
         profile = in.readBoolean();
         searchAfterBuilder = in.readOptionalWriteable(SearchAfterBuilder::new);
         sliceBuilder = in.readOptionalWriteable(SliceBuilder::new);

@@ -262,18 +250,12 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writeable
         boolean hasRescoreBuilders = rescoreBuilders != null;
         out.writeBoolean(hasRescoreBuilders);
         if (hasRescoreBuilders) {
-            out.writeVInt(rescoreBuilders.size());
-            for (RescoreBuilder<?> rescoreBuilder : rescoreBuilders) {
-                out.writeNamedWriteable(rescoreBuilder);
-            }
+            out.writeNamedWriteableList(rescoreBuilders);
         }
         boolean hasScriptFields = scriptFields != null;
         out.writeBoolean(hasScriptFields);
         if (hasScriptFields) {
-            out.writeVInt(scriptFields.size());
-            for (ScriptField scriptField : scriptFields) {
-                scriptField.writeTo(out);
-            }
+            out.writeList(scriptFields);
         }
         out.writeVInt(size);
         boolean hasSorts = sorts != null;

@@ -287,17 +269,14 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writeable
         boolean hasStats = stats != null;
         out.writeBoolean(hasStats);
         if (hasStats) {
-            out.writeVInt(stats.size());
-            for (String stat : stats) {
-                out.writeString(stat);
-            }
+            out.writeStringList(stats);
         }
         out.writeOptionalWriteable(suggestBuilder);
         out.writeVInt(terminateAfter);
         out.writeOptionalWriteable(timeout);
         out.writeBoolean(trackScores);
         out.writeOptionalBoolean(version);
-        out.writeOptionalBytesReference(ext);
+        out.writeNamedWriteableList(extBuilders);
         out.writeBoolean(profile);
         out.writeOptionalWriteable(searchAfterBuilder);
         out.writeOptionalWriteable(sliceBuilder);

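Editor's note: the serialization hunks above replace hand-rolled size-prefixed loops with the StreamInput/StreamOutput list helpers; both sides must stay symmetric or deserialization drifts. A minimal sketch of the pattern the new code relies on; the Stat class is hypothetical, only the writeList/readList calls mirror this diff:

import java.io.IOException;
import java.util.List;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;

// Hypothetical Writeable used only to illustrate the pattern.
class Stat implements Writeable {
    final String name;
    Stat(String name) { this.name = name; }
    Stat(StreamInput in) throws IOException { this.name = in.readString(); }
    @Override
    public void writeTo(StreamOutput out) throws IOException { out.writeString(name); }
}

class ListSerializationSketch {
    static void write(StreamOutput out, List<Stat> stats) throws IOException {
        // writeList() prepends the size and serializes each element, replacing
        // the manual writeVInt(size) + for-loop from the old code.
        out.writeList(stats);
    }

    static List<Stat> read(StreamInput in) throws IOException {
        // readList() mirrors writeList(): read the size, then invoke the
        // element reader (here the Stat(StreamInput) constructor) that many times.
        return in.readList(Stat::new);
    }
}
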
@@ -649,7 +628,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writeable
     /**
      * Gets the bytes representing the rescore builders for this request.
      */
-    public List<RescoreBuilder<?>> rescores() {
+    public List<RescoreBuilder> rescores() {
         return rescoreBuilders;
     }

@@ -875,13 +854,13 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writeable
         return stats;
     }

-    public SearchSourceBuilder ext(XContentBuilder ext) {
-        this.ext = ext.bytes();
+    public SearchSourceBuilder ext(List<SearchExtBuilder> searchExtBuilders) {
+        this.extBuilders = Objects.requireNonNull(searchExtBuilders, "searchExtBuilders must not be null");
         return this;
     }

-    public BytesReference ext() {
-        return ext;
+    public List<SearchExtBuilder> ext() {
+        return extBuilders;
     }

     /**

@@ -919,7 +898,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writeable
         SearchSourceBuilder rewrittenBuilder = new SearchSourceBuilder();
         rewrittenBuilder.aggregations = aggregations;
         rewrittenBuilder.explain = explain;
-        rewrittenBuilder.ext = ext;
+        rewrittenBuilder.extBuilders = extBuilders;
         rewrittenBuilder.fetchSourceContext = fetchSourceContext;
         rewrittenBuilder.docValueFields = docValueFields;
         rewrittenBuilder.storedFieldsContext = storedFieldsContext;

@@ -948,17 +927,18 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writeable
     /**
      * Parse some xContent into this SearchSourceBuilder, overwriting any values specified in the xContent. Use this if you need to set up
      * different defaults than a regular SearchSourceBuilder would have and use
-     * {@link #fromXContent(QueryParseContext, AggregatorParsers, Suggesters)} if you have normal defaults.
+     * {@link #fromXContent(QueryParseContext, AggregatorParsers, Suggesters, SearchExtRegistry)} if you have normal defaults.
      */
-    public void parseXContent(QueryParseContext context, AggregatorParsers aggParsers, Suggesters suggesters)
+    public void parseXContent(QueryParseContext context, AggregatorParsers aggParsers,
+            Suggesters suggesters, SearchExtRegistry searchExtRegistry)
             throws IOException {

         XContentParser parser = context.parser();
         XContentParser.Token token = parser.currentToken();
         String currentFieldName = null;
         if (token != XContentParser.Token.START_OBJECT && (token = parser.nextToken()) != XContentParser.Token.START_OBJECT) {
-            throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.START_OBJECT + "] but found [" + token + "]",
-                    parser.getTokenLocation());
+            throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.START_OBJECT +
+                    "] but found [" + token + "]", parser.getTokenLocation());
         }
         while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
             if (token == XContentParser.Token.FIELD_NAME) {

@@ -1017,8 +997,8 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writeable
                 } else if (token.isValue()) {
                     indexBoost.put(currentFieldName, parser.floatValue());
                 } else {
-                    throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].",
-                            parser.getTokenLocation());
+                    throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token +
+                            " in [" + currentFieldName + "].", parser.getTokenLocation());
                 }
             }
         } else if (context.getParseFieldMatcher().match(currentFieldName, AGGREGATIONS_FIELD)

@@ -1034,8 +1014,23 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writeable
                 rescoreBuilders = new ArrayList<>();
                 rescoreBuilders.add(RescoreBuilder.parseFromXContent(context));
             } else if (context.getParseFieldMatcher().match(currentFieldName, EXT_FIELD)) {
-                XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().copyCurrentStructure(parser);
-                ext = xContentBuilder.bytes();
+                extBuilders = new ArrayList<>();
+                String extSectionName = null;
+                while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+                    if (token == XContentParser.Token.FIELD_NAME) {
+                        extSectionName = parser.currentName();
+                    } else {
+                        SearchExtParser searchExtParser = searchExtRegistry.lookup(extSectionName,
+                                context.getParseFieldMatcher(), parser.getTokenLocation());
+                        SearchExtBuilder searchExtBuilder = searchExtParser.fromXContent(parser);
+                        if (searchExtBuilder.getWriteableName().equals(extSectionName) == false) {
+                            throw new IllegalStateException("The parsed [" + searchExtBuilder.getClass().getName() + "] object has a "
+                                    + "different writeable name compared to the name of the section that it was parsed from: found ["
+                                    + searchExtBuilder.getWriteableName() + "] expected [" + extSectionName + "]");
+                        }
+                        extBuilders.add(searchExtBuilder);
+                    }
+                }
             } else if (context.getParseFieldMatcher().match(currentFieldName, SLICE)) {
                 sliceBuilder = SliceBuilder.fromXContent(context);
             } else {

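Editor's note: in the new ext parsing loop above, each key under `ext` selects a SearchExtParser from the registry, and the parsed SearchExtBuilder must report the same writeable name as its section or the IllegalStateException fires. Below is a minimal sketch of a plugin-side pair, loosely modeled on the term_vectors_fetch example from Elasticsearch's own fetch sub phase plugin test; the class and field names are hypothetical, and the exact SearchExtBuilder/SearchExtParser signatures are assumed from the calls visible in this diff:

import java.io.IOException;
import java.util.Objects;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.SearchExtBuilder;
import org.elasticsearch.search.SearchExtParser;

public class TermVectorsFetchBuilder extends SearchExtBuilder {
    // Must equal the key used under "ext" in the search request, e.g.
    // "ext": { "term_vectors_fetch": "some_field" }
    public static final String NAME = "term_vectors_fetch";

    private final String field;

    public TermVectorsFetchBuilder(String field) {
        this.field = field;
    }

    public TermVectorsFetchBuilder(StreamInput in) throws IOException {
        this.field = in.readString();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(field);
    }

    @Override
    public String getWriteableName() {
        return NAME; // checked against the section name by parseXContent above
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        return builder.field(NAME, field);
    }

    @Override
    public int hashCode() {
        return Objects.hash(getClass(), field);
    }

    @Override
    public boolean equals(Object obj) {
        return obj instanceof TermVectorsFetchBuilder
                && Objects.equals(field, ((TermVectorsFetchBuilder) obj).field);
    }

    // The parser consumes the section body handed over by the loop above;
    // here the body is assumed to be a single string value.
    public static class Parser implements SearchExtParser<TermVectorsFetchBuilder> {
        @Override
        public TermVectorsFetchBuilder fromXContent(XContentParser parser) throws IOException {
            return new TermVectorsFetchBuilder(parser.text());
        }
    }
}
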
@@ -1051,8 +1046,8 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writeable
                 if (token == XContentParser.Token.VALUE_STRING) {
                     docValueFields.add(parser.text());
                 } else {
-                    throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.VALUE_STRING + "] in ["
-                            + currentFieldName + "] but found [" + token + "]", parser.getTokenLocation());
+                    throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.VALUE_STRING +
+                            "] in [" + currentFieldName + "] but found [" + token + "]", parser.getTokenLocation());
                 }
             }
         } else if (context.getParseFieldMatcher().match(currentFieldName, SORT_FIELD)) {

@@ -1068,8 +1063,8 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writeable
                 if (token == XContentParser.Token.VALUE_STRING) {
                     stats.add(parser.text());
                 } else {
-                    throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.VALUE_STRING + "] in ["
-                            + currentFieldName + "] but found [" + token + "]", parser.getTokenLocation());
+                    throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.VALUE_STRING +
+                            "] in [" + currentFieldName + "] but found [" + token + "]", parser.getTokenLocation());
                 }
             }
         } else if (context.getParseFieldMatcher().match(currentFieldName, _SOURCE_FIELD)) {

@@ -1177,7 +1172,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writeable
         }

         if (searchAfterBuilder != null) {
-            builder.field(SEARCH_AFTER.getPreferredName(), searchAfterBuilder.getSortValues());
+            builder.array(SEARCH_AFTER.getPreferredName(), searchAfterBuilder.getSortValues());
         }

         if (sliceBuilder != null) {

@@ -1221,12 +1216,12 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writeable
             builder.field(STATS_FIELD.getPreferredName(), stats);
         }

-        if (ext != null) {
-            builder.field(EXT_FIELD.getPreferredName());
-            try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(ext)) {
-                parser.nextToken();
-                builder.copyCurrentStructure(parser);
+        if (extBuilders != null) {
+            builder.startObject(EXT_FIELD.getPreferredName());
+            for (SearchExtBuilder extBuilder : extBuilders) {
+                extBuilder.toXContent(builder, params);
             }
+            builder.endObject();
         }
     }

@@ -1344,9 +1339,9 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writeable
     @Override
     public int hashCode() {
-        return Objects.hash(aggregations, explain, fetchSourceContext, docValueFields, storedFieldsContext, from,
-                highlightBuilder, indexBoost, minScore, postQueryBuilder, queryBuilder, rescoreBuilders, scriptFields,
-                size, sorts, searchAfterBuilder, sliceBuilder, stats, suggestBuilder, terminateAfter, timeout, trackScores, version, profile);
+        return Objects.hash(aggregations, explain, fetchSourceContext, docValueFields, storedFieldsContext, from, highlightBuilder,
+                indexBoost, minScore, postQueryBuilder, queryBuilder, rescoreBuilders, scriptFields, size, sorts, searchAfterBuilder,
+                sliceBuilder, stats, suggestBuilder, terminateAfter, timeout, trackScores, version, profile, extBuilders);
     }

     @Override

@@ -1381,7 +1376,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writeable
                 && Objects.equals(timeout, other.timeout)
                 && Objects.equals(trackScores, other.trackScores)
                 && Objects.equals(version, other.version)
-                && Objects.equals(profile, other.profile);
+                && Objects.equals(profile, other.profile)
+                && Objects.equals(extBuilders, other.extBuilders);
     }

 }

@@ -37,7 +37,8 @@ import java.util.Collection;
 import java.util.Iterator;

 /**
- *
+ * Dfs phase of a search request, used to make scoring 100% accurate by collecting additional info from each shard before the query phase.
+ * The additional information is used to better compare the scores coming from all the shards, which depend on local factors (e.g. idf)
  */
 public class DfsPhase implements SearchPhase {

@@ -43,7 +43,6 @@ import org.elasticsearch.index.mapper.ObjectMapper;
 import org.elasticsearch.index.mapper.SourceFieldMapper;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.SearchHitField;
-import org.elasticsearch.search.SearchParseElement;
 import org.elasticsearch.search.SearchPhase;
 import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
 import org.elasticsearch.search.fetch.subphase.InnerHitsFetchSubPhase;

@@ -62,11 +61,11 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;

-import static java.util.Collections.unmodifiableMap;
 import static org.elasticsearch.common.xcontent.XContentFactory.contentBuilder;

 /**
- *
+ * Fetch phase of a search request, used to fetch the actual top matching documents to be returned to the client, identified
+ * after reducing all of the matches returned by the query phase
  */
 public class FetchPhase implements SearchPhase {

@@ -77,15 +76,6 @@ public class FetchPhase implements SearchPhase {
         this.fetchSubPhases[fetchSubPhases.size()] = new InnerHitsFetchSubPhase(this);
     }

-    @Override
-    public Map<String, ? extends SearchParseElement> parseElements() {
-        Map<String, SearchParseElement> parseElements = new HashMap<>();
-        for (FetchSubPhase fetchSubPhase : fetchSubPhases) {
-            parseElements.putAll(fetchSubPhase.parseElements());
-        }
-        return unmodifiableMap(parseElements);
-    }
-
     @Override
     public void preProcess(SearchContext context) {
     }

@@ -22,16 +22,14 @@ import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.search.IndexSearcher;
-import org.elasticsearch.search.SearchParseElement;
 import org.elasticsearch.search.internal.InternalSearchHit;
 import org.elasticsearch.search.internal.SearchContext;

-import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;

 /**
- * Sub phase within the fetch phase used to fetch things *about* the documents like highlghting or matched queries.
+ * Sub phase within the fetch phase used to fetch things *about* the documents like highlighting or matched queries.
  */
 public interface FetchSubPhase {

@@ -69,10 +67,6 @@ public interface FetchSubPhase {
             return searcher.getIndexReader();
         }

-        public IndexSearcher topLevelSearcher() {
-            return searcher;
-        }
-
         public Map<String, Object> cache() {
             if (cache == null) {
                 cache = new HashMap<>();

@@ -82,10 +76,6 @@ public interface FetchSubPhase {

     }

-    default Map<String, ? extends SearchParseElement> parseElements() {
-        return Collections.emptyMap();
-    }
-
     /**
      * Executes the hit level phase, with a reader and doc id (note, its a low level reader, and the matching doc).
      */

@@ -93,23 +83,4 @@ public interface FetchSubPhase {

     default void hitsExecute(SearchContext context, InternalSearchHit[] hits) {}

-    /**
-     * This interface is in the fetch phase plugin mechanism.
-     * Whenever a new search is executed we create a new {@link SearchContext} that holds individual contexts for each {@link org.elasticsearch.search.fetch.FetchSubPhase}.
-     * Fetch phases that use the plugin mechanism must provide a ContextFactory to the SearchContext that creates the fetch phase context and also associates them with a name.
-     * See {@link SearchContext#getFetchSubPhaseContext(FetchSubPhase.ContextFactory)}
-     */
-    interface ContextFactory<SubPhaseContext extends FetchSubPhaseContext> {
-
-        /**
-         * The name of the context.
-         */
-        String getName();
-
-        /**
-         * Creates a new instance of a FetchSubPhaseContext that holds all information a FetchSubPhase needs to execute on hits.
-         */
-        SubPhaseContext newContextInstance();
-    }
 }

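Editor's note: with parseElements() and ContextFactory gone, implementing FetchSubPhase reduces to the hit callbacks; whether a phase runs is now decided by inspecting the SearchContext directly. A minimal sketch with a hypothetical class name, following the pattern DocValueFieldsFetchSubPhase adopts later in this diff:

import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.internal.SearchContext;

public final class ExampleFetchSubPhase implements FetchSubPhase {

    @Override
    public void hitExecute(SearchContext context, HitContext hitContext) {
        // Instead of checking a FetchSubPhaseContext's hitExecutionNeeded() flag,
        // the phase looks at the context it cares about and bails out when absent.
        if (context.docValueFieldsContext() == null) {
            return;
        }
        // ... per-hit work against hitContext.hit() would go here ...
    }
}
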
@@ -1,49 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.fetch;
-
-import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
-
-/**
- * All configuration and context needed by the FetchSubPhase to execute on hits.
- * The only required information in this base class is whether or not the sub phase needs to be run at all.
- * It can be extended by FetchSubPhases to hold information the phase needs to execute on hits.
- * See {@link org.elasticsearch.search.fetch.FetchSubPhase.ContextFactory} and also {@link DocValueFieldsContext} for an example.
- */
-public class FetchSubPhaseContext {
-
-    // This is to store if the FetchSubPhase should be executed at all.
-    private boolean hitExecutionNeeded = false;
-
-    /**
-     * Set if this phase should be executed at all.
-     */
-    public void setHitExecutionNeeded(boolean hitExecutionNeeded) {
-        this.hitExecutionNeeded = hitExecutionNeeded;
-    }
-
-    /**
-     * Returns if this phase be executed at all.
-     */
-    public boolean hitExecutionNeeded() {
-        return hitExecutionNeeded;
-    }
-
-}

@@ -1,48 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.search.fetch;
-
-import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.search.SearchParseElement;
-import org.elasticsearch.search.internal.SearchContext;
-
-/**
- * A parse element for a {@link org.elasticsearch.search.fetch.FetchSubPhase} that is used when parsing a search request.
- */
-public abstract class FetchSubPhaseParseElement<SubPhaseContext extends FetchSubPhaseContext> implements SearchParseElement {
-
-    @Override
-    public final void parse(XContentParser parser, SearchContext context) throws Exception {
-        SubPhaseContext fetchSubPhaseContext = context.getFetchSubPhaseContext(getContextFactory());
-        // this is to make sure that the SubFetchPhase knows it should execute
-        fetchSubPhaseContext.setHitExecutionNeeded(true);
-        innerParse(parser, fetchSubPhaseContext, context);
-    }
-
-    /**
-     * Implement the actual parsing here.
-     */
-    protected abstract void innerParse(XContentParser parser, SubPhaseContext fetchSubPhaseContext, SearchContext searchContext) throws Exception;
-
-    /**
-     * Return the ContextFactory for this FetchSubPhase.
-     */
-    protected abstract FetchSubPhase.ContextFactory<SubPhaseContext> getContextFactory();
-}

@@ -18,38 +18,23 @@
  */
 package org.elasticsearch.search.fetch.subphase;

-import org.elasticsearch.search.fetch.FetchSubPhaseContext;
-
-import java.util.ArrayList;
 import java.util.List;

 /**
  * All the required context to pull a field from the doc values.
  */
-public class DocValueFieldsContext extends FetchSubPhaseContext {
+public class DocValueFieldsContext {

-    public static class DocValueField {
-        private final String name;
+    private final List<String> fields;

-        public DocValueField(String name) {
-            this.name = name;
-        }
-
-        public String name() {
-            return name;
-        }
-    }
-
-    private List<DocValueField> fields = new ArrayList<>();
-
-    public DocValueFieldsContext() {
-    }
-
-    public void add(DocValueField field) {
-        this.fields.add(field);
-    }
+    public DocValueFieldsContext(List<String> fields) {
+        this.fields = fields;
+    }

-    public List<DocValueField> fields() {
+    /**
+     * Returns the required docvalue fields
+     */
+    public List<String> fields() {
         return this.fields;
     }
 }

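Editor's note: with the DocValueField wrapper and the mutable add() API gone, callers now build the context in one step from plain field names (compare the TopHitsAggregatorFactory hunk at the top of this diff). A trivial usage sketch with hypothetical field names:

import java.util.Arrays;

import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;

class DocValueFieldsContextUsage {
    static DocValueFieldsContext example() {
        // one-shot construction; the list of field names is all the state there is
        return new DocValueFieldsContext(Arrays.asList("price", "publish_date"));
    }
}
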
@@ -36,35 +36,21 @@ import java.util.HashMap;
  */
 public final class DocValueFieldsFetchSubPhase implements FetchSubPhase {

-    public static final String NAME = "docvalue_fields";
-    public static final ContextFactory<DocValueFieldsContext> CONTEXT_FACTORY = new ContextFactory<DocValueFieldsContext>() {
-
-        @Override
-        public String getName() {
-            return NAME;
-        }
-
-        @Override
-        public DocValueFieldsContext newContextInstance() {
-            return new DocValueFieldsContext();
-        }
-    };
-
     @Override
     public void hitExecute(SearchContext context, HitContext hitContext) {
-        if (context.getFetchSubPhaseContext(CONTEXT_FACTORY).hitExecutionNeeded() == false) {
+        if (context.docValueFieldsContext() == null) {
             return;
         }
-        for (DocValueFieldsContext.DocValueField field : context.getFetchSubPhaseContext(CONTEXT_FACTORY).fields()) {
+        for (String field : context.docValueFieldsContext().fields()) {
             if (hitContext.hit().fieldsOrNull() == null) {
                 hitContext.hit().fields(new HashMap<>(2));
             }
-            SearchHitField hitField = hitContext.hit().fields().get(field.name());
+            SearchHitField hitField = hitContext.hit().fields().get(field);
             if (hitField == null) {
-                hitField = new InternalSearchHitField(field.name(), new ArrayList<>(2));
-                hitContext.hit().fields().put(field.name(), hitField);
+                hitField = new InternalSearchHitField(field, new ArrayList<>(2));
+                hitContext.hit().fields().put(field, hitField);
             }
-            MappedFieldType fieldType = context.mapperService().fullName(field.name());
+            MappedFieldType fieldType = context.mapperService().fullName(field);
             if (fieldType != null) {
                 AtomicFieldData data = context.fieldData().getForField(fieldType).load(hitContext.readerContext());
                 ScriptDocValues values = data.getScriptValues();

@@ -476,7 +476,7 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilder>
             builder.field(FRAGMENT_OFFSET_FIELD.getPreferredName(), fragmentOffset);
         }
         if (matchedFields != null) {
-            builder.field(MATCHED_FIELDS_FIELD.getPreferredName(), matchedFields);
+            builder.array(MATCHED_FIELDS_FIELD.getPreferredName(), matchedFields);
         }
         builder.endObject();
     }

@@ -50,17 +50,17 @@ import org.elasticsearch.index.mapper.TypeFieldMapper;
 import org.elasticsearch.index.query.AbstractQueryBuilder;
 import org.elasticsearch.index.query.ParsedQuery;
 import org.elasticsearch.index.query.QueryShardContext;
-import org.elasticsearch.search.fetch.StoredFieldsContext;
 import org.elasticsearch.index.shard.IndexShard;
 import org.elasticsearch.index.similarity.SimilarityService;
 import org.elasticsearch.script.ScriptService;
+import org.elasticsearch.search.SearchExtBuilder;
 import org.elasticsearch.search.SearchShardTarget;
 import org.elasticsearch.search.aggregations.SearchContextAggregations;
 import org.elasticsearch.search.dfs.DfsSearchResult;
 import org.elasticsearch.search.fetch.FetchPhase;
 import org.elasticsearch.search.fetch.FetchSearchResult;
-import org.elasticsearch.search.fetch.FetchSubPhase;
-import org.elasticsearch.search.fetch.FetchSubPhaseContext;
+import org.elasticsearch.search.fetch.StoredFieldsContext;
+import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
 import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
 import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext;
 import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight;

@@ -80,9 +80,6 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;

-/**
- *
- */
 public class DefaultSearchContext extends SearchContext {

     private final long id;

@@ -110,6 +107,7 @@ public class DefaultSearchContext extends SearchContext {
     private StoredFieldsContext storedFields;
     private ScriptFieldsContext scriptFields;
     private FetchSourceContext fetchSourceContext;
+    private DocValueFieldsContext docValueFieldsContext;
     private int from = -1;
     private int size = -1;
     private SortAndFormats sort;

@@ -148,7 +146,7 @@ public class DefaultSearchContext extends SearchContext {
     private volatile long lastAccessTime = -1;
     private Profilers profilers;

-    private final Map<String, FetchSubPhaseContext> subPhaseContexts = new HashMap<>();
+    private final Map<String, SearchExtBuilder> searchExtBuilders = new HashMap<>();
     private final Map<Class<?>, Collector> queryCollectors = new HashMap<>();
     private final QueryShardContext queryShardContext;
     private FetchPhase fetchPhase;

@@ -388,14 +386,16 @@ public class DefaultSearchContext extends SearchContext {
     }

     @Override
-    public <SubPhaseContext extends FetchSubPhaseContext> SubPhaseContext getFetchSubPhaseContext(FetchSubPhase.ContextFactory<SubPhaseContext> contextFactory) {
-        String subPhaseName = contextFactory.getName();
-        if (subPhaseContexts.get(subPhaseName) == null) {
-            subPhaseContexts.put(subPhaseName, contextFactory.newContextInstance());
-        }
-        return (SubPhaseContext) subPhaseContexts.get(subPhaseName);
+    public void addSearchExt(SearchExtBuilder searchExtBuilder) {
+        //it's ok to use the writeable name here given that we enforce it to be the same as the name of the element that gets
+        //parsed by the corresponding parser. There is one single name and one single way to retrieve the parsed object from the context.
+        searchExtBuilders.put(searchExtBuilder.getWriteableName(), searchExtBuilder);
+    }
+
+    @Override
+    public SearchExtBuilder getSearchExt(String name) {
+        return searchExtBuilders.get(name);
     }

     @Override
     public SearchContextHighlight highlight() {

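Editor's note: keying the map by writeable name gives one canonical lookup path for parsed ext sections. A sketch of how a consumer (for instance, a fetch sub phase) might retrieve its section, reusing the hypothetical TermVectorsFetchBuilder from the earlier sketch:

import org.elasticsearch.search.SearchExtBuilder;
import org.elasticsearch.search.internal.SearchContext;

class SearchExtLookup {
    static TermVectorsFetchBuilder termVectorsFetch(SearchContext context) {
        SearchExtBuilder ext = context.getSearchExt(TermVectorsFetchBuilder.NAME);
        // null simply means the request carried no such ext section
        return ext == null ? null : (TermVectorsFetchBuilder) ext;
    }
}
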
@@ -470,6 +470,17 @@ public class DefaultSearchContext extends SearchContext {
         return this;
     }

+    @Override
+    public DocValueFieldsContext docValueFieldsContext() {
+        return docValueFieldsContext;
+    }
+
+    @Override
+    public SearchContext docValueFieldsContext(DocValueFieldsContext docValueFieldsContext) {
+        this.docValueFieldsContext = docValueFieldsContext;
+        return this;
+    }
+
     @Override
     public ContextIndexSearcher searcher() {
         return this.searcher;

|
@ -35,17 +35,16 @@ import org.elasticsearch.index.mapper.MapperService;
|
|||
import org.elasticsearch.index.mapper.ObjectMapper;
|
||||
import org.elasticsearch.index.query.ParsedQuery;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.search.fetch.StoredFieldsContext;
|
||||
import org.elasticsearch.index.shard.IndexShard;
|
||||
import org.elasticsearch.index.similarity.SimilarityService;
|
||||
import org.elasticsearch.script.ScriptService;
|
||||
import org.elasticsearch.search.SearchExtBuilder;
|
||||
import org.elasticsearch.search.SearchShardTarget;
|
||||
import org.elasticsearch.search.aggregations.SearchContextAggregations;
|
||||
import org.elasticsearch.search.dfs.DfsSearchResult;
|
||||
import org.elasticsearch.search.fetch.FetchPhase;
|
||||
import org.elasticsearch.search.fetch.FetchSearchResult;
|
||||
import org.elasticsearch.search.fetch.FetchSubPhase;
|
||||
import org.elasticsearch.search.fetch.FetchSubPhaseContext;
|
||||
import org.elasticsearch.search.fetch.StoredFieldsContext;
|
||||
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
|
||||
import org.elasticsearch.search.fetch.subphase.InnerHitsContext;
|
||||
import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext;
|
||||
|
@ -512,8 +511,13 @@ public abstract class FilteredSearchContext extends SearchContext {
|
|||
}
|
||||
|
||||
@Override
|
||||
public <SubPhaseContext extends FetchSubPhaseContext> SubPhaseContext getFetchSubPhaseContext(FetchSubPhase.ContextFactory<SubPhaseContext> contextFactory) {
|
||||
return in.getFetchSubPhaseContext(contextFactory);
|
||||
public void addSearchExt(SearchExtBuilder searchExtBuilder) {
|
||||
in.addSearchExt(searchExtBuilder);
|
||||
}
|
||||
|
||||
@Override
|
||||
public SearchExtBuilder getSearchExt(String name) {
|
||||
return in.getSearchExt(name);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@@ -22,9 +22,7 @@ package org.elasticsearch.search.internal;
 import org.apache.lucene.search.Collector;
 import org.apache.lucene.search.FieldDoc;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.util.Counter;
-import org.apache.lucene.util.RefCount;
 import org.elasticsearch.action.search.SearchType;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.ParseFieldMatcher;

@@ -43,17 +41,17 @@ import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.ObjectMapper;
 import org.elasticsearch.index.query.ParsedQuery;
 import org.elasticsearch.index.query.QueryShardContext;
-import org.elasticsearch.search.fetch.StoredFieldsContext;
 import org.elasticsearch.index.shard.IndexShard;
 import org.elasticsearch.index.similarity.SimilarityService;
 import org.elasticsearch.script.ScriptService;
+import org.elasticsearch.search.SearchExtBuilder;
 import org.elasticsearch.search.SearchShardTarget;
 import org.elasticsearch.search.aggregations.SearchContextAggregations;
 import org.elasticsearch.search.dfs.DfsSearchResult;
 import org.elasticsearch.search.fetch.FetchPhase;
 import org.elasticsearch.search.fetch.FetchSearchResult;
-import org.elasticsearch.search.fetch.FetchSubPhase;
-import org.elasticsearch.search.fetch.FetchSubPhaseContext;
+import org.elasticsearch.search.fetch.StoredFieldsContext;
+import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
 import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
 import org.elasticsearch.search.fetch.subphase.InnerHitsContext;
 import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext;

@@ -187,7 +185,9 @@ public abstract class SearchContext extends AbstractRefCounted implements Releasable {

     public abstract SearchContext aggregations(SearchContextAggregations aggregations);

-    public abstract <SubPhaseContext extends FetchSubPhaseContext> SubPhaseContext getFetchSubPhaseContext(FetchSubPhase.ContextFactory<SubPhaseContext> contextFactory);
+    public abstract void addSearchExt(SearchExtBuilder searchExtBuilder);
+
+    public abstract SearchExtBuilder getSearchExt(String name);

     public abstract SearchContextHighlight highlight();

@@ -226,6 +226,10 @@ public abstract class SearchContext extends AbstractRefCounted implements Releasable {

     public abstract SearchContext fetchSourceContext(FetchSourceContext fetchSourceContext);

+    public abstract DocValueFieldsContext docValueFieldsContext();
+
+    public abstract SearchContext docValueFieldsContext(DocValueFieldsContext docValueFieldsContext);
+
     public abstract ContextIndexSearcher searcher();

     public abstract IndexShard indexShard();

@@ -25,6 +25,7 @@ import org.elasticsearch.index.query.ParsedQuery;
 import org.elasticsearch.search.fetch.StoredFieldsContext;
 import org.elasticsearch.search.aggregations.SearchContextAggregations;
 import org.elasticsearch.search.fetch.FetchSearchResult;
+import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
 import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
 import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext;
 import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight;

@@ -36,8 +37,6 @@ import org.elasticsearch.search.suggest.SuggestionSearchContext;

 import java.util.List;

-/**
- */
 public class SubSearchContext extends FilteredSearchContext {

     // By default return 3 hits per bucket. A higher default would make the response really large by default, since

@@ -60,6 +59,7 @@ public class SubSearchContext extends FilteredSearchContext {
     private StoredFieldsContext storedFields;
     private ScriptFieldsContext scriptFields;
     private FetchSourceContext fetchSourceContext;
+    private DocValueFieldsContext docValueFieldsContext;
     private SearchContextHighlight highlight;

     private boolean explain;

@@ -154,6 +154,17 @@ public class SubSearchContext extends FilteredSearchContext {
         return this;
     }

+    @Override
+    public DocValueFieldsContext docValueFieldsContext() {
+        return docValueFieldsContext;
+    }
+
+    @Override
+    public SearchContext docValueFieldsContext(DocValueFieldsContext docValueFieldsContext) {
+        this.docValueFieldsContext = docValueFieldsContext;
+        return this;
+    }
+
     @Override
     public void timeout(TimeValue timeout) {
         throw new UnsupportedOperationException("Not supported");

|
|||
import java.util.concurrent.Callable;
|
||||
|
||||
/**
|
||||
*
|
||||
* Query phase of a search request, used to run the query and get back from each shard information about the matching documents
|
||||
* (document ids and score or sort criteria) so that matches can be reduced on the coordinating node
|
||||
*/
|
||||
public class QueryPhase implements SearchPhase {
|
||||
|
||||
|
|
Some files were not shown because too many files have changed in this diff.