Remove unused private methods and fields (#47154)
This commit removes a number of unused private fields and unused private methods from the code base. Backport of #47115.
parent 9a64b7a888
commit 95e2ca741e
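Because every member deleted here is `private`, no call sites can exist outside the declaring class, so the compiler can prove the code dead and removal cannot break any caller (short of reflection, which would be unusual for helpers like these). A minimal sketch of the pattern being cleaned up, using a hypothetical class whose member names are borrowed from the hunks below:

import java.util.Locale;
import java.util.Objects;

// Hypothetical illustration, not code from this commit.
class DetailExample {
    private final String name;                      // read by equals() below, so it stays

    private static final String TOKENS = "tokens";  // referenced nowhere: dead, deleted

    DetailExample(String name) {
        this.name = name;
    }

    // No call sites in this class, and 'private' guarantees none elsewhere: dead, deleted.
    private static String unusedHelper(String s) {
        return s.toUpperCase(Locale.ROOT);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        return Objects.equals(name, ((DetailExample) o).name);
    }

    @Override
    public int hashCode() {
        return Objects.hash(name);
    }
}

Static analysis finds such members mechanically, for example IntelliJ's unused-declaration inspection or Error Prone's UnusedMethod and UnusedVariable checks. The hunk counts below show the commit is almost pure deletion; the one exception is AnalysisFactoryTestCase, where KNOWN_TOKENIZERS is narrowed from package-private to private instead of being removed.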
@@ -113,8 +113,6 @@ public class DetailAnalyzeResponse {
     private final String name;
     private final AnalyzeResponse.AnalyzeToken[] tokens;

-    private static final String TOKENS = "tokens";
-
     @Override
     public boolean equals(Object o) {
         if (this == o) return true;

@@ -21,12 +21,10 @@ package org.elasticsearch.client;

 import org.elasticsearch.client.migration.DeprecationInfoRequest;
 import org.elasticsearch.client.migration.DeprecationInfoResponse;
-import org.elasticsearch.client.tasks.TaskSubmissionResponse;
 import org.elasticsearch.common.settings.Settings;

 import java.io.IOException;
 import java.util.Collections;
-import java.util.function.BooleanSupplier;

 import static org.hamcrest.Matchers.equalTo;

@@ -42,20 +40,4 @@ public class MigrationIT extends ESRestHighLevelClientTestCase {
         assertThat(response.getNodeSettingsIssues().size(), equalTo(0));
         assertThat(response.getMlSettingsIssues().size(), equalTo(0));
     }
-
-    /**
-     * Using low-level api as high-level-rest-client's getTaskById work is in progress.
-     * TODO revisit once that work is finished
-     */
-    private BooleanSupplier checkCompletionStatus(TaskSubmissionResponse upgrade) {
-        return () -> {
-            try {
-                Response response = client().performRequest(new Request("GET", "/_tasks/" + upgrade.getTask()));
-                return (boolean) entityAsMap(response).get("completed");
-            } catch (IOException e) {
-                fail(e.getMessage());
-                return false;
-            }
-        };
-    }
 }

@@ -33,8 +33,4 @@ public class DeleteModelSnapshotRequestTests extends ESTestCase {
             -> new DeleteModelSnapshotRequest(randomAlphaOfLength(10), null));
         assertEquals("[snapshot_id] must not be null", ex.getMessage());
     }
-
-    private DeleteModelSnapshotRequest createTestInstance() {
-        return new DeleteModelSnapshotRequest(randomAlphaOfLength(10), randomAlphaOfLength(10));
-    }
 }

@@ -19,8 +19,6 @@

 package org.elasticsearch.threadpool;

-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.common.SuppressForbidden;
 import org.elasticsearch.common.settings.Settings;

@@ -263,7 +261,6 @@ public interface Scheduler {
      * tasks to the uncaught exception handler
      */
     class SafeScheduledThreadPoolExecutor extends ScheduledThreadPoolExecutor {
-        private static final Logger logger = LogManager.getLogger(SafeScheduledThreadPoolExecutor.class);

         @SuppressForbidden(reason = "properly rethrowing errors, see EsExecutors.rethrowErrors")
         public SafeScheduledThreadPoolExecutor(int corePoolSize, ThreadFactory threadFactory, RejectedExecutionHandler handler) {

@@ -360,10 +360,6 @@ public class IndexCreationTaskTests extends ESTestCase {
             .numberOfReplicas(numReplicas);
     }

-    private Map<String, String> createCustom() {
-        return Collections.singletonMap("a", "b");
-    }
-
     private interface MetaDataBuilderConfigurator {
         void configure(IndexTemplateMetaData.Builder builder) throws IOException;
     }

@@ -22,7 +22,6 @@ package org.elasticsearch.common.xcontent;
 import com.fasterxml.jackson.core.JsonGenerationException;
 import com.fasterxml.jackson.core.JsonGenerator;
 import com.fasterxml.jackson.core.JsonParseException;
-
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.Constants;
 import org.elasticsearch.cluster.metadata.IndexMetaData;

@@ -1208,11 +1207,6 @@ public abstract class BaseXContentTestCase extends ESTestCase {
         assertThat(e.getMessage(), containsString("Field name cannot be null"));
     }

-    private static void expectNonNullFormatterException(ThrowingRunnable runnable) {
-        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, runnable);
-        assertThat(e.getMessage(), containsString("DateTimeFormatter cannot be null"));
-    }
-
     private static void expectObjectException(ThrowingRunnable runnable) {
         JsonGenerationException e = expectThrows(JsonGenerationException.class, runnable);
         assertThat(e.getMessage(), containsString("Current context not Object"));

@@ -56,7 +56,6 @@ import org.elasticsearch.search.aggregations.AggregationExecutionException;
 import org.elasticsearch.search.aggregations.Aggregations;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorTestCase;
-import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation;
 import org.elasticsearch.search.aggregations.MultiBucketConsumerService;
 import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation;

@@ -523,16 +522,6 @@ public class RareTermsAggregatorTests extends AggregatorTestCase {
         return documents;
     }

-
-    private InternalAggregation buildInternalAggregation(RareTermsAggregationBuilder builder, MappedFieldType fieldType,
-                                                         IndexSearcher searcher) throws IOException {
-        AbstractRareTermsAggregator aggregator = createAggregator(builder, searcher, fieldType);
-        aggregator.preCollection();
-        searcher.search(new MatchAllDocsQuery(), aggregator);
-        aggregator.postCollection();
-        return aggregator.buildAggregation(0L);
-    }
-
     private void testSearchCase(Query query, List<Long> dataset,
                                 Consumer<RareTermsAggregationBuilder> configure,
                                 Consumer<InternalMappedRareTerms> verify, ValueType valueType) throws IOException {

@@ -19,7 +19,6 @@
 package org.elasticsearch.index.shard;

 import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.IndexNotFoundException;
 import org.apache.lucene.store.Directory;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.indices.flush.FlushRequest;

@@ -695,20 +694,6 @@ public abstract class IndexShardTestCase extends ESTestCase {
                                        inSyncIds, newRoutingTable);
     }

-    private Store.MetadataSnapshot getMetadataSnapshotOrEmpty(IndexShard replica) throws IOException {
-        Store.MetadataSnapshot result;
-        try {
-            result = replica.snapshotStoreMetadata();
-        } catch (IndexNotFoundException e) {
-            // OK!
-            result = Store.MetadataSnapshot.EMPTY;
-        } catch (IOException e) {
-            logger.warn("failed read store, treating as empty", e);
-            result = Store.MetadataSnapshot.EMPTY;
-        }
-        return result;
-    }
-
     public static Set<String> getShardDocUIDs(final IndexShard shard) throws IOException {
         return getDocIdAndSeqNos(shard).stream().map(DocIdSeqNoAndSource::getId).collect(Collectors.toSet());
     }

@@ -35,8 +35,6 @@ import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
 import java.util.TreeSet;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
 import java.util.stream.Collectors;

 import static java.util.Collections.emptyMap;

@@ -49,20 +47,7 @@ import static java.util.Collections.emptyMap;
  */
 public abstract class AnalysisFactoryTestCase extends ESTestCase {

-    private static final Pattern UNDERSCORE_THEN_ANYTHING = Pattern.compile("_(.)");
-
-    private static String toCamelCase(String s) {
-        Matcher m = UNDERSCORE_THEN_ANYTHING.matcher(s);
-        StringBuffer sb = new StringBuffer();
-        while (m.find()) {
-            m.appendReplacement(sb, m.group(1).toUpperCase(Locale.ROOT));
-        }
-        m.appendTail(sb);
-        sb.setCharAt(0, Character.toUpperCase(sb.charAt(0)));
-        return sb.toString();
-    }
-
-    static final Map<String,Class<?>> KNOWN_TOKENIZERS = new MapBuilder<String,Class<?>>()
+    private static final Map<String,Class<?>> KNOWN_TOKENIZERS = new MapBuilder<String,Class<?>>()
         // exposed in ES
         .put("classic", MovedToAnalysisCommon.class)
         .put("edgengram", MovedToAnalysisCommon.class)

@@ -162,10 +162,6 @@ public class TransportOpenJobAction extends TransportMasterNodeAction<OpenJobAct
         return node.getVersion().onOrAfter(job.getModelSnapshotMinVersion());
     }

-    private static boolean jobHasRules(Job job) {
-        return job.getAnalysisConfig().getDetectors().stream().anyMatch(d -> d.getRules().isEmpty() == false);
-    }
-
     public static String nodeFilter(DiscoveryNode node, Job job) {

         String jobId = job.getId();

@@ -549,21 +549,6 @@ public class JobConfigProvider {

     }

-    private SearchRequest makeExpandIdsSearchRequest(String expression, boolean excludeDeleting) {
-        String [] tokens = ExpandedIdsMatcher.tokenizeExpression(expression);
-        SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().query(buildQuery(tokens, excludeDeleting));
-        sourceBuilder.sort(Job.ID.getPreferredName());
-        sourceBuilder.fetchSource(false);
-        sourceBuilder.docValueField(Job.ID.getPreferredName(), null);
-        sourceBuilder.docValueField(Job.GROUPS.getPreferredName(), null);
-
-        return client.prepareSearch(AnomalyDetectorsIndex.configIndexName())
-                .setIndicesOptions(IndicesOptions.lenientExpandOpen())
-                .setSource(sourceBuilder)
-                .setSize(AnomalyDetectorsIndex.CONFIG_INDEX_MAX_RESULTS_WINDOW)
-                .request();
-    }
-
     /**
      * The same logic as {@link #expandJobsIds(String, boolean, boolean, ActionListener)} but
      * the full anomaly detector job configuration is returned.

@@ -1037,10 +1037,6 @@ public class JobResultsProviderTests extends ESTestCase {
         verifyNoMoreInteractions(client);
     }

-    private Bucket createBucketAtEpochTime(long epoch) {
-        return new Bucket("foo", new Date(epoch), 123);
-    }
-
     private JobResultsProvider createProvider(Client client) {
         return new JobResultsProvider(client, Settings.EMPTY);
     }

@@ -12,19 +12,15 @@ import org.elasticsearch.cli.ExitCodes;
 import org.elasticsearch.cli.UserException;
 import org.elasticsearch.common.CheckedFunction;
 import org.elasticsearch.common.CheckedSupplier;
-import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.common.settings.KeyStoreWrapper;
 import org.elasticsearch.common.settings.SecureString;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.xcontent.DeprecationHandler;
-import org.elasticsearch.common.xcontent.NamedXContentRegistry;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
-import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.env.TestEnvironment;
 import org.elasticsearch.protocol.xpack.XPackInfoResponse;

@@ -443,21 +439,6 @@ public class SetupPasswordToolTests extends CommandTestCase {
         }
     }

-    private String parsePassword(String value) throws IOException {
-        try (XContentParser parser = JsonXContent.jsonXContent
-            .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, value)) {
-            XContentParser.Token token = parser.nextToken();
-            if (token == XContentParser.Token.START_OBJECT) {
-                if (parser.nextToken() == XContentParser.Token.FIELD_NAME) {
-                    if (parser.nextToken() == XContentParser.Token.VALUE_STRING) {
-                        return parser.text();
-                    }
-                }
-            }
-        }
-        throw new RuntimeException("Did not properly parse password.");
-    }
-
     private URL authenticateUrl(URL url) throws MalformedURLException, URISyntaxException {
         return new URL(url, (url.toURI().getPath() + "/_security/_authenticate").replaceAll("/+", "/") + "?pretty");
     }

@@ -178,10 +178,4 @@ public class TriggerServiceTests extends ESTestCase {
         newActions.add(actionWrapper);
         when(watch.actions()).thenReturn(newActions);
     }
-
-    private void setTransform(Watch watch, String type) {
-        ExecutableTransform transform = mock(ExecutableTransform.class);
-        when(transform.type()).thenReturn(type);
-        when(watch.transform()).thenReturn(transform);
-    }
 }