Build: Fix compiler warnings
We have a handful of compiler warnings, mostly caused by passing an array to a varargs method. This change fixes these warnings and adds -Werror so we don't accumulate any more of them. Note this does *not* enable deprecation or unchecked-type warnings, so those remain "hidden". We should work towards removing those as well, but this is a first step.
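For context, these warnings come from Java's varargs handling: when an array (or a bare null) is passed where an Object... parameter is expected, javac cannot tell whether it is meant as the whole varargs array or as a single element, and reports a "non-varargs call of varargs method with inexact argument type" warning. The sketch below is a minimal, hypothetical illustration of the pattern and of the explicit casts used throughout this diff; the log helper is made up for the example and is not an Elasticsearch API.

import java.util.Arrays;

public class VarargsWarningDemo {

    // Hypothetical helper with the same shape as the logger/builder calls
    // touched in this commit: the last parameter is an Object varargs.
    static void log(String message, Object... params) {
        System.out.println(message + " " + Arrays.toString(params));
    }

    public static void main(String[] args) {
        String[] indices = {"index-1", "index-2"};

        // Warns "non-varargs call of varargs method with inexact argument type
        // for last parameter": a String[] is assignable both to Object (one
        // element) and to Object[] (the whole varargs array), so the intent is
        // ambiguous. With -Werror such a warning fails the build.
        // log("closing indices [{}]", indices);

        // Cast to Object: pass the array as a single varargs element.
        log("closing indices [{}]", (Object) indices);

        // Cast to Object[]: pass the array's elements as the varargs arguments.
        log("closing indices", (Object[]) indices);

        // A bare null is ambiguous in the same way; an explicit cast such as
        // (String[]) null picks one interpretation and silences the warning.
    }
}

Either cast compiles cleanly, which is what allows -Werror to be enabled without tripping over these call sites; deprecation and unchecked warnings are a separate lint category and are deliberately left untouched here.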
parent ea4c27a089
commit 2d15cab68c
TransportCloseIndexAction.java
@@ -36,6 +36,8 @@ import org.elasticsearch.node.settings.NodeSettingsService;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
+import java.util.Arrays;
+
 /**
  * Close index action
  */
@@ -92,7 +94,7 @@ public class TransportCloseIndexAction extends TransportMasterNodeAction<CloseIn
 
             @Override
             public void onFailure(Throwable t) {
-                logger.debug("failed to close indices [{}]", t, concreteIndices);
+                logger.debug("failed to close indices [{}]", t, (Object)concreteIndices);
                 listener.onFailure(t);
             }
         });

TransportOpenIndexAction.java
@@ -36,6 +36,8 @@ import org.elasticsearch.node.settings.NodeSettingsService;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
+import java.util.Arrays;
+
 /**
  * Open index action
  */
@@ -92,7 +94,7 @@ public class TransportOpenIndexAction extends TransportMasterNodeAction<OpenInde
 
             @Override
             public void onFailure(Throwable t) {
-                logger.debug("failed to open indices [{}]", t, concreteIndices);
+                logger.debug("failed to open indices [{}]", t, (Object)concreteIndices);
                 listener.onFailure(t);
             }
         });

TransportUpdateSettingsAction.java
@@ -35,6 +35,8 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
+import java.util.Arrays;
+
 /**
  *
  */
@@ -90,7 +92,7 @@ public class TransportUpdateSettingsAction extends TransportMasterNodeAction<Upd
 
             @Override
             public void onFailure(Throwable t) {
-                logger.debug("failed to update settings on indices [{}]", t, concreteIndices);
+                logger.debug("failed to update settings on indices [{}]", t, (Object)concreteIndices);
                 listener.onFailure(t);
             }
         });

Gateway.java
@@ -34,6 +34,7 @@ import org.elasticsearch.env.NodeEnvironment;
 
 
 import java.nio.file.Path;
+import java.util.Arrays;
 
 /**
  *
@@ -166,7 +167,7 @@ public class Gateway extends AbstractComponent implements ClusterStateListener {
     public void reset() throws Exception {
         try {
             Path[] dataPaths = nodeEnv.nodeDataPaths();
-            logger.trace("removing node data paths: [{}]", dataPaths);
+            logger.trace("removing node data paths: [{}]", (Object)dataPaths);
             IOUtils.rm(dataPaths);
         } catch (Exception ex) {
             logger.debug("failed to delete shard locations", ex);

IndicesQueryParser.java
@@ -77,10 +77,10 @@ public class IndicesQueryParser implements QueryParser {
                 currentFieldName = parser.currentName();
             } else if (token == XContentParser.Token.START_OBJECT) {
                 if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
-                    innerQuery = new XContentStructure.InnerQuery(parseContext, null);
+                    innerQuery = new XContentStructure.InnerQuery(parseContext, (String[])null);
                     queryFound = true;
                 } else if (parseContext.parseFieldMatcher().match(currentFieldName, NO_MATCH_QUERY)) {
-                    innerNoMatchQuery = new XContentStructure.InnerQuery(parseContext, null);
+                    innerNoMatchQuery = new XContentStructure.InnerQuery(parseContext, (String[])null);
                 } else {
                     throw new QueryParsingException(parseContext, "[indices] query does not support [" + currentFieldName + "]");
                 }

ScoreFunctionParserMapper.java
@@ -59,7 +59,7 @@ public class ScoreFunctionParserMapper {
     public ScoreFunctionParser get(QueryParseContext parseContext, String parserName) {
         ScoreFunctionParser functionParser = get(parserName);
         if (functionParser == null) {
-            throw new QueryParsingException(parseContext, "No function with the name [" + parserName + "] is registered.", null);
+            throw new QueryParsingException(parseContext, "No function with the name [" + parserName + "] is registered.");
         }
         return functionParser;
     }

PercentilesBucketBuilder.java
@@ -40,7 +40,7 @@ public class PercentilesBucketBuilder extends BucketMetricsBuilder<PercentilesBu
     @Override
     protected void doInternalXContent(XContentBuilder builder, Params params) throws IOException {
         if (percents != null) {
-            builder.field(PercentilesBucketParser.PERCENTS.getPreferredName(), percents);
+            builder.field(PercentilesBucketParser.PERCENTS.getPreferredName(), (Object[])percents);
         }
     }
 

NettyTransport.java
@@ -442,7 +442,7 @@ public class NettyTransport extends AbstractLifecycleComponent<Transport> implem
             for (int i = 0; i < hostAddresses.length; i++) {
                 addresses[i] = NetworkAddress.format(hostAddresses[i]);
             }
-            logger.debug("binding server bootstrap to: {}", addresses);
+            logger.debug("binding server bootstrap to: {}", (Object)addresses);
         }
         for (InetAddress hostAddress : hostAddresses) {
             bindServerBootstrap(name, hostAddress, settings);

BasicAnalysisBackwardCompatibilityIT.java
@@ -60,7 +60,7 @@ public class BasicAnalysisBackwardCompatibilityIT extends ESBackcompatTestCase {
             fields[i] = "type=string,analyzer=" + analyzer;
         }
         assertAcked(prepareCreate("test")
-                .addMapping("type", fields)
+                .addMapping("type", (Object[])fields)
                 .setSettings(indexSettings()));
         ensureYellow();
         InputOutput[] inout = new InputOutput[numFields];

IndexNameExpressionResolverTests.java
@@ -114,14 +114,14 @@ public class IndexNameExpressionResolverTests extends ESTestCase {
         String[] results = indexNameExpressionResolver.concreteIndices(context, Strings.EMPTY_ARRAY);
         assertEquals(3, results.length);
 
-        results = indexNameExpressionResolver.concreteIndices(context, null);
+        results = indexNameExpressionResolver.concreteIndices(context, (String[])null);
         assertEquals(3, results.length);
 
         context = new IndexNameExpressionResolver.Context(state, IndicesOptions.strictExpand());
         results = indexNameExpressionResolver.concreteIndices(context, Strings.EMPTY_ARRAY);
         assertEquals(4, results.length);
 
-        results = indexNameExpressionResolver.concreteIndices(context, null);
+        results = indexNameExpressionResolver.concreteIndices(context, (String[])null);
         assertEquals(4, results.length);
 
         context = new IndexNameExpressionResolver.Context(state, IndicesOptions.strictExpandOpen());
@@ -348,7 +348,7 @@ public class IndexNameExpressionResolverTests extends ESTestCase {
         assertEquals(2, results.length);
         assertThat(results, arrayContainingInAnyOrder("foo", "foobar"));
 
-        results = indexNameExpressionResolver.concreteIndices(context, null);
+        results = indexNameExpressionResolver.concreteIndices(context, (String[])null);
         assertEquals(0, results.length);
 
         results = indexNameExpressionResolver.concreteIndices(context, Strings.EMPTY_ARRAY);

MultiDataPathUpgraderTests.java
@@ -179,7 +179,7 @@ public class MultiDataPathUpgraderTests extends ESTestCase {
         OldIndexBackwardsCompatibilityIT.copyIndex(logger, src, indexName, multiDataPath);
         final ShardPath shardPath = new ShardPath(false, nodeEnvironment.availableShardPaths(new ShardId(indexName, 0))[0], nodeEnvironment.availableShardPaths(new ShardId(indexName, 0))[0], IndexMetaData.INDEX_UUID_NA_VALUE, new ShardId(indexName, 0));
 
-        logger.info("{}", FileSystemUtils.files(shardPath.resolveIndex()));
+        logger.info("{}", (Object)FileSystemUtils.files(shardPath.resolveIndex()));
 
         MultiDataPathUpgrader helper = new MultiDataPathUpgrader(nodeEnvironment);
         helper.upgrade(new ShardId(indexName, 0), shardPath);

IndexShardTests.java
@@ -148,7 +148,7 @@ public class IndexShardTests extends ESSingleNodeTestCase {
         ensureGreen();
         NodeEnvironment env = getInstanceFromNode(NodeEnvironment.class);
         Path[] shardPaths = env.availableShardPaths(new ShardId("test", 0));
-        logger.info("--> paths: [{}]", shardPaths);
+        logger.info("--> paths: [{}]", (Object)shardPaths);
         // Should not be able to acquire the lock because it's already open
         try {
             NodeEnvironment.acquireFSLockForPaths(Settings.EMPTY, shardPaths);

OpenCloseIndexIT.java
@@ -196,7 +196,7 @@ public class OpenCloseIndexIT extends ESIntegTestCase {
     @Test(expected = ActionRequestValidationException.class)
     public void testCloseNullIndex() {
         Client client = client();
-        client.admin().indices().prepareClose(null).execute().actionGet();
+        client.admin().indices().prepareClose((String[])null).execute().actionGet();
     }
 
     @Test(expected = ActionRequestValidationException.class)
@@ -208,7 +208,7 @@ public class OpenCloseIndexIT extends ESIntegTestCase {
     @Test(expected = ActionRequestValidationException.class)
     public void testOpenNullIndex() {
         Client client = client();
-        client.admin().indices().prepareOpen(null).execute().actionGet();
+        client.admin().indices().prepareOpen((String[])null).execute().actionGet();
     }
 
     @Test

SimpleIndexTemplateIT.java
@@ -263,7 +263,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
     @Test
     public void testThatInvalidGetIndexTemplatesFails() throws Exception {
         logger.info("--> get template null");
-        testExpectActionRequestValidationException(null);
+        testExpectActionRequestValidationException((String[])null);
 
         logger.info("--> get template empty");
         testExpectActionRequestValidationException("");

SearchQueryIT.java
@@ -1365,7 +1365,8 @@ public class SearchQueryIT extends ESIntegTestCase {
         assertHitCount(searchResponse, 1l);
         assertThat(searchResponse.getHits().hits().length, equalTo(1));
 
-        searchResponse = client().prepareSearch().setQuery(constantScoreQuery(idsQuery(null).ids("1"))).get();
+        // TODO: why do we even support passing null??
+        searchResponse = client().prepareSearch().setQuery(constantScoreQuery(idsQuery((String[])null).ids("1"))).get();
         assertHitCount(searchResponse, 1l);
         assertThat(searchResponse.getHits().hits().length, equalTo(1));
 
@@ -1403,7 +1404,7 @@ public class SearchQueryIT extends ESIntegTestCase {
         assertHitCount(searchResponse, 1l);
         assertThat(searchResponse.getHits().hits().length, equalTo(1));
 
-        searchResponse = client().prepareSearch().setQuery(idsQuery(null).ids("1")).get();
+        searchResponse = client().prepareSearch().setQuery(idsQuery((String[])null).ids("1")).get();
         assertHitCount(searchResponse, 1l);
         assertThat(searchResponse.getHits().hits().length, equalTo(1));
 

ContextSuggestSearchIT.java
@@ -170,7 +170,7 @@ public class ContextSuggestSearchIT extends ESIntegTestCase {
                 .startObject("context")
                 .startObject("location")
                 .field("type", "geo")
-                .array("precision", precisions.toArray(new Integer[precisions.size()]))
+                .array("precision", (Object[])precisions.toArray(new Integer[precisions.size()]))
                 .endObject()
                 .endObject().endObject()
                 .endObject().endObject();
@@ -185,7 +185,7 @@ public class ContextSuggestSearchIT extends ESIntegTestCase {
                 .startObject("context")
                 .startObject("location")
                 .field("type", "geo")
-                .array("precision", precisions.toArray(new Integer[precisions.size()]))
+                .array("precision", (Object[])precisions.toArray(new Integer[precisions.size()]))
                 .endObject()
                 .endObject().endObject()
                 .endObject().endObject();