mirror of https://github.com/honeymoose/OpenSearch.git
synced 2025-03-09 14:34:43 +00:00
Fix errors reported by error-prone
I compiled elasticsearch with error-prone (https://github.com/google/error-prone). This commit fixes the 13 errors reported by the tool.
This commit is contained in:
parent 0beda40069
commit 9a29705c49

@@ -61,5 +61,6 @@ public interface Compressor {
     /**
      * @deprecated Used for backward comp. since we now use Lucene compressed codec.
      */
+    @Deprecated
     CompressedIndexInput indexInput(IndexInput in) throws IOException;
 }

@@ -332,6 +332,7 @@ public final class UTF8StreamWriter extends Writer {
     /**
     * @deprecated Replaced by {@link #setOutput(OutputStream)}
     */
+    @Deprecated
    public UTF8StreamWriter setOutputStream(OutputStream out) {
        return this.setOutput(out);
    }

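The two hunks above likely address error-prone's DepAnn check: an element carrying a @deprecated javadoc tag should also carry the @Deprecated annotation, because the doc tag lives only in the source while the annotation is recorded in the class file, where other compilation units and tools can see it. A minimal sketch of the rule, using a hypothetical class that is not from the Elasticsearch tree:

// Hypothetical class, illustrating the DepAnn pattern only.
public class LegacyApi {
    /**
     * @deprecated Replaced by {@link #write()}; kept for backward compatibility.
     */
    @Deprecated // without this, the deprecation is invisible in the compiled class file
    public void writeLegacy() {}

    public void write() {}
}
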
@@ -173,7 +173,7 @@ public class ZenDiscovery extends AbstractLifecycleComponent<Discovery> implemen
         this.rejoinOnMasterGone = settings.getAsBoolean(SETTING_REJOIN_ON_MASTER_GONE, true);
 
         if (this.joinRetryAttempts < 1) {
-            throw new ElasticsearchIllegalArgumentException("'" + SETTING_JOIN_RETRY_ATTEMPTS + "' must be a positive number. got [" + this.SETTING_JOIN_RETRY_ATTEMPTS + "]");
+            throw new ElasticsearchIllegalArgumentException("'" + SETTING_JOIN_RETRY_ATTEMPTS + "' must be a positive number. got [" + SETTING_JOIN_RETRY_ATTEMPTS + "]");
         }
         if (this.maxPingsFromAnotherMaster < 1) {
             throw new ElasticsearchIllegalArgumentException("'" + SETTING_MAX_PINGS_FROM_ANOTHER_MASTER + "' must be a positive number. got [" + this.maxPingsFromAnotherMaster + "]");

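This hunk removes a static field qualified through "this". The code compiles, but error-prone rejects static members reached through an expression: the receiver plays no part in resolving the member (for a qualified static field access, the receiver expression is evaluated and then discarded), so the qualifier only misleads. The same check fires again further down in RecoverySourceHandler, where the field is reached through another object reference. A minimal sketch with hypothetical names:

// Hypothetical names, illustrating the pattern only.
class DiscoverySettings {
    static final String SETTING_RETRY = "discovery.retry";

    String describe() {
        // Flagged form: return this.SETTING_RETRY; it resolves statically,
        // so "this" is irrelevant and should be dropped (or replaced by the class name).
        return SETTING_RETRY;
    }
}
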
@@ -93,7 +93,7 @@ public class KeepWordFilterFactory extends AbstractTokenFilterFactory {
         if (version.onOrAfter(Version.LUCENE_4_4)) {
             return new KeepWordFilter(tokenStream, keepWords);
         } else {
-            @SuppressWarnings("deprecated")
+            @SuppressWarnings("deprecation")
             final TokenStream filter = new Lucene43KeepWordFilter(enablePositionIncrements, tokenStream, keepWords);
             return filter;
         }

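This hunk, and the analogous ones below, corrects a misspelled warning key: the key javac recognizes is "deprecation", so @SuppressWarnings("deprecated") suppresses nothing and the deprecation warning for the Lucene43* classes still fires. A minimal sketch with hypothetical classes:

// Only the "deprecation" key is recognized by javac; "deprecated" is silently ignored.
class Old {
    @Deprecated
    static int legacy() { return 0; }
}

class Caller {
    @SuppressWarnings("deprecation") // with "deprecated" here, the warning below would remain
    static int run() {
        return Old.legacy(); // would otherwise emit a deprecation warning
    }
}
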
@@ -57,7 +57,7 @@ public class LengthTokenFilterFactory extends AbstractTokenFilterFactory {
         if (version.onOrAfter(Version.LUCENE_4_4)) {
             return new LengthFilter(tokenStream, min, max);
         } else {
-            @SuppressWarnings("deprecated")
+            @SuppressWarnings("deprecation")
             final TokenStream filter = new Lucene43LengthFilter(enablePositionIncrements, tokenStream, min, max);
             return filter;
         }

@@ -55,7 +55,7 @@ public class TrimTokenFilterFactory extends AbstractTokenFilterFactory {
         if (version.onOrAfter(Version.LUCENE_4_4_0)) {
             return new TrimFilter(tokenStream);
         } else {
-            @SuppressWarnings("deprecated")
+            @SuppressWarnings("deprecation")
             final TokenStream filter = new Lucene43TrimFilter(tokenStream, updateOffsets);
             return filter;
         }

@@ -98,7 +98,7 @@ public enum PreBuiltTokenFilters {
         if (version.luceneVersion.onOrAfter(org.apache.lucene.util.Version.LUCENE_4_4_0)) {
             return new StopFilter(tokenStream, StopAnalyzer.ENGLISH_STOP_WORDS_SET);
         } else {
-            @SuppressWarnings("deprecated")
+            @SuppressWarnings("deprecation")
             final TokenStream filter = new Lucene43StopFilter(true, tokenStream, StopAnalyzer.ENGLISH_STOP_WORDS_SET);
             return filter;
         }

@@ -111,7 +111,7 @@ public enum PreBuiltTokenFilters {
         if (version.luceneVersion.onOrAfter(org.apache.lucene.util.Version.LUCENE_4_4_0)) {
             return new TrimFilter(tokenStream);
         } else {
-            @SuppressWarnings("deprecated")
+            @SuppressWarnings("deprecation")
             final TokenStream filter = new Lucene43TrimFilter(tokenStream, true);
             return filter;
         }

@@ -138,7 +138,7 @@ public enum PreBuiltTokenFilters {
         if (version.luceneVersion.onOrAfter(org.apache.lucene.util.Version.LUCENE_4_4_0)) {
             return new LengthFilter(tokenStream, 0, Integer.MAX_VALUE);
         } else {
-            @SuppressWarnings("deprecated")
+            @SuppressWarnings("deprecation")
             final TokenStream filter = new Lucene43LengthFilter(true, tokenStream, 0, Integer.MAX_VALUE);
             return filter;
         }

@@ -200,7 +200,7 @@ public enum PreBuiltTokenFilters {
         if (version.luceneVersion.onOrAfter(org.apache.lucene.util.Version.LUCENE_4_4_0)) {
             return new NGramTokenFilter(tokenStream);
         } else {
-            @SuppressWarnings("deprecated")
+            @SuppressWarnings("deprecation")
             final TokenStream filter = new Lucene43NGramTokenFilter(tokenStream);
             return filter;
         }

@@ -213,7 +213,7 @@ public enum PreBuiltTokenFilters {
         if (version.luceneVersion.onOrAfter(org.apache.lucene.util.Version.LUCENE_4_4_0)) {
             return new EdgeNGramTokenFilter(tokenStream, EdgeNGramTokenFilter.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenFilter.DEFAULT_MAX_GRAM_SIZE);
         } else {
-            @SuppressWarnings("deprecated")
+            @SuppressWarnings("deprecation")
             final TokenStream filter = new Lucene43EdgeNGramTokenFilter(tokenStream, EdgeNGramTokenFilter.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenFilter.DEFAULT_MAX_GRAM_SIZE);
             return filter;
         }

@@ -235,7 +235,7 @@ public class RecoverySourceHandler implements Engine.RecoveryHandler {
             // recovered while ongoing large segment recoveries are
             // happening. It also allows these pools to be configured
             // separately.
-            if (fileSize > recoverySettings.SMALL_FILE_CUTOFF_BYTES) {
+            if (fileSize > RecoverySettings.SMALL_FILE_CUTOFF_BYTES) {
                 pool = recoverySettings.concurrentStreamPool();
             } else {
                 pool = recoverySettings.concurrentSmallFileStreamPool();

@@ -160,8 +160,7 @@ public class ScriptService extends AbstractComponent implements Closeable {
         if (cacheExpire != null) {
             cacheBuilder.expireAfterAccess(cacheExpire.nanos(), TimeUnit.NANOSECONDS);
         }
-        cacheBuilder.removalListener(new ScriptCacheRemovalListener());
-        this.cache = cacheBuilder.build();
+        this.cache = cacheBuilder.removalListener(new ScriptCacheRemovalListener()).build();
 
         ImmutableMap.Builder<String, ScriptEngineService> enginesByLangBuilder = ImmutableMap.builder();
         ImmutableMap.Builder<String, ScriptEngineService> enginesByExtBuilder = ImmutableMap.builder();

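The ScriptService hunk reacts to a discarded return value: Guava's CacheBuilder.removalListener() returns a builder with narrowed type parameters, and its javadoc directs callers to keep using the returned reference rather than the original builder. Chaining the call, as the fix does, is the usual shape. A minimal sketch with hypothetical types:

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.RemovalListener;
import com.google.common.cache.RemovalNotification;

// Hypothetical cache, illustrating the chained-builder pattern.
class ScriptCacheExample {
    Cache<String, String> build() {
        RemovalListener<String, String> listener = new RemovalListener<String, String>() {
            @Override
            public void onRemoval(RemovalNotification<String, String> notification) {
                // react to eviction here
            }
        };
        return CacheBuilder.newBuilder()
                .maximumSize(100)
                .removalListener(listener) // use the returned, narrowed builder
                .build();
    }
}
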
@@ -581,74 +581,63 @@ public class MetaDataTests extends ElasticsearchTestCase {
 
     @Test
     public void testIsAllIndices_null() throws Exception {
-        MetaData metaData = MetaData.builder().build();
-        assertThat(metaData.isAllIndices(null), equalTo(true));
+        assertThat(MetaData.isAllIndices(null), equalTo(true));
     }
 
     @Test
     public void testIsAllIndices_empty() throws Exception {
-        MetaData metaData = MetaData.builder().build();
-        assertThat(metaData.isAllIndices(new String[0]), equalTo(true));
+        assertThat(MetaData.isAllIndices(new String[0]), equalTo(true));
     }
 
     @Test
     public void testIsAllIndices_explicitAll() throws Exception {
-        MetaData metaData = MetaData.builder().build();
-        assertThat(metaData.isAllIndices(new String[]{"_all"}), equalTo(true));
+        assertThat(MetaData.isAllIndices(new String[]{"_all"}), equalTo(true));
     }
 
     @Test
     public void testIsAllIndices_explicitAllPlusOther() throws Exception {
-        MetaData metaData = MetaData.builder().build();
-        assertThat(metaData.isAllIndices(new String[]{"_all", "other"}), equalTo(false));
+        assertThat(MetaData.isAllIndices(new String[]{"_all", "other"}), equalTo(false));
     }
 
     @Test
     public void testIsAllIndices_normalIndexes() throws Exception {
-        MetaData metaData = MetaData.builder().build();
-        assertThat(metaData.isAllIndices(new String[]{"index1", "index2", "index3"}), equalTo(false));
+        assertThat(MetaData.isAllIndices(new String[]{"index1", "index2", "index3"}), equalTo(false));
     }
 
     @Test
     public void testIsAllIndices_wildcard() throws Exception {
-        MetaData metaData = MetaData.builder().build();
-        assertThat(metaData.isAllIndices(new String[]{"*"}), equalTo(false));
+        assertThat(MetaData.isAllIndices(new String[]{"*"}), equalTo(false));
     }
 
     @Test
     public void testIsExplicitAllIndices_null() throws Exception {
-        MetaData metaData = MetaData.builder().build();
-        assertThat(metaData.isExplicitAllPattern(null), equalTo(false));
+        assertThat(MetaData.isExplicitAllPattern(null), equalTo(false));
     }
 
     @Test
     public void testIsExplicitAllIndices_empty() throws Exception {
-        MetaData metaData = MetaData.builder().build();
-        assertThat(metaData.isExplicitAllPattern(new String[0]), equalTo(false));
+        assertThat(MetaData.isExplicitAllPattern(new String[0]), equalTo(false));
     }
 
     @Test
     public void testIsExplicitAllIndices_explicitAll() throws Exception {
-        MetaData metaData = MetaData.builder().build();
-        assertThat(metaData.isExplicitAllPattern(new String[]{"_all"}), equalTo(true));
+        assertThat(MetaData.isExplicitAllPattern(new String[]{"_all"}), equalTo(true));
     }
 
     @Test
     public void testIsExplicitAllIndices_explicitAllPlusOther() throws Exception {
-        MetaData metaData = MetaData.builder().build();
-        assertThat(metaData.isExplicitAllPattern(new String[]{"_all", "other"}), equalTo(false));
+        assertThat(MetaData.isExplicitAllPattern(new String[]{"_all", "other"}), equalTo(false));
     }
 
     @Test
     public void testIsExplicitAllIndices_normalIndexes() throws Exception {
-        MetaData metaData = MetaData.builder().build();
-        assertThat(metaData.isExplicitAllPattern(new String[]{"index1", "index2", "index3"}), equalTo(false));
+        assertThat(MetaData.isExplicitAllPattern(new String[]{"index1", "index2", "index3"}), equalTo(false));
     }
 
     @Test
     public void testIsExplicitAllIndices_wildcard() throws Exception {
-        MetaData metaData = MetaData.builder().build();
-        assertThat(metaData.isExplicitAllPattern(new String[]{"*"}), equalTo(false));
+        assertThat(MetaData.isExplicitAllPattern(new String[]{"*"}), equalTo(false));
     }
 
     @Test

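isAllIndices and isExplicitAllPattern are static, so error-prone rejects calling them through a MetaData instance, and the now-unused locals disappear with the fix. The rule exists because the receiver of a static call never affects dispatch, which can mislead badly. The classic example of the trap, as a minimal sketch:

class StaticCallViaInstance {
    static void example(Thread worker) throws InterruptedException {
        // Flagged form: worker.sleep(1000). It resolves to Thread.sleep and pauses
        // the *current* thread, not worker, so the instance qualifier misleads.
        Thread.sleep(1000); // qualify static calls with the class name
    }
}
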
@@ -185,7 +185,7 @@ public class ShapeBuilderTests extends ElasticsearchTestCase {
                 .point(40.0, -50.0)
                 .close().build();
             fail("Polygon self-intersection");
-        } catch (Throwable e) {}
+        } catch (Exception e) {}
 
     }
 

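This hunk and the remaining ones all narrow catch (Throwable) to catch (Exception) in expected-exception tests. The distinction matters: JUnit's fail() throws AssertionError, which extends Error rather than Exception, so a catch (Throwable) swallows the failure and the test passes even when no exception was thrown. Narrowing to Exception lets the AssertionError propagate. A minimal sketch of the corrected pattern, with a hypothetical helper under test:

import static org.junit.Assert.fail;

import org.junit.Test;

public class ExpectedExceptionExample {
    @Test
    public void rejectsBadInput() {
        try {
            parseBadInput(); // hypothetical method under test
            fail("expected an exception"); // AssertionError escapes the catch below
        } catch (Exception e) {
            // expected
        }
    }

    private static void parseBadInput() {
        throw new IllegalArgumentException("bad input");
    }
}
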
@@ -186,7 +186,7 @@ public class TemplateQueryTest extends ElasticsearchIntegrationTest {
             searchRequest.templateSource(bytesRef, false);
             client().search(searchRequest).get();
             fail("expected exception");
-        } catch (Throwable ex) {
+        } catch (Exception ex) {
             // expected - no params
         }
         String query = "{ \"template\" : { \"query\": {\"match_all\": {}}, \"size\" : \"{{my_size}}\" }, \"params\" : { \"my_size\": 1 } }";

@@ -93,7 +93,7 @@ public class GeoPointParsingTests extends ElasticsearchTestCase {
         try {
             GeoUtils.parseGeoPoint(parser);
             assertTrue(false);
-        } catch (Throwable e) {}
+        } catch (Exception e) {}
     }
 
     @Test

@@ -109,7 +109,7 @@ public class GeoPointParsingTests extends ElasticsearchTestCase {
         try {
             GeoUtils.parseGeoPoint(parser);
             assertTrue(false);
-        } catch (Throwable e) {}
+        } catch (Exception e) {}
     }
 
     @Test

@@ -125,7 +125,7 @@ public class GeoPointParsingTests extends ElasticsearchTestCase {
         try {
             GeoUtils.parseGeoPoint(parser);
             assertTrue(false);
-        } catch (Throwable e) {}
+        } catch (Exception e) {}
     }
 
     @Test

@@ -141,7 +141,7 @@ public class GeoPointParsingTests extends ElasticsearchTestCase {
         try {
             GeoUtils.parseGeoPoint(parser);
             assertTrue(false);
-        } catch (Throwable e) {}
+        } catch (Exception e) {}
     }
 
     private static XContentParser objectLatLon(double lat, double lon) throws IOException {