[JAVA7 Upgrade] Replace try/finally with try-with-resources

This commit is contained in:
Simon Willnauer 2014-03-27 16:00:25 +01:00
parent 1952df982b
commit 5619ef5951
40 changed files with 67 additions and 206 deletions

View File

@ -92,8 +92,7 @@ public class ClusterRerouteRequest extends AcknowledgedRequest<ClusterRerouteReq
* Sets the source for the request.
*/
public ClusterRerouteRequest source(BytesReference source) throws Exception {
XContentParser parser = XContentHelper.createParser(source);
try {
try (XContentParser parser = XContentHelper.createParser(source)) {
XContentParser.Token token;
String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
@ -113,8 +112,6 @@ public class ClusterRerouteRequest extends AcknowledgedRequest<ClusterRerouteReq
}
}
}
} finally {
parser.close();
}
return this;
}

View File

@ -257,9 +257,8 @@ public class BulkRequest extends ActionRequest<BulkRequest> {
break;
}
// now parse the action
XContentParser parser = xContent.createParser(data.slice(from, nextMarker - from));
try {
try (XContentParser parser = xContent.createParser(data.slice(from, nextMarker - from))) {
// move pointers
from = nextMarker + 1;
@ -379,8 +378,6 @@ public class BulkRequest extends ActionRequest<BulkRequest> {
// move pointers
from = nextMarker + 1;
}
} finally {
parser.close();
}
}
return this;

View File

@ -279,8 +279,7 @@ public class MultiGetRequest extends ActionRequest<MultiGetRequest> implements I
}
public MultiGetRequest add(@Nullable String defaultIndex, @Nullable String defaultType, @Nullable String[] defaultFields, @Nullable FetchSourceContext defaultFetchSource, @Nullable String defaultRouting, BytesReference data, boolean allowExplicitIndex) throws Exception {
XContentParser parser = XContentFactory.xContent(data).createParser(data);
try {
try (XContentParser parser = XContentFactory.xContent(data).createParser(data)) {
XContentParser.Token token;
String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
@ -400,8 +399,6 @@ public class MultiGetRequest extends ActionRequest<MultiGetRequest> implements I
}
}
}
} finally {
parser.close();
}
return this;
}

View File

@ -102,8 +102,7 @@ public class MultiPercolateRequest extends ActionRequest<MultiPercolateRequest>
// now parse the action
if (nextMarker - from > 0) {
XContentParser parser = xContent.createParser(data.slice(from, nextMarker - from));
try {
try (XContentParser parser = xContent.createParser(data.slice(from, nextMarker - from))) {
// Move to START_OBJECT, if token is null, its an empty data
XContentParser.Token token = parser.nextToken();
if (token != null) {
@ -127,8 +126,6 @@ public class MultiPercolateRequest extends ActionRequest<MultiPercolateRequest>
throw new ElasticsearchParseException(percolateAction + " isn't a supported percolate operation");
}
}
} finally {
parser.close();
}
}

View File

@ -280,13 +280,10 @@ public class PercolateSourceBuilder implements ToXContent {
if (contentType == builder.contentType()) {
builder.rawField("doc", doc);
} else {
XContentParser parser = XContentFactory.xContent(contentType).createParser(doc);
try {
try (XContentParser parser = XContentFactory.xContent(contentType).createParser(doc)) {
parser.nextToken();
builder.field("doc");
builder.copyCurrentStructure(parser);
} finally {
parser.close();
}
}
return builder;

View File

@ -115,8 +115,7 @@ public class MultiSearchRequest extends ActionRequest<MultiSearchRequest> {
// now parse the action
if (nextMarker - from > 0) {
XContentParser parser = xContent.createParser(data.slice(from, nextMarker - from));
try {
try (XContentParser parser = xContent.createParser(data.slice(from, nextMarker - from))) {
// Move to START_OBJECT, if token is null, its an empty data
XContentParser.Token token = parser.nextToken();
if (token != null) {
@ -180,8 +179,6 @@ public class MultiSearchRequest extends ActionRequest<MultiSearchRequest> {
}
}
}
} finally {
parser.close();
}
}
searchRequest.indicesOptions(IndicesOptions.fromOptions(ignoreUnavailable, allowNoIndices, expandWildcardsOpen, expandWildcardsClosed));

View File

@ -79,8 +79,7 @@ public class MultiTermVectorsRequest extends ActionRequest<MultiTermVectorsReque
XContentParser.Token token;
String currentFieldName = null;
if (data.length() > 0) {
XContentParser parser = XContentFactory.xContent(data).createParser(data);
try {
try (XContentParser parser = XContentFactory.xContent(data).createParser(data)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
@ -118,9 +117,6 @@ public class MultiTermVectorsRequest extends ActionRequest<MultiTermVectorsReque
}
}
}
finally {
parser.close();
}
}
for (String id : ids) {
TermVectorRequest curRequest = new TermVectorRequest(template);

View File

@ -536,8 +536,7 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
public UpdateRequest source(BytesReference source) throws Exception {
XContentType xContentType = XContentFactory.xContentType(source);
XContentParser parser = XContentFactory.xContent(xContentType).createParser(source);
try {
try (XContentParser parser = XContentFactory.xContent(xContentType).createParser(source)) {
XContentParser.Token t = parser.nextToken();
if (t == null) {
return this;
@ -564,8 +563,6 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
docAsUpsert(parser.booleanValue());
}
}
} finally {
parser.close();
}
return this;
}

View File

@ -183,11 +183,8 @@ public class AliasMetaData {
return this;
}
try {
XContentParser parser = XContentFactory.xContent(filter).createParser(filter);
try {
try (XContentParser parser = XContentFactory.xContent(filter).createParser(filter)) {
filter(parser.mapOrdered());
} finally {
parser.close();
}
return this;
} catch (IOException e) {

View File

@ -477,11 +477,8 @@ public class IndexMetaData {
}
public Builder putMapping(String type, String source) throws IOException {
XContentParser parser = XContentFactory.xContent(source).createParser(source);
try {
try (XContentParser parser = XContentFactory.xContent(source).createParser(source)) {
putMapping(new MappingMetaData(type, parser.mapOrdered()));
} finally {
parser.close();
}
return this;
}

View File

@ -51,8 +51,7 @@ public class Strings {
private static final char EXTENSION_SEPARATOR = '.';
public static void tabify(int tabs, String from, StringBuilder to) throws Exception {
final BufferedReader reader = new BufferedReader(new FastStringReader(from));
try {
try (BufferedReader reader = new BufferedReader(new FastStringReader(from))) {
String line;
while ((line = reader.readLine()) != null) {
for (int i = 0; i < tabs; i++) {
@ -60,14 +59,11 @@ public class Strings {
}
to.append(line).append('\n');
}
} finally {
reader.close();
}
}
public static void spaceify(int spaces, String from, StringBuilder to) throws Exception {
final BufferedReader reader = new BufferedReader(new FastStringReader(from));
try {
try (BufferedReader reader = new BufferedReader(new FastStringReader(from))) {
String line;
while ((line = reader.readLine()) != null) {
for (int i = 0; i < spaces; i++) {
@ -75,8 +71,6 @@ public class Strings {
}
to.append(line).append('\n');
}
} finally {
reader.close();
}
}

View File

@ -453,40 +453,37 @@ public final class Errors implements Serializable {
* Returns the formatted message for an exception with the specified messages.
*/
public static String format(String heading, Collection<Message> errorMessages) {
final Formatter fmt = new Formatter(Locale.ROOT);
try {
try (Formatter fmt = new Formatter(Locale.ROOT)) {
fmt.format(heading).format(":%n%n");
int index = 1;
boolean displayCauses = getOnlyCause(errorMessages) == null;
for (Message errorMessage : errorMessages) {
fmt.format("%s) %s%n", index++, errorMessage.getMessage());
List<Object> dependencies = errorMessage.getSources();
for (int i = dependencies.size() - 1; i >= 0; i--) {
Object source = dependencies.get(i);
formatSource(fmt, source);
}
Throwable cause = errorMessage.getCause();
if (displayCauses && cause != null) {
StringWriter writer = new StringWriter();
cause.printStackTrace(new PrintWriter(writer));
fmt.format("Caused by: %s", writer.getBuffer());
}
fmt.format("%n");
}
if (errorMessages.size() == 1) {
fmt.format("1 error");
} else {
fmt.format("%s errors", errorMessages.size());
}
return fmt.toString();
} finally {
fmt.close();
}
}

View File

@ -41,21 +41,15 @@ public abstract class XContentSettingsLoader implements SettingsLoader {
@Override
public Map<String, String> load(String source) throws IOException {
XContentParser parser = XContentFactory.xContent(contentType()).createParser(source);
try {
try (XContentParser parser = XContentFactory.xContent(contentType()).createParser(source)) {
return load(parser);
} finally {
parser.close();
}
}
@Override
public Map<String, String> load(byte[] source) throws IOException {
XContentParser parser = XContentFactory.xContent(contentType()).createParser(source);
try {
try (XContentParser parser = XContentFactory.xContent(contentType()).createParser(source)) {
return load(parser);
} finally {
parser.close();
}
}

View File

@ -363,12 +363,9 @@ public class XContentHelper {
if (contentType == rawBuilder.contentType()) {
Streams.copy(compressedStreamInput, rawBuilder.stream());
} else {
XContentParser parser = XContentFactory.xContent(contentType).createParser(compressedStreamInput);
try {
try (XContentParser parser = XContentFactory.xContent(contentType).createParser(compressedStreamInput)) {
parser.nextToken();
rawBuilder.copyCurrentStructure(parser);
} finally {
parser.close();
}
}
} else {
@ -376,12 +373,9 @@ public class XContentHelper {
if (contentType == rawBuilder.contentType()) {
source.writeTo(rawBuilder.stream());
} else {
XContentParser parser = XContentFactory.xContent(contentType).createParser(source);
try {
try (XContentParser parser = XContentFactory.xContent(contentType).createParser(source)) {
parser.nextToken();
rawBuilder.copyCurrentStructure(parser);
} finally {
parser.close();
}
}
}
@ -400,13 +394,10 @@ public class XContentHelper {
if (contentType == builder.contentType()) {
builder.rawField(field, compressedStreamInput);
} else {
XContentParser parser = XContentFactory.xContent(contentType).createParser(compressedStreamInput);
try {
try (XContentParser parser = XContentFactory.xContent(contentType).createParser(compressedStreamInput)) {
parser.nextToken();
builder.field(field);
builder.copyCurrentStructure(parser);
} finally {
parser.close();
}
}
} else {
@ -414,13 +405,10 @@ public class XContentHelper {
if (contentType == builder.contentType()) {
builder.rawField(field, source);
} else {
XContentParser parser = XContentFactory.xContent(contentType).createParser(source);
try {
try (XContentParser parser = XContentFactory.xContent(contentType).createParser(source)) {
parser.nextToken();
builder.field(field);
builder.copyCurrentStructure(parser);
} finally {
parser.close();
}
}
}

View File

@ -51,24 +51,18 @@ public class SmileXContentGenerator extends JsonXContentGenerator {
@Override
public void writeRawField(String fieldName, InputStream content, OutputStream bos) throws IOException {
writeFieldName(fieldName);
SmileParser parser = SmileXContent.smileFactory.createParser(content);
try {
try (SmileParser parser = SmileXContent.smileFactory.createParser(content)) {
parser.nextToken();
generator.copyCurrentStructure(parser);
} finally {
parser.close();
}
}
@Override
public void writeRawField(String fieldName, byte[] content, OutputStream bos) throws IOException {
writeFieldName(fieldName);
SmileParser parser = SmileXContent.smileFactory.createParser(content);
try {
try (SmileParser parser = SmileXContent.smileFactory.createParser(content)) {
parser.nextToken();
generator.copyCurrentStructure(parser);
} finally {
parser.close();
}
}
@ -92,12 +86,9 @@ public class SmileXContentGenerator extends JsonXContentGenerator {
@Override
public void writeRawField(String fieldName, byte[] content, int offset, int length, OutputStream bos) throws IOException {
writeFieldName(fieldName);
SmileParser parser = SmileXContent.smileFactory.createParser(content, offset, length);
try {
try (SmileParser parser = SmileXContent.smileFactory.createParser(content, offset, length)) {
parser.nextToken();
generator.copyCurrentStructure(parser);
} finally {
parser.close();
}
}
}

View File

@ -51,24 +51,18 @@ public class YamlXContentGenerator extends JsonXContentGenerator {
@Override
public void writeRawField(String fieldName, InputStream content, OutputStream bos) throws IOException {
writeFieldName(fieldName);
YAMLParser parser = YamlXContent.yamlFactory.createParser(content);
try {
try (YAMLParser parser = YamlXContent.yamlFactory.createParser(content)) {
parser.nextToken();
generator.copyCurrentStructure(parser);
} finally {
parser.close();
}
}
@Override
public void writeRawField(String fieldName, byte[] content, OutputStream bos) throws IOException {
writeFieldName(fieldName);
YAMLParser parser = YamlXContent.yamlFactory.createParser(content);
try {
try (YAMLParser parser = YamlXContent.yamlFactory.createParser(content)) {
parser.nextToken();
generator.copyCurrentStructure(parser);
} finally {
parser.close();
}
}
@ -92,12 +86,9 @@ public class YamlXContentGenerator extends JsonXContentGenerator {
@Override
public void writeRawField(String fieldName, byte[] content, int offset, int length, OutputStream bos) throws IOException {
writeFieldName(fieldName);
YAMLParser parser = YamlXContent.yamlFactory.createParser(content, offset, length);
try {
try (YAMLParser parser = YamlXContent.yamlFactory.createParser(content, offset, length)) {
parser.nextToken();
generator.copyCurrentStructure(parser);
} finally {
parser.close();
}
}
}

View File

@ -589,8 +589,7 @@ public class LocalGatewayMetaState extends AbstractComponent implements ClusterS
if (data.length == 0) {
continue;
}
XContentParser parser = XContentHelper.createParser(data, 0, data.length);
try {
try (XContentParser parser = XContentHelper.createParser(data, 0, data.length)) {
String currentFieldName = null;
XContentParser.Token token = parser.nextToken();
if (token != null) {
@ -608,8 +607,6 @@ public class LocalGatewayMetaState extends AbstractComponent implements ClusterS
}
}
}
} finally {
parser.close();
}
index = fileIndex;
metaDataFile = stateFile;

View File

@ -133,12 +133,9 @@ public class IndexAliasesService extends AbstractIndexComponent implements Itera
}
try {
byte[] filterSource = filter.uncompressed();
XContentParser parser = XContentFactory.xContent(filterSource).createParser(filterSource);
try {
try (XContentParser parser = XContentFactory.xContent(filterSource).createParser(filterSource)) {
ParsedFilter parsedFilter = indexQueryParser.parseInnerFilter(parser);
return parsedFilter == null ? null : parsedFilter.filter();
} finally {
parser.close();
}
} catch (IOException ex) {
throw new AliasFilterParsingException(index, alias, "Invalid alias filter", ex);

View File

@ -89,9 +89,8 @@ public class DoubleArrayIndexFieldData extends AbstractIndexFieldData<DoubleArra
values.add(0); // first "t" indicates null value
final float acceptableTransientOverheadRatio = fieldDataType.getSettings().getAsFloat("acceptable_transient_overhead_ratio", OrdinalsBuilder.DEFAULT_ACCEPTABLE_OVERHEAD_RATIO);
OrdinalsBuilder builder = new OrdinalsBuilder(reader.maxDoc(), acceptableTransientOverheadRatio);
boolean success = false;
try {
try (OrdinalsBuilder builder = new OrdinalsBuilder(reader.maxDoc(), acceptableTransientOverheadRatio)) {
final BytesRefIterator iter = builder.buildFromTerms(getNumericType().wrapTermsEnum(terms.iterator(null)));
BytesRef term;
while ((term = iter.next()) != null) {
@ -132,7 +131,7 @@ public class DoubleArrayIndexFieldData extends AbstractIndexFieldData<DoubleArra
if (success) {
estimator.afterLoad(null, data.getMemorySizeInBytes());
}
builder.close();
}
}

View File

@ -82,10 +82,9 @@ public class FSTBytesIndexFieldData extends AbstractBytesIndexFieldData<FSTBytes
numTerms = -1;
}
final float acceptableTransientOverheadRatio = fieldDataType.getSettings().getAsFloat("acceptable_transient_overhead_ratio", OrdinalsBuilder.DEFAULT_ACCEPTABLE_OVERHEAD_RATIO);
OrdinalsBuilder builder = new OrdinalsBuilder(numTerms, reader.maxDoc(), acceptableTransientOverheadRatio);
boolean success = false;
try {
try (OrdinalsBuilder builder = new OrdinalsBuilder(numTerms, reader.maxDoc(), acceptableTransientOverheadRatio)) {
// we don't store an ord 0 in the FST since we could have an empty string in there and FST don't support
// empty strings twice. ie. them merge fails for long output.
TermsEnum termsEnum = filter(terms, reader);
@ -93,13 +92,13 @@ public class FSTBytesIndexFieldData extends AbstractBytesIndexFieldData<FSTBytes
for (BytesRef term = termsEnum.next(); term != null; term = termsEnum.next()) {
final long termOrd = builder.nextOrdinal();
assert termOrd > 0;
fstBuilder.add(Util.toIntsRef(term, scratch), (long)termOrd);
fstBuilder.add(Util.toIntsRef(term, scratch), (long) termOrd);
docsEnum = termsEnum.docs(null, docsEnum, DocsEnum.FLAG_NONE);
for (int docId = docsEnum.nextDoc(); docId != DocsEnum.NO_MORE_DOCS; docId = docsEnum.nextDoc()) {
builder.addDoc(docId);
}
}
FST<Long> fst = fstBuilder.finish();
final Ordinals ordinals = builder.build(fieldDataType.getSettings());
@ -111,7 +110,7 @@ public class FSTBytesIndexFieldData extends AbstractBytesIndexFieldData<FSTBytes
if (success) {
estimator.afterLoad(null, data.getMemorySizeInBytes());
}
builder.close();
}
}
}

View File

@ -88,9 +88,8 @@ public class FloatArrayIndexFieldData extends AbstractIndexFieldData<FloatArrayA
values.add(0); // first "t" indicates null value
final float acceptableTransientOverheadRatio = fieldDataType.getSettings().getAsFloat("acceptable_transient_overhead_ratio", OrdinalsBuilder.DEFAULT_ACCEPTABLE_OVERHEAD_RATIO);
OrdinalsBuilder builder = new OrdinalsBuilder(reader.maxDoc(), acceptableTransientOverheadRatio);
boolean success = false;
try {
try (OrdinalsBuilder builder = new OrdinalsBuilder(reader.maxDoc(), acceptableTransientOverheadRatio)) {
BytesRefIterator iter = builder.buildFromTerms(getNumericType().wrapTermsEnum(terms.iterator(null)));
BytesRef term;
while ((term = iter.next()) != null) {
@ -131,7 +130,7 @@ public class FloatArrayIndexFieldData extends AbstractIndexFieldData<FloatArrayA
if (success) {
estimator.afterLoad(null, data.getMemorySizeInBytes());
}
builder.close();
}
}

View File

@ -98,9 +98,8 @@ public class GeoPointCompressedIndexFieldData extends AbstractGeoPointIndexField
PagedMutable lat = new PagedMutable(initialSize, pageSize, encoding.numBitsPerCoordinate(), PackedInts.COMPACT);
PagedMutable lon = new PagedMutable(initialSize, pageSize, encoding.numBitsPerCoordinate(), PackedInts.COMPACT);
final float acceptableTransientOverheadRatio = fieldDataType.getSettings().getAsFloat("acceptable_transient_overhead_ratio", OrdinalsBuilder.DEFAULT_ACCEPTABLE_OVERHEAD_RATIO);
OrdinalsBuilder builder = new OrdinalsBuilder(terms.size(), reader.maxDoc(), acceptableTransientOverheadRatio);
boolean success = false;
try {
try (OrdinalsBuilder builder = new OrdinalsBuilder(terms.size(), reader.maxDoc(), acceptableTransientOverheadRatio)) {
final GeoPointEnum iter = new GeoPointEnum(builder.buildFromTerms(terms.iterator(null)));
GeoPoint point;
long ord = 0;
@ -145,7 +144,7 @@ public class GeoPointCompressedIndexFieldData extends AbstractGeoPointIndexField
if (success) {
estimator.afterLoad(null, data.getMemorySizeInBytes());
}
builder.close();
}
}

View File

@ -74,9 +74,8 @@ public class GeoPointDoubleArrayIndexFieldData extends AbstractGeoPointIndexFiel
lat.add(0); // first "t" indicates null value
lon.add(0); // first "t" indicates null value
final float acceptableTransientOverheadRatio = fieldDataType.getSettings().getAsFloat("acceptable_transient_overhead_ratio", OrdinalsBuilder.DEFAULT_ACCEPTABLE_OVERHEAD_RATIO);
OrdinalsBuilder builder = new OrdinalsBuilder(terms.size(), reader.maxDoc(), acceptableTransientOverheadRatio);
boolean success = false;
try {
try (OrdinalsBuilder builder = new OrdinalsBuilder(terms.size(), reader.maxDoc(), acceptableTransientOverheadRatio)) {
final GeoPointEnum iter = new GeoPointEnum(builder.buildFromTerms(terms.iterator(null)));
GeoPoint point;
while ((point = iter.next()) != null) {
@ -110,7 +109,7 @@ public class GeoPointDoubleArrayIndexFieldData extends AbstractGeoPointIndexFiel
if (success) {
estimator.afterLoad(null, data.getMemorySizeInBytes());
}
builder.close();
}
}

View File

@ -112,10 +112,9 @@ public class PackedArrayIndexFieldData extends AbstractIndexFieldData<AtomicNume
final MonotonicAppendingLongBuffer values = new MonotonicAppendingLongBuffer();
final float acceptableTransientOverheadRatio = fieldDataType.getSettings().getAsFloat("acceptable_transient_overhead_ratio", OrdinalsBuilder.DEFAULT_ACCEPTABLE_OVERHEAD_RATIO);
OrdinalsBuilder builder = new OrdinalsBuilder(-1, reader.maxDoc(), acceptableTransientOverheadRatio);
TermsEnum termsEnum = estimator.beforeLoad(terms);
boolean success = false;
try {
try (OrdinalsBuilder builder = new OrdinalsBuilder(-1, reader.maxDoc(), acceptableTransientOverheadRatio)) {
BytesRefIterator iter = builder.buildFromTerms(termsEnum);
BytesRef term;
assert !getNumericType().isFloatingPoint();
@ -205,7 +204,7 @@ public class PackedArrayIndexFieldData extends AbstractIndexFieldData<AtomicNume
// Adjust as usual, based on the actual size of the field data
estimator.afterLoad(termsEnum, data.getMemorySizeInBytes());
}
builder.close();
}
}

View File

@ -83,8 +83,6 @@ public class PagedBytesIndexFieldData extends AbstractBytesIndexFieldData<PagedB
final float acceptableTransientOverheadRatio = fieldDataType.getSettings().getAsFloat(
FilterSettingFields.ACCEPTABLE_TRANSIENT_OVERHEAD_RATIO, OrdinalsBuilder.DEFAULT_ACCEPTABLE_OVERHEAD_RATIO);
OrdinalsBuilder builder = new OrdinalsBuilder(numTerms, reader.maxDoc(), acceptableTransientOverheadRatio);
// Wrap the context in an estimator and use it to either estimate
// the entire set, or wrap the TermsEnum so it can be calculated
// per-term
@ -92,7 +90,7 @@ public class PagedBytesIndexFieldData extends AbstractBytesIndexFieldData<PagedB
TermsEnum termsEnum = estimator.beforeLoad(terms);
boolean success = false;
try {
try (OrdinalsBuilder builder = new OrdinalsBuilder(numTerms, reader.maxDoc(), acceptableTransientOverheadRatio)) {
// 0 is reserved for "unset"
bytes.copyUsingLengthPrefix(new BytesRef());
@ -121,7 +119,7 @@ public class PagedBytesIndexFieldData extends AbstractBytesIndexFieldData<PagedB
// Call .afterLoad() to adjust the breaker now that we have an exact size
estimator.afterLoad(termsEnum, data.getMemorySizeInBytes());
}
builder.close();
}
}

View File

@ -120,8 +120,7 @@ public class CommitPoints implements Iterable<CommitPoint> {
}
public static CommitPoint fromXContent(byte[] data) throws Exception {
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(data);
try {
try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(data)) {
String currentFieldName = null;
XContentParser.Token token = parser.nextToken();
if (token == null) {
@ -197,8 +196,6 @@ public class CommitPoints implements Iterable<CommitPoint> {
}
return new CommitPoint(version, name, type, indexFiles, translogFiles);
} finally {
parser.close();
}
}
}

View File

@ -207,20 +207,17 @@ public class CommonTermsQueryParser implements QueryParser {
private final Query parseQueryString(ExtendedCommonTermsQuery query, String queryString, String field, QueryParseContext parseContext,
Analyzer analyzer, String lowFreqMinimumShouldMatch, String highFreqMinimumShouldMatch, MapperService.SmartNameFieldMappers smartNameFieldMappers) throws IOException {
// Logic similar to QueryParser#getFieldQuery
TokenStream source = analyzer.tokenStream(field, queryString.toString());
int count = 0;
try {
try (TokenStream source = analyzer.tokenStream(field, queryString.toString())) {
source.reset();
CharTermAttribute termAtt = source.addAttribute(CharTermAttribute.class);
while (source.incrementToken()) {
BytesRef ref = new BytesRef(termAtt.length() * 4); // oversize for
// UTF-8
// UTF-8
UnicodeUtil.UTF16toUTF8(termAtt.buffer(), 0, termAtt.length(), ref);
query.add(new Term(field, ref));
count++;
}
} finally {
source.close();
}
if (count == 0) {

View File

@ -64,15 +64,12 @@ public class TemplateQueryParser implements QueryParser {
ExecutableScript executable = this.scriptService.executable("mustache", templateContext.template(), templateContext.params());
BytesReference querySource = (BytesReference) executable.run();
XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource);
try {
try (XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource)) {
final QueryParseContext context = new QueryParseContext(parseContext.index(), parseContext.indexQueryParser);
context.reset(qSourceParser);
Query result = context.parseInnerQuery();
parser.nextToken();
return result;
} finally {
qSourceParser.close();
}
}

View File

@ -57,15 +57,12 @@ public class WrapperFilterParser implements FilterParser {
parser.nextToken();
byte[] querySource = parser.binaryValue();
XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource);
try {
try (XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource)) {
final QueryParseContext context = new QueryParseContext(parseContext.index(), parseContext.indexQueryParser);
context.reset(qSourceParser);
Filter result = context.parseInnerFilter();
parser.nextToken();
return result;
} finally {
qSourceParser.close();
}
}
}

View File

@ -57,15 +57,12 @@ public class WrapperQueryParser implements QueryParser {
parser.nextToken();
byte[] querySource = parser.binaryValue();
XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource);
try {
try (XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource)) {
final QueryParseContext context = new QueryParseContext(parseContext.index(), parseContext.indexQueryParser);
context.reset(qSourceParser);
Query result = context.parseInnerQuery();
parser.nextToken();
return result;
} finally {
qSourceParser.close();
}
}
}

View File

@ -79,14 +79,11 @@ public class DeleteByQueryWrappingFilter extends Filter {
} else {
IndexReader indexReader = searcher.getIndexReader();
if (!contains(indexReader, context)) {
IndexReader multiReader = new MultiReader(new IndexReader[]{indexReader, context.reader()}, false);
try {
try (IndexReader multiReader = new MultiReader(new IndexReader[]{indexReader, context.reader()}, false)) {
Similarity similarity = searcher.getSimilarity();
searcher = new IndexSearcher(multiReader);
searcher.setSimilarity(similarity);
weight = searcher.createNormalizedWeight(query);
} finally {
multiReader.close();
}
}
}

View File

@ -224,12 +224,9 @@ public class BlobStoreIndexShardRepository extends AbstractComponent implements
* @throws IOException
*/
public static BlobStoreIndexShardSnapshot readSnapshot(byte[] data) throws IOException {
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(data);
try {
try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(data)) {
parser.nextToken();
return BlobStoreIndexShardSnapshot.fromXContent(parser);
} finally {
parser.close();
}
}

View File

@ -211,15 +211,12 @@ public class Store extends AbstractIndexShardComponent implements CloseableIndex
if (lastFound == -1) {
return defaultValue;
}
IndexInput indexInput = lastDir.openInput(CHECKSUMS_PREFIX + lastFound, IOContext.READONCE);
try {
try (IndexInput indexInput = lastDir.openInput(CHECKSUMS_PREFIX + lastFound, IOContext.READONCE)) {
indexInput.readInt(); // version
return indexInput.readStringStringMap();
} catch (Throwable e) {
// failed to load checksums, ignore and return an empty map
return defaultValue;
} finally {
indexInput.close();
}
}
@ -237,12 +234,9 @@ public class Store extends AbstractIndexShardComponent implements CloseableIndex
while (directory.fileExists(checksumName)) {
checksumName = CHECKSUMS_PREFIX + System.currentTimeMillis();
}
IndexOutput output = directory.createOutput(checksumName, IOContext.DEFAULT, true);
try {
try (IndexOutput output = directory.createOutput(checksumName, IOContext.DEFAULT, true)) {
output.writeInt(0); // version
output.writeStringStringMap(checksums);
} finally {
output.close();
}
}

View File

@ -52,8 +52,7 @@ public class RestIndicesAliasesAction extends BaseRestHandler {
IndicesAliasesRequest indicesAliasesRequest = new IndicesAliasesRequest();
indicesAliasesRequest.listenerThreaded(false);
indicesAliasesRequest.masterNodeTimeout(request.paramAsTime("master_timeout", indicesAliasesRequest.masterNodeTimeout()));
XContentParser parser = null;
try {
try (XContentParser parser = XContentFactory.xContent(request.content()).createParser(request.content())) {
// {
// actions : [
// { add : { index : "test1", alias : "alias1", filter : {"user" : "kimchy"} } }
@ -61,7 +60,6 @@ public class RestIndicesAliasesAction extends BaseRestHandler {
// ]
// }
indicesAliasesRequest.timeout(request.paramAsTime("timeout", indicesAliasesRequest.timeout()));
parser = XContentFactory.xContent(request.content()).createParser(request.content());
XContentParser.Token token = parser.nextToken();
if (token == null) {
throw new ElasticsearchIllegalArgumentException("No action is specified");
@ -140,8 +138,6 @@ public class RestIndicesAliasesAction extends BaseRestHandler {
logger.warn("Failed to send response", e1);
}
return;
} finally {
parser.close();
}
client.admin().indices().aliases(indicesAliasesRequest, new AcknowledgedRestResponseActionListener<IndicesAliasesResponse>(request, channel, logger));
}

View File

@ -36,12 +36,9 @@ public class AggregationBinaryParseElement extends AggregationParseElement {
@Override
public void parse(XContentParser parser, SearchContext context) throws Exception {
byte[] facetSource = parser.binaryValue();
XContentParser aSourceParser = XContentFactory.xContent(facetSource).createParser(facetSource);
try {
try (XContentParser aSourceParser = XContentFactory.xContent(facetSource).createParser(facetSource)) {
aSourceParser.nextToken(); // move past the first START_OBJECT
super.parse(aSourceParser, context);
} finally {
aSourceParser.close();
}
}
}

View File

@ -36,12 +36,9 @@ public class FacetBinaryParseElement extends FacetParseElement {
@Override
public void parse(XContentParser parser, SearchContext context) throws Exception {
byte[] facetSource = parser.binaryValue();
XContentParser fSourceParser = XContentFactory.xContent(facetSource).createParser(facetSource);
try {
try (XContentParser fSourceParser = XContentFactory.xContent(facetSource).createParser(facetSource)) {
fSourceParser.nextToken(); // move past the first START_OBJECT
super.parse(fSourceParser, context);
} finally {
fSourceParser.close();
}
}
}

View File

@ -32,14 +32,11 @@ public class FilterBinaryParseElement implements SearchParseElement {
@Override
public void parse(XContentParser parser, SearchContext context) throws Exception {
byte[] filterSource = parser.binaryValue();
XContentParser fSourceParser = XContentFactory.xContent(filterSource).createParser(filterSource);
try {
try (XContentParser fSourceParser = XContentFactory.xContent(filterSource).createParser(filterSource)) {
ParsedFilter filter = context.queryParserService().parseInnerFilter(fSourceParser);
if (filter != null) {
context.parsedPostFilter(filter);
}
} finally {
fSourceParser.close();
}
}
}

View File

@ -32,11 +32,8 @@ public class QueryBinaryParseElement implements SearchParseElement {
@Override
public void parse(XContentParser parser, SearchContext context) throws Exception {
byte[] querySource = parser.binaryValue();
XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource);
try {
try (XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource)) {
context.parsedQuery(context.queryParserService().parse(qSourceParser));
} finally {
qSourceParser.close();
}
}
}

View File

@ -122,13 +122,10 @@ public class IndexWarmersMetaData implements IndexMetaData.Custom {
map = (Map<String, Object>) map.values().iterator().next();
}
XContentBuilder builder = XContentFactory.smileBuilder().map(map);
XContentParser parser = XContentFactory.xContent(XContentType.SMILE).createParser(builder.bytes());
try {
try (XContentParser parser = XContentFactory.xContent(XContentType.SMILE).createParser(builder.bytes())) {
// move to START_OBJECT
parser.nextToken();
return fromXContent(parser);
} finally {
parser.close();
}
}

View File

@ -62,12 +62,9 @@ public class RestTestSuiteParser implements RestTestFragmentParser<RestTestSuite
IOUtils.close(randomAccessFile);
}
XContentParser parser = YamlXContent.yamlXContent.createParser(new FileInputStream(file));
try {
try (XContentParser parser = YamlXContent.yamlXContent.createParser(new FileInputStream(file))) {
RestTestSuiteParseContext testParseContext = new RestTestSuiteParseContext(api, filename, parser, currentVersion);
return parse(testParseContext);
} finally {
parser.close();
}
}