SOLR-1748, SOLR-1747, SOLR-1746, SOLR-1745, SOLR-1744: Streams and Readers retrieved from ContentStreams are not closed in various places, resulting in file descriptor leaks.

git-svn-id: https://svn.apache.org/repos/asf/lucene/solr/trunk@906553 13f79535-47bb-0310-9956-ffa450edef68
Mark Robert Miller 2010-02-04 16:26:38 +00:00
parent 1444299662
commit db62a20003
6 changed files with 108 additions and 60 deletions
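Every fix in this commit follows the same pattern: the InputStream or Reader obtained from a ContentStream is closed in a finally block, so the underlying file descriptor is released even when processing throws. A minimal sketch of the InputStream variant (the class and method names StreamCloseExample/readFully are illustrative only, not part of the commit):

import java.io.IOException;
import java.io.InputStream;

import org.apache.commons.io.IOUtils;
import org.apache.solr.common.util.ContentStream;

public class StreamCloseExample {
  // Read a ContentStream fully, closing the InputStream in a finally block so
  // the file descriptor is released even if IOUtils.toString throws.
  static String readFully(ContentStream content) throws IOException {
    InputStream is = null;
    try {
      is = content.getStream();
      return IOUtils.toString(is);
    } finally {
      if (is != null) {
        is.close();
      }
    }
  }
}

The commit predates Java 7's try-with-resources, hence the explicit finally blocks throughout.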

CHANGES.txt

@@ -849,6 +849,11 @@ Bug Fixes
75. SOLR-1529: More than 8 deleteByQuery commands in a single request
caused an error to be returned, although the deletes were
still executed. (asmodean via yonik)
76. SOLR-1748, SOLR-1747, SOLR-1746, SOLR-1745, SOLR-1744: Streams and Readers
retrieved from ContentStreams are not closed in various places, resulting
in file descriptor leaks.
(Christoff Brill, Mark Miller)
Other Changes
----------------------

BinaryUpdateRequestHandler.java

@@ -49,7 +49,15 @@ public class BinaryUpdateRequestHandler extends ContentStreamHandlerBase {
protected ContentStreamLoader newLoader(SolrQueryRequest req, final UpdateRequestProcessor processor) {
return new ContentStreamLoader() {
public void load(SolrQueryRequest req, SolrQueryResponse rsp, ContentStream stream) throws Exception {
parseAndLoadDocs(req, rsp, stream.getStream(), processor);
InputStream is = null;
try {
is = stream.getStream();
parseAndLoadDocs(req, rsp, is, processor);
} finally {
if(is != null) {
is.close();
}
}
}
};
}

DumpRequestHandler.java

@@ -18,6 +18,7 @@
package org.apache.solr.handler;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import org.apache.commons.io.IOUtils;
@@ -45,7 +46,12 @@ public class DumpRequestHandler extends RequestHandlerBase
stream.add( "sourceInfo", content.getSourceInfo() );
stream.add( "size", content.getSize() );
stream.add( "contentType", content.getContentType() );
stream.add( "stream", IOUtils.toString( content.getStream() ) );
InputStream is = content.getStream();
try {
stream.add( "stream", IOUtils.toString(is) );
} finally {
is.close();
}
streams.add( stream );
}
rsp.add( "streams", streams );

MoreLikeThisHandler.java

@@ -23,11 +23,9 @@ import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;
import org.apache.lucene.document.Document;
@@ -40,7 +38,6 @@ import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.similar.MoreLikeThis;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.DisMaxParams;
import org.apache.solr.common.params.FacetParams;
import org.apache.solr.common.params.MoreLikeThisParams;
import org.apache.solr.common.params.SolrParams;
@@ -84,24 +81,7 @@ public class MoreLikeThisHandler extends RequestHandlerBase
SolrParams params = req.getParams();
SolrIndexSearcher searcher = req.getSearcher();
// Parse Required Params
// This will either have a single Reader or valid query
Reader reader = null;
String q = params.get( CommonParams.Q );
if( q == null || q.trim().length() <1 ) {
Iterable<ContentStream> streams = req.getContentStreams();
if( streams != null ) {
Iterator<ContentStream> iter = streams.iterator();
if( iter.hasNext() ) {
reader = iter.next().getReader();
}
if( iter.hasNext() ) {
throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,
"MoreLikeThis does not support multiple ContentStreams" );
}
}
}
MoreLikeThisHelper mlt = new MoreLikeThisHelper( params, searcher );
List<Query> filters = SolrPluginUtils.parseFilterQueries(req);
@@ -110,46 +90,75 @@ public class MoreLikeThisHandler extends RequestHandlerBase
List<InterestingTerm> interesting = (termStyle == TermStyle.NONE )
? null : new ArrayList<InterestingTerm>( mlt.mlt.getMaxQueryTerms() );
// What fields do we need to return
String fl = params.get(CommonParams.FL);
int flags = 0;
if (fl != null) {
flags |= SolrPluginUtils.setReturnFields(fl, rsp);
}
int start = params.getInt( CommonParams.START, 0 );
int rows = params.getInt( CommonParams.ROWS, 10 );
DocListAndSet mltDocs = null;
String q = params.get( CommonParams.Q );
// Find documents MoreLikeThis - either with a reader or a query
//--------------------------------------------------------------------------------
if( reader != null ) {
mltDocs = mlt.getMoreLikeThis( reader, start, rows, filters, interesting, flags );
}
else if( q != null ) {
// Matching options
boolean includeMatch = params.getBool( MoreLikeThisParams.MATCH_INCLUDE, true );
int matchOffset = params.getInt( MoreLikeThisParams.MATCH_OFFSET, 0 );
// Find the base match
Query query = QueryParsing.parseQuery(q, params.get(CommonParams.DF), params, req.getSchema());
DocList match = searcher.getDocList(query, null, null, matchOffset, 1, flags ); // only get the first one...
if( includeMatch ) {
rsp.add( "match", match );
// Parse Required Params
// This will either have a single Reader or valid query
Reader reader = null;
try {
if (q == null || q.trim().length() < 1) {
Iterable<ContentStream> streams = req.getContentStreams();
if (streams != null) {
Iterator<ContentStream> iter = streams.iterator();
if (iter.hasNext()) {
reader = iter.next().getReader();
}
if (iter.hasNext()) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
"MoreLikeThis does not support multiple ContentStreams");
}
}
}
// This is an iterator, but we only handle the first match
DocIterator iterator = match.iterator();
if( iterator.hasNext() ) {
// do a MoreLikeThis query for each document in results
int id = iterator.nextDoc();
mltDocs = mlt.getMoreLikeThis( id, start, rows, filters, interesting, flags );
// What fields do we need to return
String fl = params.get(CommonParams.FL);
int flags = 0;
if (fl != null) {
flags |= SolrPluginUtils.setReturnFields(fl, rsp);
}
int start = params.getInt(CommonParams.START, 0);
int rows = params.getInt(CommonParams.ROWS, 10);
// Find documents MoreLikeThis - either with a reader or a query
// --------------------------------------------------------------------------------
if (reader != null) {
mltDocs = mlt.getMoreLikeThis(reader, start, rows, filters,
interesting, flags);
} else if (q != null) {
// Matching options
boolean includeMatch = params.getBool(MoreLikeThisParams.MATCH_INCLUDE,
true);
int matchOffset = params.getInt(MoreLikeThisParams.MATCH_OFFSET, 0);
// Find the base match
Query query = QueryParsing.parseQuery(q, params.get(CommonParams.DF),
params, req.getSchema());
DocList match = searcher.getDocList(query, null, null, matchOffset, 1,
flags); // only get the first one...
if (includeMatch) {
rsp.add("match", match);
}
// This is an iterator, but we only handle the first match
DocIterator iterator = match.iterator();
if (iterator.hasNext()) {
// do a MoreLikeThis query for each document in results
int id = iterator.nextDoc();
mltDocs = mlt.getMoreLikeThis(id, start, rows, filters, interesting,
flags);
}
} else {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
"MoreLikeThis requires either a query (?q=) or text to find similar documents.");
}
} finally {
if (reader != null) {
reader.close();
}
}
else {
throw new SolrException( SolrException.ErrorCode.BAD_REQUEST,
"MoreLikeThis requires either a query (?q=) or text to find similar documents." );
}
if( mltDocs == null ) {
mltDocs = new DocListAndSet(); // avoid NPE
}

RawResponseWriter.java

@@ -18,6 +18,7 @@
package org.apache.solr.response;
import java.io.IOException;
import java.io.Reader;
import java.io.Writer;
import org.apache.commons.io.IOUtils;
@@ -81,7 +82,12 @@ public class RawResponseWriter implements QueryResponseWriter
if( obj != null && (obj instanceof ContentStream ) ) {
// copy the contents to the writer...
ContentStream content = (ContentStream)obj;
IOUtils.copy( content.getReader(), writer );
Reader reader = content.getReader();
try {
IOUtils.copy( reader, writer );
} finally {
reader.close();
}
}
else {
getBaseWriter( request ).write( writer, request, response );

CommonsHttpSolrServer.java

@@ -20,6 +20,7 @@ package org.apache.solr.client.solrj.impl;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Reader;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.*;
@@ -246,6 +247,7 @@ public class CommonsHttpSolrServer extends SolrServer
public NamedList<Object> request(final SolrRequest request, ResponseParser processor) throws SolrServerException, IOException {
HttpMethod method = null;
InputStream is = null;
SolrParams params = request.getParams();
Collection<ContentStream> streams = requestWriter.getContentStreams(request);
String path = requestWriter.getPath(request);
@@ -333,7 +335,12 @@
@Override
protected void sendData(OutputStream out)
throws IOException {
IOUtils.copy(c.getReader(), out);
Reader reader = c.getReader();
try {
IOUtils.copy(reader, out);
} finally {
reader.close();
}
}
});
}
@@ -378,7 +385,8 @@
);
} else {
post.setRequestEntity(new InputStreamRequestEntity(contentStream[0].getStream(), contentStream[0].getContentType()));
is = contentStream[0].getStream();
post.setRequestEntity(new InputStreamRequestEntity(is, contentStream[0].getContentType()));
}
method = post;
}
@@ -391,6 +399,9 @@
// This is generally safe to retry on
method.releaseConnection();
method = null;
if(is != null) {
is.close();
}
// If out of tries then just rethrow (as normal error).
if( ( tries < 1 ) ) {
throw r;
@@ -473,6 +484,9 @@
}
finally {
method.releaseConnection();
if(is != null) {
is.close();
}
}
}
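The Reader-based call sites (RawResponseWriter and the sendData callback in CommonsHttpSolrServer above) apply the same discipline to character streams. A minimal sketch, again with illustrative names (ReaderCloseExample/writeTo) that are not part of the commit:

import java.io.IOException;
import java.io.Reader;
import java.io.Writer;

import org.apache.commons.io.IOUtils;
import org.apache.solr.common.util.ContentStream;

public class ReaderCloseExample {
  // Copy the ContentStream's character data to the destination Writer, then
  // close the Reader in a finally block so it cannot leak if the copy fails.
  static void writeTo(ContentStream content, Writer out) throws IOException {
    Reader reader = content.getReader();
    try {
      IOUtils.copy(reader, out);
    } finally {
      reader.close();
    }
  }
}

In CommonsHttpSolrServer the request InputStream is additionally closed before a retry, so a failed attempt does not keep its file descriptor open while the request is re-sent.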