SOLR-6932: All HttpClient ConnectionManagers and SolrJ clients should always be shutdown in tests and regular code.

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1650608 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Mark Robert Miller 2015-01-09 17:33:04 +00:00
parent a2614fbd6d
commit b870aed9b9
79 changed files with 1417 additions and 1015 deletions

View File

@ -649,6 +649,9 @@ Other Changes
* SOLR-6918: No need to log exceptions (as warn) generated when creating MBean stats if
the core is shutting down (Timothy Potter)
* SOLR-6932: All HttpClient ConnectionManagers and SolrJ clients should always be shutdown
in tests and regular code. (Mark Miller)
================== 4.10.3 ==================
Bug Fixes

View File

@ -17,9 +17,19 @@ package org.apache.solr.handler.dataimport;
* limitations under the License.
*/
import static org.apache.solr.handler.dataimport.DataImportHandlerException.SEVERE;
import static org.apache.solr.handler.dataimport.DataImportHandlerException.wrapAndThrow;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import org.apache.http.client.HttpClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpClientUtil;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
@ -31,16 +41,6 @@ import org.apache.solr.common.params.CommonParams;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import static org.apache.solr.handler.dataimport.DataImportHandlerException.SEVERE;
import static org.apache.solr.handler.dataimport.DataImportHandlerException.wrapAndThrow;
/**
* <p>
* An implementation of {@link EntityProcessor} which fetches values from a
@ -71,6 +71,15 @@ public class SolrEntityProcessor extends EntityProcessorBase {
private String[] fields;
private String requestHandler;// 'qt' param
private int timeout = TIMEOUT_SECS;
// Releases the resources held by this entity processor (SOLR-6932: SolrJ
// clients and their HttpClient connection managers must always be shut down).
@Override
public void destroy() {
try {
solrClient.shutdown();
} finally {
// Runs even if shutdown() throws, so the connection manager is never
// leaked. Assumes solrClient is an HttpSolrClient — TODO confirm no
// other SolrClient implementation is ever assigned here.
HttpClientUtil.close(((HttpSolrClient) solrClient).getHttpClient());
}
}
/**
* Factory method that returns a {@link HttpClient} instance used for interfacing with a source Solr service.
@ -94,7 +103,6 @@ public class SolrEntityProcessor extends EntityProcessorBase {
"SolrEntityProcessor: parameter 'url' is required");
}
// TODO: we should close this client!
HttpClient client = getHttpClient();
URL url = new URL(serverPath);
// (wt="javabin|xml") default is javabin

View File

@ -67,17 +67,20 @@ public class TestContentStreamDataSource extends AbstractDataImportHandlerTestCa
params.set("command", "full-import");
params.set("clean", "false");
req.setParams(params);
HttpSolrClient solrServer = new HttpSolrClient(buildUrl(jetty.getLocalPort(), "/solr"));
solrServer.request(req);
ModifiableSolrParams qparams = new ModifiableSolrParams();
qparams.add("q", "*:*");
QueryResponse qres = solrServer.query(qparams);
SolrDocumentList results = qres.getResults();
assertEquals(2, results.getNumFound());
SolrDocument doc = results.get(0);
assertEquals("1", doc.getFieldValue("id"));
assertEquals("Hello C1", ((List)doc.getFieldValue("desc")).get(0));
solrServer.shutdown();
HttpSolrClient solrClient = new HttpSolrClient(buildUrl(jetty.getLocalPort(), "/solr"));
try {
solrClient.request(req);
ModifiableSolrParams qparams = new ModifiableSolrParams();
qparams.add("q", "*:*");
QueryResponse qres = solrClient.query(qparams);
SolrDocumentList results = qres.getResults();
assertEquals(2, results.getNumFound());
SolrDocument doc = results.get(0);
assertEquals("1", doc.getFieldValue("id"));
assertEquals("Hello C1", ((List)doc.getFieldValue("desc")).get(0));
} finally {
solrClient.shutdown();
}
}
@Test

View File

@ -32,9 +32,12 @@ public class TestSolrEntityProcessorUnit extends AbstractDataImportHandlerTestCa
List<Doc> docs = generateUniqueDocs(2);
MockSolrEntityProcessor processor = createAndInit(docs);
assertExpectedDocs(docs, processor);
assertEquals(1, processor.getQueryCount());
try {
assertExpectedDocs(docs, processor);
assertEquals(1, processor.getQueryCount());
} finally {
processor.destroy();
}
}
private MockSolrEntityProcessor createAndInit(List<Doc> docs) {
@ -46,8 +49,12 @@ public class TestSolrEntityProcessorUnit extends AbstractDataImportHandlerTestCa
int rowsNum = 10;
MockSolrEntityProcessor processor = createAndInit(docs, rowsNum);
assertExpectedDocs(docs, processor);
assertEquals(5, processor.getQueryCount());
try {
assertExpectedDocs(docs, processor);
assertEquals(5, processor.getQueryCount());
} finally {
processor.destroy();
}
}
private MockSolrEntityProcessor createAndInit(List<Doc> docs, int rowsNum) {
@ -67,15 +74,19 @@ public class TestSolrEntityProcessorUnit extends AbstractDataImportHandlerTestCa
docs.add(testDoc);
MockSolrEntityProcessor processor = createAndInit(docs);
Map<String, Object> next = processor.nextRow();
assertNotNull(next);
@SuppressWarnings("unchecked")
List<Comparable> multiField = (List<Comparable>) next.get("description");
assertEquals(testDoc.getValues("description").size(), multiField.size());
assertEquals(testDoc.getValues("description"), multiField);
assertEquals(1, processor.getQueryCount());
assertNull(processor.nextRow());
try {
Map<String, Object> next = processor.nextRow();
assertNotNull(next);
@SuppressWarnings("unchecked")
List<Comparable> multiField = (List<Comparable>) next.get("description");
assertEquals(testDoc.getValues("description").size(), multiField.size());
assertEquals(testDoc.getValues("description"), multiField);
assertEquals(1, processor.getQueryCount());
assertNull(processor.nextRow());
} finally {
processor.destroy();
}
}
private List<Doc> generateUniqueDocs(int numDocs) {

View File

@ -58,7 +58,7 @@ import org.apache.solr.common.params.CollectionParams;
import org.apache.solr.core.ConfigSolr;
import org.apache.solr.handler.component.ShardHandler;
import org.apache.solr.update.UpdateShardHandler;
import org.apache.solr.util.IOUtils;
import org.apache.solr.common.util.IOUtils;
import org.apache.solr.util.stats.Clock;
import org.apache.solr.util.stats.Timer;
import org.apache.solr.util.stats.TimerContext;

View File

@ -195,7 +195,12 @@ public class CoreContainer {
public static CoreContainer createAndLoad(String solrHome, File configFile) {
SolrResourceLoader loader = new SolrResourceLoader(solrHome);
CoreContainer cc = new CoreContainer(loader, ConfigSolr.fromFile(loader, configFile));
cc.load();
try {
cc.load();
} catch (Exception e) {
cc.shutdown();
throw e;
}
return cc;
}
@ -341,10 +346,9 @@ public class CoreContainer {
}
try {
coreAdminHandler.shutdown();
if (coreAdminHandler != null) coreAdminHandler.shutdown();
} catch (Exception e) {
log.warn("Error shutting down CoreAdminHandler. Continuing to close CoreContainer.");
e.printStackTrace();
log.warn("Error shutting down CoreAdminHandler. Continuing to close CoreContainer.", e);
}
try {

View File

@ -23,7 +23,7 @@ import org.apache.commons.lang.StringUtils;
import org.apache.solr.cloud.CloudDescriptor;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.util.IOUtils;
import org.apache.solr.common.util.IOUtils;
import org.apache.solr.util.PropertiesUtil;
import java.io.File;

View File

@ -18,8 +18,9 @@ package org.apache.solr.core;
*/
import com.google.common.collect.Lists;
import org.apache.solr.common.SolrException;
import org.apache.solr.util.IOUtils;
import org.apache.solr.common.util.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@ -38,6 +38,7 @@ import org.apache.solr.cloud.ZkController;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.IOUtils;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.store.blockcache.BlockCache;
import org.apache.solr.store.blockcache.BlockDirectory;
@ -48,7 +49,6 @@ import org.apache.solr.store.blockcache.Metrics;
import org.apache.solr.store.hdfs.HdfsDirectory;
import org.apache.solr.store.hdfs.HdfsLockFactory;
import org.apache.solr.util.HdfsUtil;
import org.apache.solr.util.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@ -40,7 +40,7 @@ public class PluginInfo implements MapSerializable{
public final List<PluginInfo> children;
private boolean isFromSolrConfig;
public PluginInfo(String type, Map<String, String> attrs ,NamedList initArgs, List<PluginInfo> children) {
public PluginInfo(String type, Map<String, String> attrs, NamedList initArgs, List<PluginInfo> children) {
this.type = type;
this.name = attrs.get(NAME);
this.className = attrs.get(CLASS_NAME);

View File

@ -17,6 +17,47 @@
package org.apache.solr.core;
import java.io.Closeable;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.Writer;
import java.lang.reflect.Constructor;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.NoSuchFileException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantLock;
import javax.xml.parsers.ParserConfigurationException;
import org.apache.commons.io.FileUtils;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.index.DirectoryReader;
@ -38,6 +79,7 @@ import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.CommonParams.EchoParamStyle;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.ExecutorUtil;
import org.apache.solr.common.util.IOUtils;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.core.DirectoryFactory.DirContext;
@ -97,7 +139,6 @@ import org.apache.solr.update.processor.RunUpdateProcessorFactory;
import org.apache.solr.update.processor.UpdateRequestProcessorChain;
import org.apache.solr.update.processor.UpdateRequestProcessorFactory;
import org.apache.solr.util.DefaultSolrThreadFactory;
import org.apache.solr.util.IOUtils;
import org.apache.solr.util.PropertiesInputStream;
import org.apache.solr.util.RefCounted;
import org.apache.solr.util.plugin.NamedListInitializedPlugin;
@ -109,46 +150,6 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.SAXException;
import javax.xml.parsers.ParserConfigurationException;
import java.io.Closeable;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.Writer;
import java.lang.reflect.Constructor;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.NoSuchFileException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantLock;
/**
*
*/

View File

@ -16,42 +16,25 @@
*/
package org.apache.solr.handler;
import org.apache.commons.io.IOUtils;
import org.apache.http.client.HttpClient;
import org.apache.lucene.index.IndexCommit;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpClientUtil;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.ExecutorUtil;
import org.apache.solr.common.util.FastInputStream;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.DirectoryFactory;
import org.apache.solr.core.DirectoryFactory.DirContext;
import org.apache.solr.core.IndexDeletionPolicyWrapper;
import org.apache.solr.core.SolrCore;
import org.apache.solr.handler.ReplicationHandler.FileInfo;
import org.apache.solr.request.LocalSolrQueryRequest;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.update.CommitUpdateCommand;
import org.apache.solr.util.DefaultSolrThreadFactory;
import org.apache.solr.util.FileUtils;
import org.apache.solr.util.PropertiesInputStream;
import org.apache.solr.util.PropertiesOutputStream;
import org.apache.solr.util.RefCounted;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.solr.handler.ReplicationHandler.ALIAS;
import static org.apache.solr.handler.ReplicationHandler.CHECKSUM;
import static org.apache.solr.handler.ReplicationHandler.CMD_DETAILS;
import static org.apache.solr.handler.ReplicationHandler.CMD_GET_FILE;
import static org.apache.solr.handler.ReplicationHandler.CMD_GET_FILE_LIST;
import static org.apache.solr.handler.ReplicationHandler.CMD_INDEX_VERSION;
import static org.apache.solr.handler.ReplicationHandler.COMMAND;
import static org.apache.solr.handler.ReplicationHandler.COMPRESSION;
import static org.apache.solr.handler.ReplicationHandler.CONF_FILES;
import static org.apache.solr.handler.ReplicationHandler.CONF_FILE_SHORT;
import static org.apache.solr.handler.ReplicationHandler.EXTERNAL;
import static org.apache.solr.handler.ReplicationHandler.FILE;
import static org.apache.solr.handler.ReplicationHandler.FILE_STREAM;
import static org.apache.solr.handler.ReplicationHandler.GENERATION;
import static org.apache.solr.handler.ReplicationHandler.INTERNAL;
import static org.apache.solr.handler.ReplicationHandler.MASTER_URL;
import static org.apache.solr.handler.ReplicationHandler.NAME;
import static org.apache.solr.handler.ReplicationHandler.OFFSET;
import static org.apache.solr.handler.ReplicationHandler.SIZE;
import java.io.File;
import java.io.FileNotFoundException;
@ -91,25 +74,41 @@ import java.util.zip.Adler32;
import java.util.zip.Checksum;
import java.util.zip.InflaterInputStream;
import static org.apache.solr.handler.ReplicationHandler.ALIAS;
import static org.apache.solr.handler.ReplicationHandler.CHECKSUM;
import static org.apache.solr.handler.ReplicationHandler.CMD_DETAILS;
import static org.apache.solr.handler.ReplicationHandler.CMD_GET_FILE;
import static org.apache.solr.handler.ReplicationHandler.CMD_GET_FILE_LIST;
import static org.apache.solr.handler.ReplicationHandler.CMD_INDEX_VERSION;
import static org.apache.solr.handler.ReplicationHandler.COMMAND;
import static org.apache.solr.handler.ReplicationHandler.COMPRESSION;
import static org.apache.solr.handler.ReplicationHandler.CONF_FILES;
import static org.apache.solr.handler.ReplicationHandler.CONF_FILE_SHORT;
import static org.apache.solr.handler.ReplicationHandler.EXTERNAL;
import static org.apache.solr.handler.ReplicationHandler.FILE;
import static org.apache.solr.handler.ReplicationHandler.FILE_STREAM;
import static org.apache.solr.handler.ReplicationHandler.GENERATION;
import static org.apache.solr.handler.ReplicationHandler.INTERNAL;
import static org.apache.solr.handler.ReplicationHandler.MASTER_URL;
import static org.apache.solr.handler.ReplicationHandler.NAME;
import static org.apache.solr.handler.ReplicationHandler.OFFSET;
import static org.apache.solr.handler.ReplicationHandler.SIZE;
import org.apache.commons.io.IOUtils;
import org.apache.http.client.HttpClient;
import org.apache.lucene.index.IndexCommit;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpClientUtil;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.ExecutorUtil;
import org.apache.solr.common.util.FastInputStream;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.DirectoryFactory;
import org.apache.solr.core.DirectoryFactory.DirContext;
import org.apache.solr.core.IndexDeletionPolicyWrapper;
import org.apache.solr.core.SolrCore;
import org.apache.solr.handler.ReplicationHandler.FileInfo;
import org.apache.solr.request.LocalSolrQueryRequest;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.update.CommitUpdateCommand;
import org.apache.solr.util.DefaultSolrThreadFactory;
import org.apache.solr.util.FileUtils;
import org.apache.solr.util.PropertiesInputStream;
import org.apache.solr.util.PropertiesOutputStream;
import org.apache.solr.util.RefCounted;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* <p/> Provides functionality of downloading changed index files as well as config files and a timer for scheduling fetches from the
@ -183,7 +182,6 @@ public class SnapPuller {
public SnapPuller(final NamedList initArgs, final ReplicationHandler handler, final SolrCore sc) {
solrCore = sc;
final SolrParams params = SolrParams.toSolrParams(initArgs);
String masterUrl = (String) initArgs.get(MASTER_URL);
if (masterUrl == null)
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,

View File

@ -17,9 +17,37 @@
package org.apache.solr.servlet;
import java.io.ByteArrayInputStream;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.http.client.HttpClient;
import org.apache.http.Header;
import org.apache.http.HeaderIterator;
import org.apache.http.HttpEntity;
import org.apache.http.HttpEntityEnclosingRequest;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpEntityEnclosingRequestBase;
import org.apache.http.client.methods.HttpGet;
@ -28,13 +56,10 @@ import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.entity.InputStreamEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.util.EntityUtils;
import org.apache.http.Header;
import org.apache.http.HeaderIterator;
import org.apache.http.HttpEntity;
import org.apache.http.HttpEntityEnclosingRequest;
import org.apache.http.HttpResponse;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.impl.HttpClientUtil;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.cloud.Aliases;
@ -57,7 +82,6 @@ import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.client.solrj.impl.HttpClientUtil;
import org.apache.solr.handler.ContentStreamHandlerBase;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrQueryRequestBase;
@ -72,31 +96,6 @@ import org.apache.solr.update.processor.DistributingUpdateProcessorFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.ByteArrayInputStream;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
/**
* This filter looks at the incoming URL maps them to handlers defined in solrconfig.xml
*
@ -113,7 +112,7 @@ public class SolrDispatchFilter extends BaseSolrFilter {
protected String pathPrefix = null; // strip this from the beginning of a path
protected String abortErrorMessage = null;
protected final HttpClient httpClient = HttpClientUtil.createClient(new ModifiableSolrParams());
protected final CloseableHttpClient httpClient = HttpClientUtil.createClient(new ModifiableSolrParams());
public SolrDispatchFilter() {
}
@ -190,10 +189,14 @@ public class SolrDispatchFilter extends BaseSolrFilter {
@Override
public void destroy() {
if (cores != null) {
cores.shutdown();
cores = null;
}
try {
if (cores != null) {
cores.shutdown();
cores = null;
}
} finally {
IOUtils.closeQuietly(httpClient);
}
}
@Override
@ -527,7 +530,6 @@ public class SolrDispatchFilter extends BaseSolrFilter {
urlstr += queryString == null ? "" : "?" + queryString;
URL url = new URL(urlstr);
boolean isPostOrPutRequest = "POST".equals(req.getMethod()) || "PUT".equals(req.getMethod());
if ("GET".equals(req.getMethod())) {

View File

@ -85,11 +85,11 @@ public class HdfsDirectory extends BaseDirectory {
}
continue;
}
org.apache.solr.util.IOUtils.closeQuietly(fileSystem);
org.apache.solr.common.util.IOUtils.closeQuietly(fileSystem);
throw new RuntimeException(
"Problem creating directory: " + hdfsDirPath, e);
} catch (Exception e) {
org.apache.solr.util.IOUtils.closeQuietly(fileSystem);
org.apache.solr.common.util.IOUtils.closeQuietly(fileSystem);
throw new RuntimeException(
"Problem creating directory: " + hdfsDirPath, e);
}

View File

@ -29,7 +29,7 @@ import org.apache.lucene.store.Directory;
import org.apache.lucene.store.Lock;
import org.apache.lucene.store.LockFactory;
import org.apache.lucene.store.LockReleaseFailedException;
import org.apache.solr.util.IOUtils;
import org.apache.solr.common.util.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@ -35,10 +35,10 @@ import org.apache.hadoop.ipc.RemoteException;
import org.apache.lucene.util.BytesRef;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.util.IOUtils;
import org.apache.solr.core.PluginInfo;
import org.apache.solr.core.SolrCore;
import org.apache.solr.util.HdfsUtil;
import org.apache.solr.util.IOUtils;
/** @lucene.experimental */
public class HdfsUpdateLog extends UpdateLog {

View File

@ -296,20 +296,20 @@ public class PeerSync {
boolean connectTimeoutExceptionInChain = connectTimeoutExceptionInChain(srsp.getException());
if (connectTimeoutExceptionInChain || solrException instanceof ConnectException || solrException instanceof ConnectTimeoutException
|| solrException instanceof NoHttpResponseException || solrException instanceof SocketException) {
log.warn(msg() + " couldn't connect to " + srsp.getShardAddress() + ", counting as success");
log.warn(msg() + " couldn't connect to " + srsp.getShardAddress() + ", counting as success", srsp.getException());
return true;
}
}
if (cantReachIsSuccess && sreq.purpose == 1 && srsp.getException() instanceof SolrException && ((SolrException) srsp.getException()).code() == 503) {
log.warn(msg() + " got a 503 from " + srsp.getShardAddress() + ", counting as success");
log.warn(msg() + " got a 503 from " + srsp.getShardAddress() + ", counting as success", srsp.getException());
return true;
}
if (cantReachIsSuccess && sreq.purpose == 1 && srsp.getException() instanceof SolrException && ((SolrException) srsp.getException()).code() == 404) {
log.warn(msg() + " got a 404 from " + srsp.getShardAddress() + ", counting as success. " +
"Perhaps /get is not registered?");
"Perhaps /get is not registered?", srsp.getException());
return true;
}

View File

@ -26,10 +26,10 @@ import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.InfoStream;
import org.apache.solr.common.util.IOUtils;
import org.apache.solr.core.DirectoryFactory;
import org.apache.solr.core.DirectoryFactory.DirContext;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.util.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@ -22,12 +22,14 @@ import java.util.concurrent.Executors;
import org.apache.http.client.HttpClient;
import org.apache.http.conn.ClientConnectionManager;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.conn.PoolingClientConnectionManager;
import org.apache.http.impl.conn.SchemeRegistryFactory;
import org.apache.solr.client.solrj.impl.HttpClientUtil;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.ExecutorUtil;
import org.apache.solr.common.util.IOUtils;
import org.apache.solr.common.util.SolrjNamedThreadFactory;
import org.apache.solr.core.ConfigSolr;
import org.slf4j.Logger;
@ -42,7 +44,7 @@ public class UpdateShardHandler {
private PoolingClientConnectionManager clientConnectionManager;
private final HttpClient client;
private final CloseableHttpClient client;
public UpdateShardHandler(ConfigSolr cfg) {
@ -52,7 +54,6 @@ public class UpdateShardHandler {
clientConnectionManager.setDefaultMaxPerRoute(cfg.getMaxUpdateConnectionsPerHost());
}
ModifiableSolrParams params = new ModifiableSolrParams();
if (cfg != null) {
params.set(HttpClientUtil.PROP_SO_TIMEOUT,
@ -84,6 +85,7 @@ public class UpdateShardHandler {
} catch (Exception e) {
SolrException.log(log, e);
} finally {
IOUtils.closeQuietly(client);
clientConnectionManager.shutdown();
}
}

View File

@ -784,7 +784,7 @@ public class DistributedUpdateProcessor extends UpdateRequestProcessor {
} else {
if (log.isWarnEnabled()) {
for (Error error : errors) {
log.warn("Error sending update", error.e);
log.warn("Error sending update to " + error.req.node.getBaseUrl(), error.e);
}
}
}

View File

@ -36,6 +36,7 @@ import org.apache.http.client.ResponseHandler;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.utils.URIBuilder;
import org.apache.http.conn.ConnectTimeoutException;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.util.EntityUtils;
import org.apache.log4j.Level;
import org.apache.log4j.LogManager;
@ -379,7 +380,7 @@ public class SolrCLI {
return wasCommError;
}
public static HttpClient getHttpClient() {
public static CloseableHttpClient getHttpClient() {
ModifiableSolrParams params = new ModifiableSolrParams();
params.set(HttpClientUtil.PROP_MAX_CONNECTIONS, 128);
params.set(HttpClientUtil.PROP_MAX_CONNECTIONS_PER_HOST, 32);
@ -388,10 +389,10 @@ public class SolrCLI {
}
@SuppressWarnings("deprecation")
public static void closeHttpClient(HttpClient httpClient) {
public static void closeHttpClient(CloseableHttpClient httpClient) {
if (httpClient != null) {
try {
httpClient.getConnectionManager().shutdown();
HttpClientUtil.close(httpClient);
} catch (Exception exc) {
// safe to ignore, we're just shutting things down
}
@ -403,7 +404,7 @@ public class SolrCLI {
*/
public static Map<String,Object> getJson(String getUrl) throws Exception {
Map<String,Object> json = null;
HttpClient httpClient = getHttpClient();
CloseableHttpClient httpClient = getHttpClient();
try {
json = getJson(httpClient, getUrl, 2);
} finally {
@ -595,7 +596,7 @@ public class SolrCLI {
int exitCode = 0;
String systemInfoUrl = solrUrl+"admin/info/system";
HttpClient httpClient = getHttpClient();
CloseableHttpClient httpClient = getHttpClient();
try {
// hit Solr to get system info
Map<String,Object> systemInfo = getJson(httpClient, systemInfoUrl, 2);
@ -1075,7 +1076,7 @@ public class SolrCLI {
solrUrl += "/";
String systemInfoUrl = solrUrl+"admin/info/system";
HttpClient httpClient = getHttpClient();
CloseableHttpClient httpClient = getHttpClient();
try {
// hit Solr to get system info
Map<String,Object> systemInfo = getJson(httpClient, systemInfoUrl, 2);
@ -1098,7 +1099,7 @@ public class SolrCLI {
}
zkHost = zookeeper;
} finally {
closeHttpClient(httpClient);
HttpClientUtil.close(httpClient);
}
}
@ -1319,7 +1320,7 @@ public class SolrCLI {
String coreName = cli.getOptionValue("name");
String systemInfoUrl = solrUrl+"admin/info/system";
HttpClient httpClient = getHttpClient();
CloseableHttpClient httpClient = getHttpClient();
String solrHome = null;
try {
Map<String,Object> systemInfo = getJson(httpClient, systemInfoUrl, 2);

View File

@ -17,6 +17,10 @@ package org.apache.solr.cloud;
* limitations under the License.
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
@ -38,10 +42,6 @@ import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
* Test sync phase that occurs when Leader goes down and a new Leader is
* elected.
@ -179,7 +179,8 @@ public class AliasIntegrationTest extends AbstractFullDistribZkTestBase {
client = new HttpSolrClient(buildUrl(port) + "/testalias");
res = client.query(query);
assertEquals(5, res.getResults().getNumFound());
client.shutdown();
client = null;
// now without collections param
query = new SolrQuery("*:*");
@ -213,26 +214,32 @@ public class AliasIntegrationTest extends AbstractFullDistribZkTestBase {
// try a std client
// search 1 and 2, but have no collections param
query = new SolrQuery("*:*");
HttpSolrClient httpclient = new HttpSolrClient(getBaseUrl((HttpSolrClient) clients.get(0)) + "/testalias");
res = httpclient.query(query);
assertEquals(5, res.getResults().getNumFound());
httpclient.shutdown();
httpclient = null;
client = new HttpSolrClient(getBaseUrl((HttpSolrClient) clients.get(0)) + "/testalias");
try {
res = client.query(query);
assertEquals(5, res.getResults().getNumFound());
} finally {
client.shutdown();
client = null;
}
createAlias("testalias", "collection2");
// a second alias
createAlias("testalias2", "collection2");
httpclient = new HttpSolrClient(getBaseUrl((HttpSolrClient) clients.get(0)) + "/testalias");
SolrInputDocument doc8 = getDoc(id, 11, i1, -600, tlong, 600, t1,
"humpty dumpy4 sat on a walls");
httpclient.add(doc8);
httpclient.commit();
res = httpclient.query(query);
assertEquals(3, res.getResults().getNumFound());
httpclient.shutdown();
httpclient = null;
client = new HttpSolrClient(getBaseUrl((HttpSolrClient) clients.get(0)) + "/testalias");
try {
SolrInputDocument doc8 = getDoc(id, 11, i1, -600, tlong, 600, t1,
"humpty dumpy4 sat on a walls");
client.add(doc8);
client.commit();
res = client.query(query);
assertEquals(3, res.getResults().getNumFound());
} finally {
client.shutdown();
client = null;
}
createAlias("testalias", "collection2,collection1");
@ -257,21 +264,24 @@ public class AliasIntegrationTest extends AbstractFullDistribZkTestBase {
throws SolrServerException, IOException {
SolrClient client = createNewSolrClient("",
getBaseUrl((HttpSolrClient) clients.get(0)));
if (random().nextBoolean()) {
ModifiableSolrParams params = new ModifiableSolrParams();
params.set("collections", collections);
params.set("name", alias);
params.set("action", CollectionAction.CREATEALIAS.toString());
QueryRequest request = new QueryRequest(params);
request.setPath("/admin/collections");
client.request(request);
} else {
CreateAlias request = new CreateAlias();
request.setAliasName(alias);
request.setAliasedCollections(collections);
request.process(client);
try {
if (random().nextBoolean()) {
ModifiableSolrParams params = new ModifiableSolrParams();
params.set("collections", collections);
params.set("name", alias);
params.set("action", CollectionAction.CREATEALIAS.toString());
QueryRequest request = new QueryRequest(params);
request.setPath("/admin/collections");
client.request(request);
} else {
CreateAlias request = new CreateAlias();
request.setAliasName(alias);
request.setAliasedCollections(collections);
request.process(client);
}
} finally {
client.shutdown();
}
client.shutdown();
}
private void deleteAlias(String alias) throws SolrServerException,

View File

@ -19,6 +19,7 @@ package org.apache.solr.cloud;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering;
import org.apache.http.client.HttpClient;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.apache.solr.SolrTestCaseJ4.SuppressSSL;
import org.apache.solr.client.solrj.SolrClient;
@ -29,6 +30,7 @@ import org.apache.solr.client.solrj.impl.HttpClientUtil;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.util.IOUtils;
import org.apache.solr.core.Diagnostics;
import org.apache.solr.update.SolrCmdDistributor;
import org.junit.After;
@ -46,7 +48,7 @@ import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
@Slow
@SuppressSSL
@SuppressSSL(bugUrl = "https://issues.apache.org/jira/browse/SOLR-5776")
@ThreadLeakLingering(linger = 60000)
public class ChaosMonkeyNothingIsSafeTest extends AbstractFullDistribZkTestBase {
private static final int FAIL_TOLERANCE = 20;
@ -290,7 +292,7 @@ public class ChaosMonkeyNothingIsSafeTest extends AbstractFullDistribZkTestBase
}
class FullThrottleStopableIndexingThread extends StopableIndexingThread {
private HttpClient httpClient = HttpClientUtil.createClient(null);
private CloseableHttpClient httpClient = HttpClientUtil.createClient(null);
private volatile boolean stop = false;
int clientIndex = 0;
private ConcurrentUpdateSolrClient cusc;
@ -389,7 +391,7 @@ public class ChaosMonkeyNothingIsSafeTest extends AbstractFullDistribZkTestBase
stop = true;
cusc.blockUntilFinished();
cusc.shutdownNow();
httpClient.getConnectionManager().shutdown();
IOUtils.closeQuietly(httpClient);
}
@Override

View File

@ -70,48 +70,51 @@ public class CollectionsAPIAsyncDistributedZkTest extends AbstractFullDistribZkT
private void testSolrJAPICalls() throws Exception {
SolrClient client = createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0)));
Create createCollectionRequest = new Create();
createCollectionRequest.setCollectionName("testasynccollectioncreation");
createCollectionRequest.setNumShards(1);
createCollectionRequest.setConfigName("conf1");
createCollectionRequest.setAsyncId("1001");
createCollectionRequest.process(client);
String state = getRequestStateAfterCompletion("1001", MAX_TIMEOUT_SECONDS, client);
assertEquals("CreateCollection task did not complete!", "completed", state);
createCollectionRequest = new Create();
createCollectionRequest.setCollectionName("testasynccollectioncreation");
createCollectionRequest.setNumShards(1);
createCollectionRequest.setConfigName("conf1");
createCollectionRequest.setAsyncId("1002");
createCollectionRequest.process(client);
state = getRequestStateAfterCompletion("1002", MAX_TIMEOUT_SECONDS, client);
assertEquals("Recreating a collection with the same name didn't fail, should have.", "failed", state);
CollectionAdminRequest.AddReplica addReplica = new CollectionAdminRequest.AddReplica();
addReplica.setCollectionName("testasynccollectioncreation");
addReplica.setShardName("shard1");
addReplica.setAsyncId("1003");
client.request(addReplica);
state = getRequestStateAfterCompletion("1003", MAX_TIMEOUT_SECONDS, client);
assertEquals("Add replica did not complete", "completed", state);
SplitShard splitShardRequest = new SplitShard();
splitShardRequest.setCollectionName("testasynccollectioncreation");
splitShardRequest.setShardName("shard1");
splitShardRequest.setAsyncId("1004");
splitShardRequest.process(client);
state = getRequestStateAfterCompletion("1004", MAX_TIMEOUT_SECONDS * 2, client);
assertEquals("Shard split did not complete. Last recorded state: " + state, "completed", state);
try {
Create createCollectionRequest = new Create();
createCollectionRequest.setCollectionName("testasynccollectioncreation");
createCollectionRequest.setNumShards(1);
createCollectionRequest.setConfigName("conf1");
createCollectionRequest.setAsyncId("1001");
createCollectionRequest.process(client);
String state = getRequestStateAfterCompletion("1001", MAX_TIMEOUT_SECONDS, client);
assertEquals("CreateCollection task did not complete!", "completed", state);
createCollectionRequest = new Create();
createCollectionRequest.setCollectionName("testasynccollectioncreation");
createCollectionRequest.setNumShards(1);
createCollectionRequest.setConfigName("conf1");
createCollectionRequest.setAsyncId("1002");
createCollectionRequest.process(client);
state = getRequestStateAfterCompletion("1002", MAX_TIMEOUT_SECONDS, client);
assertEquals("Recreating a collection with the same name didn't fail, should have.", "failed", state);
CollectionAdminRequest.AddReplica addReplica = new CollectionAdminRequest.AddReplica();
addReplica.setCollectionName("testasynccollectioncreation");
addReplica.setShardName("shard1");
addReplica.setAsyncId("1003");
client.request(addReplica);
state = getRequestStateAfterCompletion("1003", MAX_TIMEOUT_SECONDS, client);
assertEquals("Add replica did not complete", "completed", state);
SplitShard splitShardRequest = new SplitShard();
splitShardRequest.setCollectionName("testasynccollectioncreation");
splitShardRequest.setShardName("shard1");
splitShardRequest.setAsyncId("1004");
splitShardRequest.process(client);
state = getRequestStateAfterCompletion("1004", MAX_TIMEOUT_SECONDS * 2, client);
assertEquals("Shard split did not complete. Last recorded state: " + state, "completed", state);
} finally {
client.shutdown();
}
}
private String getRequestStateAfterCompletion(String requestId, int waitForSeconds, SolrClient client)

View File

@ -19,6 +19,7 @@ package org.apache.solr.cloud;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.apache.lucene.util.TestUtil;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrRequest;
import org.apache.solr.client.solrj.SolrServerException;
@ -63,6 +64,7 @@ import org.junit.BeforeClass;
import javax.management.MBeanServer;
import javax.management.MBeanServerFactory;
import javax.management.ObjectName;
import java.io.File;
import java.io.IOException;
import java.lang.management.ManagementFactory;
@ -231,8 +233,7 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBa
QueryRequest request = new QueryRequest(params);
request.setPath("/admin/collections");
try {
NamedList<Object> resp = createNewSolrClient("", baseUrl)
.request(request);
makeRequest(baseUrl, request);
fail("Expected to fail, because collection is not in clusterstate");
} catch (RemoteSolrException e) {
@ -256,7 +257,8 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBa
if (secondConfigSet) {
createCmd.setCollectionConfigName("conf1");
}
createNewSolrClient("", baseUrl).request(createCmd);
makeRequest(baseUrl, createCmd);
ModifiableSolrParams params = new ModifiableSolrParams();
params.set("action", CollectionAction.DELETE.toString());
@ -264,7 +266,7 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBa
QueryRequest request = new QueryRequest(params);
request.setPath("/admin/collections");
NamedList<Object> resp = createNewSolrClient("", baseUrl).request(request);
makeRequest(baseUrl, request);
checkForMissingCollection(collectionName);
@ -278,7 +280,7 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBa
if (secondConfigSet) {
params.set("collection.configName", "conf1");
}
resp = createNewSolrClient("", baseUrl).request(request);
makeRequest(baseUrl, request);
}
@ -286,11 +288,21 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBa
String baseUrl = getBaseUrl((HttpSolrClient) clients.get(0));
// now try to remove a collection when a couple of its nodes are down
if (secondConfigSet) {
createCollection(null, "halfdeletedcollection2", 3, 3, 6,
createNewSolrClient("", baseUrl), null, "conf2");
SolrClient client = createNewSolrClient("", baseUrl);
try {
createCollection(null, "halfdeletedcollection2", 3, 3, 6, client, null,
"conf2");
} finally {
client.shutdown();
}
} else {
createCollection(null, "halfdeletedcollection2", 3, 3, 6,
createNewSolrClient("", baseUrl), null);
SolrClient client = createNewSolrClient("", baseUrl);
try {
createCollection(null, "halfdeletedcollection2", 3, 3, 6,
client, null);
} finally {
client.shutdown();
}
}
waitForRecoveriesToFinish("halfdeletedcollection2", false);
@ -313,7 +325,7 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBa
QueryRequest request = new QueryRequest(params);
request.setPath("/admin/collections");
createNewSolrClient("", baseUrl).request(request);
makeRequest(baseUrl, request);
long timeout = System.currentTimeMillis() + 10000;
while (cloudClient.getZkStateReader().getClusterState().hasCollection("halfdeletedcollection2")) {
@ -329,6 +341,16 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBa
}
private NamedList<Object> makeRequest(String baseUrl, SolrRequest request)
throws SolrServerException, IOException {
SolrClient client = createNewSolrClient("", baseUrl);
try {
return client.request(request);
} finally {
client.shutdown();
}
}
private void testErrorHandling() throws Exception {
final String baseUrl = getBaseUrl((HttpSolrClient) clients.get(0));
@ -342,9 +364,8 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBa
QueryRequest request = new QueryRequest(params);
request.setPath("/admin/collections");
boolean gotExp = false;
NamedList<Object> resp = null;
try {
resp = createNewSolrClient("", baseUrl).request(request);
makeRequest(baseUrl, request);
} catch (SolrException e) {
gotExp = true;
}
@ -364,9 +385,8 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBa
request = new QueryRequest(params);
request.setPath("/admin/collections");
gotExp = false;
resp = null;
try {
resp = createNewSolrClient("", baseUrl).request(request);
makeRequest(baseUrl, request);
} catch (SolrException e) {
gotExp = true;
}
@ -386,7 +406,7 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBa
request.setPath("/admin/collections");
gotExp = false;
try {
resp = createNewSolrClient("", baseUrl).request(request);
makeRequest(baseUrl, request);
} catch (SolrException e) {
gotExp = true;
}
@ -404,9 +424,8 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBa
request = new QueryRequest(params);
request.setPath("/admin/collections");
gotExp = false;
resp = null;
try {
resp = createNewSolrClient("", baseUrl).request(request);
makeRequest(baseUrl, request);
} catch (SolrException e) {
gotExp = true;
}
@ -425,9 +444,8 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBa
request = new QueryRequest(params);
request.setPath("/admin/collections");
gotExp = false;
resp = null;
try {
resp = createNewSolrClient("", baseUrl).request(request);
makeRequest(baseUrl, request);
} catch (SolrException e) {
gotExp = true;
}
@ -446,7 +464,7 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBa
if (secondConfigSet) {
createCmd.setCollectionConfigName("conf1");
}
createNewSolrClient("", baseUrl).request(createCmd);
makeRequest(baseUrl, createCmd);
createCmd = new Create();
createCmd.setCoreName("halfcollection_shard1_replica1");
@ -457,7 +475,7 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBa
if (secondConfigSet) {
createCmd.setCollectionConfigName("conf1");
}
createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(1))).request(createCmd);
makeRequest(getBaseUrl((HttpSolrClient) clients.get(1)), createCmd);
params = new ModifiableSolrParams();
params.set("action", CollectionAction.CREATE.toString());
@ -477,7 +495,7 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBa
request = new QueryRequest(params);
request.setPath("/admin/collections");
gotExp = false;
resp = createNewSolrClient("", baseUrl).request(request);
NamedList<Object> resp = makeRequest(baseUrl, request);;
SimpleOrderedMap success = (SimpleOrderedMap) resp.get("success");
SimpleOrderedMap failure = (SimpleOrderedMap) resp.get("failure");
@ -507,15 +525,13 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBa
createCmd.setCollectionConfigName("conf1");
}
createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(1)))
.request(createCmd);
makeRequest(getBaseUrl((HttpSolrClient) clients.get(1)), createCmd);
// try and create a SolrCore with no collection name
createCmd.setCollection(null);
createCmd.setCoreName("corewithnocollection2");
createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(1)))
.request(createCmd);
makeRequest(getBaseUrl((HttpSolrClient) clients.get(1)), createCmd);
// in both cases, the collection should have default to the core name
cloudClient.getZkStateReader().updateClusterState(true);
@ -542,7 +558,7 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBa
QueryRequest request = new QueryRequest(params);
request.setPath("/admin/collections");
createNewSolrClient("", baseUrl).request(request);
makeRequest(baseUrl, request);
List<Integer> numShardsNumReplicaList = new ArrayList<>();
numShardsNumReplicaList.add(2);
@ -743,7 +759,7 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBa
// we can use this client because we just want base url
final String baseUrl = getBaseUrl((HttpSolrClient) clients.get(0));
createNewSolrClient("", baseUrl).request(request);
makeRequest(baseUrl, request);
// reloads make take a short while
boolean allTimesAreCorrect = waitForReloads(collectionName, urlToTimeBefore);
@ -759,7 +775,7 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBa
request = new QueryRequest(params);
request.setPath("/admin/collections");
createNewSolrClient("", baseUrl).request(request);
makeRequest(baseUrl, request);
// ensure its out of the state
checkForMissingCollection(collectionName);
@ -775,7 +791,7 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBa
boolean exp = false;
try {
createNewSolrClient("", baseUrl).request(request);
makeRequest(baseUrl, request);
} catch (SolrException e) {
exp = true;
}
@ -795,7 +811,7 @@ public class CollectionsAPIDistributedZkTest extends AbstractFullDistribZkTestBa
}
request = new QueryRequest(params);
request.setPath("/admin/collections");
createNewSolrClient("", baseUrl).request(request);
makeRequest(baseUrl, request);
List<Integer> list = new ArrayList<>(2);
list.add(1);

View File

@ -20,8 +20,8 @@ package org.apache.solr.cloud;
import static org.apache.solr.cloud.OverseerCollectionProcessor.NUM_SLICES;
import static org.apache.solr.cloud.OverseerCollectionProcessor.ROUTER;
import static org.apache.solr.cloud.OverseerCollectionProcessor.SHARDS_PROP;
import static org.apache.solr.common.cloud.ZkStateReader.REPLICATION_FACTOR;
import static org.apache.solr.common.cloud.ZkStateReader.MAX_SHARDS_PER_NODE;
import static org.apache.solr.common.cloud.ZkStateReader.REPLICATION_FACTOR;
import static org.apache.solr.common.params.ShardParams._ROUTE_;
import java.util.ArrayList;
@ -40,6 +40,8 @@ import java.util.concurrent.TimeUnit;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.apache.lucene.util.TestUtil;
import org.apache.solr.SolrTestCaseJ4.SuppressSSL;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrRequest;
import org.apache.solr.client.solrj.SolrServerException;
@ -65,6 +67,7 @@ import org.junit.BeforeClass;
* Tests the Custom Sharding API.
*/
@Slow
@SuppressSSL(bugUrl = "https://issues.apache.org/jira/browse/SOLR-5776")
public class CustomCollectionTest extends AbstractFullDistribZkTestBase {
private static final String DEFAULT_COLLECTION = "collection1";
@ -223,135 +226,140 @@ public class CustomCollectionTest extends AbstractFullDistribZkTestBase {
String url = getUrlFromZk(getCommonCloudSolrClient().getZkStateReader().getClusterState(), collectionName);
HttpSolrClient collectionClient = new HttpSolrClient(url);
// lets try and use the solrj client to index a couple documents
collectionClient.add(getDoc(id, 6, i1, -600, tlong, 600, t1,
"humpty dumpy sat on a wall", _ROUTE_,"a"));
collectionClient.add(getDoc(id, 7, i1, -600, tlong, 600, t1,
"humpty dumpy3 sat on a walls", _ROUTE_,"a"));
collectionClient.add(getDoc(id, 8, i1, -600, tlong, 600, t1,
"humpty dumpy2 sat on a walled", _ROUTE_,"a"));
collectionClient.commit();
assertEquals(3, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound());
assertEquals(0, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"b")).getResults().getNumFound());
assertEquals(3, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"a")).getResults().getNumFound());
collectionClient.deleteByQuery("*:*");
collectionClient.commit(true,true);
assertEquals(0, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound());
UpdateRequest up = new UpdateRequest();
up.setParam(_ROUTE_, "c");
up.setParam("commit","true");
up.add(getDoc(id, 9, i1, -600, tlong, 600, t1,
"humpty dumpy sat on a wall"));
up.add(getDoc(id, 10, i1, -600, tlong, 600, t1,
"humpty dumpy3 sat on a walls"));
up.add(getDoc(id, 11, i1, -600, tlong, 600, t1,
"humpty dumpy2 sat on a walled"));
collectionClient.request(up);
assertEquals(3, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound());
assertEquals(0, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"a")).getResults().getNumFound());
assertEquals(3, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"c")).getResults().getNumFound());
//Testing CREATESHARD
ModifiableSolrParams params = new ModifiableSolrParams();
params.set("action", CollectionAction.CREATESHARD.toString());
params.set("collection", collectionName);
params.set("shard", "x");
SolrRequest request = new QueryRequest(params);
request.setPath("/admin/collections");
createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0))).request(request);
waitForCollection(zkStateReader,collectionName,4);
//wait for all the replicas to become active
int attempts = 0;
while(true){
if(attempts>30 ) fail("Not enough active replicas in the shard 'x'");
attempts++;
int activeReplicaCount = 0;
for (Replica x : zkStateReader.getClusterState().getCollection(collectionName).getSlice("x").getReplicas()) {
if("active".equals(x.getStr("state"))) activeReplicaCount++;
}
Thread.sleep(500);
if(activeReplicaCount >= replicationFactor) break;
}
log.info(zkStateReader.getClusterState().toString());
collectionClient.add(getDoc(id, 66, i1, -600, tlong, 600, t1,
"humpty dumpy sat on a wall", _ROUTE_,"x"));
collectionClient.commit();
assertEquals(1, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"x")).getResults().getNumFound());
int numShards = 4;
replicationFactor = TestUtil.nextInt(random(), 0, 3) + 2;
int maxShardsPerNode = (((numShards * replicationFactor) / getCommonCloudSolrClient()
.getZkStateReader().getClusterState().getLiveNodes().size())) + 1;
CloudSolrClient client = null;
String shard_fld = "shard_s";
HttpSolrClient collectionClient = new HttpSolrClient(url);
try {
client = createCloudClient(null);
Map<String, Object> props = ZkNodeProps.makeMap(
"router.name", ImplicitDocRouter.NAME,
REPLICATION_FACTOR, replicationFactor,
MAX_SHARDS_PER_NODE, maxShardsPerNode,
SHARDS_PROP,"a,b,c,d",
"router.field", shard_fld);
collectionName = COLL_PREFIX + "withShardField";
createCollection(collectionInfos, collectionName,props,client);
// lets try and use the solrj client to index a couple documents
collectionClient.add(getDoc(id, 6, i1, -600, tlong, 600, t1,
"humpty dumpy sat on a wall", _ROUTE_,"a"));
collectionClient.add(getDoc(id, 7, i1, -600, tlong, 600, t1,
"humpty dumpy3 sat on a walls", _ROUTE_,"a"));
collectionClient.add(getDoc(id, 8, i1, -600, tlong, 600, t1,
"humpty dumpy2 sat on a walled", _ROUTE_,"a"));
collectionClient.commit();
assertEquals(3, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound());
assertEquals(0, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"b")).getResults().getNumFound());
assertEquals(3, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"a")).getResults().getNumFound());
collectionClient.deleteByQuery("*:*");
collectionClient.commit(true,true);
assertEquals(0, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound());
UpdateRequest up = new UpdateRequest();
up.setParam(_ROUTE_, "c");
up.setParam("commit","true");
up.add(getDoc(id, 9, i1, -600, tlong, 600, t1,
"humpty dumpy sat on a wall"));
up.add(getDoc(id, 10, i1, -600, tlong, 600, t1,
"humpty dumpy3 sat on a walls"));
up.add(getDoc(id, 11, i1, -600, tlong, 600, t1,
"humpty dumpy2 sat on a walled"));
collectionClient.request(up);
assertEquals(3, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound());
assertEquals(0, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"a")).getResults().getNumFound());
assertEquals(3, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"c")).getResults().getNumFound());
//Testing CREATESHARD
ModifiableSolrParams params = new ModifiableSolrParams();
params.set("action", CollectionAction.CREATESHARD.toString());
params.set("collection", collectionName);
params.set("shard", "x");
SolrRequest request = new QueryRequest(params);
request.setPath("/admin/collections");
SolrClient server = createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0)));
server.request(request);
server.shutdown();
waitForCollection(zkStateReader,collectionName,4);
//wait for all the replicas to become active
int attempts = 0;
while(true){
if(attempts>30 ) fail("Not enough active replicas in the shard 'x'");
attempts++;
int activeReplicaCount = 0;
for (Replica x : zkStateReader.getClusterState().getCollection(collectionName).getSlice("x").getReplicas()) {
if("active".equals(x.getStr("state"))) activeReplicaCount++;
}
Thread.sleep(500);
if(activeReplicaCount >= replicationFactor) break;
}
log.info(zkStateReader.getClusterState().toString());
collectionClient.add(getDoc(id, 66, i1, -600, tlong, 600, t1,
"humpty dumpy sat on a wall", _ROUTE_,"x"));
collectionClient.commit();
assertEquals(1, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"x")).getResults().getNumFound());
int numShards = 4;
replicationFactor = TestUtil.nextInt(random(), 0, 3) + 2;
int maxShardsPerNode = (((numShards * replicationFactor) / getCommonCloudSolrClient()
.getZkStateReader().getClusterState().getLiveNodes().size())) + 1;
CloudSolrClient client = null;
try {
client = createCloudClient(null);
Map<String, Object> props = ZkNodeProps.makeMap(
"router.name", ImplicitDocRouter.NAME,
REPLICATION_FACTOR, replicationFactor,
MAX_SHARDS_PER_NODE, maxShardsPerNode,
SHARDS_PROP,"a,b,c,d",
"router.field", shard_fld);
collectionName = COLL_PREFIX + "withShardField";
createCollection(collectionInfos, collectionName,props,client);
} finally {
if (client != null) client.shutdown();
}
List<Integer> list = collectionInfos.get(collectionName);
checkForCollection(collectionName, list, null);
url = getUrlFromZk(getCommonCloudSolrClient().getZkStateReader().getClusterState(), collectionName);
} finally {
if (client != null) client.shutdown();
collectionClient.shutdown();
}
collectionClient = new HttpSolrClient(url);
try {
// poll for a second - it can take a moment before we are ready to serve
waitForNon403or404or503(collectionClient);
} finally {
collectionClient.shutdown();
}
List<Integer> list = collectionInfos.get(collectionName);
checkForCollection(collectionName, list, null);
url = getUrlFromZk(getCommonCloudSolrClient().getZkStateReader().getClusterState(), collectionName);
collectionClient.shutdown();
collectionClient = new HttpSolrClient(url);
// poll for a second - it can take a moment before we are ready to serve
waitForNon403or404or503(collectionClient);
collectionClient.shutdown();
collectionClient = new HttpSolrClient(url);
// lets try and use the solrj client to index a couple documents
collectionClient.add(getDoc(id, 6, i1, -600, tlong, 600, t1,
"humpty dumpy sat on a wall", shard_fld,"a"));
collectionClient.add(getDoc(id, 7, i1, -600, tlong, 600, t1,
"humpty dumpy3 sat on a walls", shard_fld,"a"));
collectionClient.add(getDoc(id, 8, i1, -600, tlong, 600, t1,
"humpty dumpy2 sat on a walled", shard_fld,"a"));
collectionClient.commit();
assertEquals(3, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound());
assertEquals(0, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"b")).getResults().getNumFound());
//TODO debug the following case
assertEquals(3, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_, "a")).getResults().getNumFound());
collectionClient.shutdown();
collectionClient = new HttpSolrClient(url);
try {
// lets try and use the solrj client to index a couple documents
collectionClient.add(getDoc(id, 6, i1, -600, tlong, 600, t1,
"humpty dumpy sat on a wall", shard_fld,"a"));
collectionClient.add(getDoc(id, 7, i1, -600, tlong, 600, t1,
"humpty dumpy3 sat on a walls", shard_fld,"a"));
collectionClient.add(getDoc(id, 8, i1, -600, tlong, 600, t1,
"humpty dumpy2 sat on a walled", shard_fld,"a"));
collectionClient.commit();
assertEquals(3, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound());
assertEquals(0, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_,"b")).getResults().getNumFound());
//TODO debug the following case
assertEquals(3, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_, "a")).getResults().getNumFound());
} finally {
collectionClient.shutdown();
}
}
private void testRouteFieldForHashRouter()throws Exception{
@ -384,39 +392,43 @@ public class CustomCollectionTest extends AbstractFullDistribZkTestBase {
String url = getUrlFromZk(getCommonCloudSolrClient().getZkStateReader().getClusterState(), collectionName);
HttpSolrClient collectionClient = new HttpSolrClient(url);
// poll for a second - it can take a moment before we are ready to serve
waitForNon403or404or503(collectionClient);
collectionClient.shutdown();
try {
// poll for a second - it can take a moment before we are ready to serve
waitForNon403or404or503(collectionClient);
collectionClient.shutdown();
} finally {
collectionClient.shutdown();
}
collectionClient = new HttpSolrClient(url);
// lets try and use the solrj client to index a couple documents
collectionClient.add(getDoc(id, 6, i1, -600, tlong, 600, t1,
"humpty dumpy sat on a wall", shard_fld,"a"));
collectionClient.add(getDoc(id, 7, i1, -600, tlong, 600, t1,
"humpty dumpy3 sat on a walls", shard_fld,"a"));
collectionClient.add(getDoc(id, 8, i1, -600, tlong, 600, t1,
"humpty dumpy2 sat on a walled", shard_fld,"a"));
collectionClient.commit();
assertEquals(3, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound());
//TODO debug the following case
assertEquals(3, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_, "a")).getResults().getNumFound());
collectionClient.deleteByQuery("*:*");
collectionClient.commit();
collectionClient.add (getDoc( id,100,shard_fld, "b!doc1"));
collectionClient.commit();
assertEquals(1, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_, "b!")).getResults().getNumFound());
collectionClient.shutdown();
try {
// lets try and use the solrj client to index a couple documents
collectionClient.add(getDoc(id, 6, i1, -600, tlong, 600, t1,
"humpty dumpy sat on a wall", shard_fld,"a"));
collectionClient.add(getDoc(id, 7, i1, -600, tlong, 600, t1,
"humpty dumpy3 sat on a walls", shard_fld,"a"));
collectionClient.add(getDoc(id, 8, i1, -600, tlong, 600, t1,
"humpty dumpy2 sat on a walled", shard_fld,"a"));
collectionClient.commit();
assertEquals(3, collectionClient.query(new SolrQuery("*:*")).getResults().getNumFound());
//TODO debug the following case
assertEquals(3, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_, "a")).getResults().getNumFound());
collectionClient.deleteByQuery("*:*");
collectionClient.commit();
collectionClient.add (getDoc( id,100,shard_fld, "b!doc1"));
collectionClient.commit();
assertEquals(1, collectionClient.query(new SolrQuery("*:*").setParam(_ROUTE_, "b!")).getResults().getNumFound());
} finally {
collectionClient.shutdown();
}
}
private void testCreateShardRepFactor() throws Exception {
@ -445,7 +457,9 @@ public class CustomCollectionTest extends AbstractFullDistribZkTestBase {
params.set("shard", "x");
SolrRequest request = new QueryRequest(params);
request.setPath("/admin/collections");
createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0))).request(request);
SolrClient server = createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0)));
server.request(request);
server.shutdown();
waitForRecoveriesToFinish(collectionName, zkStateReader, false);

View File

@ -147,11 +147,14 @@ public class DeleteShardTest extends AbstractFullDistribZkTestBase {
.getBaseURL();
baseUrl = baseUrl.substring(0, baseUrl.length() - "collection1".length());
HttpSolrClient baseClient = new HttpSolrClient(baseUrl);
baseClient.setConnectionTimeout(15000);
baseClient.setSoTimeout(60000);
baseClient.request(request);
baseClient.shutdown();
HttpSolrClient baseServer = new HttpSolrClient(baseUrl);
try {
baseServer.setConnectionTimeout(15000);
baseServer.setSoTimeout(60000);
baseServer.request(request);
} finally {
baseServer.shutdown();
}
}
protected void setSliceState(String slice, String state) throws SolrServerException, IOException,

View File

@ -17,16 +17,18 @@ package org.apache.solr.cloud;
* limitations under the License.
*/
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.apache.solr.SolrTestCaseJ4.SuppressSSL;
import org.apache.solr.client.solrj.embedded.JettySolrRunner;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.common.cloud.Replica;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.apache.solr.SolrTestCaseJ4.SuppressSSL;
import org.apache.solr.client.solrj.embedded.JettySolrRunner;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.common.cloud.Replica;
/**
* Tests leader-initiated recovery scenarios after a leader node fails
* and one of the replicas is out-of-sync.
@ -109,10 +111,20 @@ public class LeaderFailoverAfterPartitionTest extends HttpPartitionTest {
// indexing during a partition
// doc should be on leader and 1 replica
sendDoc(5);
assertDocExists(getHttpSolrClient(leader, testCollectionName), testCollectionName, "5");
assertDocExists(getHttpSolrClient(notLeaders.get(1), testCollectionName), testCollectionName, "5");
HttpSolrClient server = getHttpSolrClient(leader, testCollectionName);
try {
assertDocExists(server, testCollectionName, "5");
} finally {
server.shutdown();
}
try {
server = getHttpSolrClient(notLeaders.get(1), testCollectionName);
assertDocExists(server, testCollectionName, "5");
} finally {
server.shutdown();
}
Thread.sleep(sleepMsBeforeHealPartition);
String shouldNotBeNewLeaderNode = notLeaders.get(0).getNodeName();
@ -160,12 +172,14 @@ public class LeaderFailoverAfterPartitionTest extends HttpPartitionTest {
printClusterStateInfo(testCollectionName),
participatingReplicas.size() >= 2);
sendDoc(6);
Set<String> replicasToCheck = new HashSet<>();
for (Replica stillUp : participatingReplicas)
replicasToCheck.add(stillUp.getName());
waitToSeeReplicasActive(testCollectionName, "shard1", replicasToCheck, 20);
waitToSeeReplicasActive(testCollectionName, "shard1", replicasToCheck, 90);
assertDocsExistInAllReplicas(participatingReplicas, testCollectionName, 1, 6);
// try to clean up

View File

@ -91,8 +91,12 @@ public class LeaderInitiatedRecoveryOnCommitTest extends BasicDistributedZkTest
// let's find the leader of shard2 and ask him to commit
Replica shard2Leader = cloudClient.getZkStateReader().getLeaderRetry(testCollectionName, "shard2");
HttpSolrClient client = new HttpSolrClient(ZkCoreNodeProps.getCoreUrl(shard2Leader.getStr("base_url"), shard2Leader.getStr("core")));
client.commit();
HttpSolrClient server = new HttpSolrClient(ZkCoreNodeProps.getCoreUrl(shard2Leader.getStr("base_url"), shard2Leader.getStr("core")));
try {
server.commit();
} finally {
server.shutdown();
}
Thread.sleep(sleepMsBeforeHealPartition);
@ -135,6 +139,7 @@ public class LeaderInitiatedRecoveryOnCommitTest extends BasicDistributedZkTest
Replica replica = notLeaders.get(0);
HttpSolrClient client = new HttpSolrClient(ZkCoreNodeProps.getCoreUrl(replica.getStr("base_url"), replica.getStr("core")));
client.commit();
client.shutdown();
Thread.sleep(sleepMsBeforeHealPartition);

View File

@ -77,130 +77,141 @@ public class MultiThreadedOCPTest extends AbstractFullDistribZkTestBase {
private void testParallelCollectionAPICalls() throws IOException, SolrServerException {
SolrClient client = createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0)));
for(int i = 1 ; i <= NUM_COLLECTIONS ; i++) {
Create createCollectionRequest = new Create();
createCollectionRequest.setCollectionName("ocptest" + i);
createCollectionRequest.setNumShards(4);
createCollectionRequest.setConfigName("conf1");
createCollectionRequest.setAsyncId(String.valueOf(i));
createCollectionRequest.process(client);
}
boolean pass = false;
int counter = 0;
while(true) {
int numRunningTasks = 0;
for (int i = 1; i <= NUM_COLLECTIONS; i++)
if (getRequestState(i + "", client).equals("running"))
numRunningTasks++;
if(numRunningTasks > 1) {
pass = true;
break;
} else if(counter++ > 100)
break;
try {
Thread.sleep(100);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
try {
for(int i = 1 ; i <= NUM_COLLECTIONS ; i++) {
Create createCollectionRequest = new Create();
createCollectionRequest.setCollectionName("ocptest" + i);
createCollectionRequest.setNumShards(4);
createCollectionRequest.setConfigName("conf1");
createCollectionRequest.setAsyncId(String.valueOf(i));
createCollectionRequest.process(client);
}
}
assertTrue("More than one tasks were supposed to be running in parallel but they weren't.", pass);
for(int i=1;i<=NUM_COLLECTIONS;i++) {
String state = getRequestStateAfterCompletion(i + "", REQUEST_STATUS_TIMEOUT, client);
assertTrue("Task " + i + " did not complete, final state: " + state,state.equals("completed"));
boolean pass = false;
int counter = 0;
while(true) {
int numRunningTasks = 0;
for (int i = 1; i <= NUM_COLLECTIONS; i++)
if (getRequestState(i + "", client).equals("running"))
numRunningTasks++;
if(numRunningTasks > 1) {
pass = true;
break;
} else if(counter++ > 100)
break;
try {
Thread.sleep(100);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
assertTrue("More than one tasks were supposed to be running in parallel but they weren't.", pass);
for(int i=1;i<=NUM_COLLECTIONS;i++) {
String state = getRequestStateAfterCompletion(i + "", REQUEST_STATUS_TIMEOUT, client);
assertTrue("Task " + i + " did not complete, final state: " + state,state.equals("completed"));
}
} finally {
client.shutdown();
}
}
private void testTaskExclusivity() throws IOException, SolrServerException {
SolrClient client = createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0)));
Create createCollectionRequest = new Create();
createCollectionRequest.setCollectionName("ocptest_shardsplit");
createCollectionRequest.setNumShards(4);
createCollectionRequest.setConfigName("conf1");
createCollectionRequest.setAsyncId("1000");
createCollectionRequest.process(client);
SplitShard splitShardRequest = new SplitShard();
splitShardRequest.setCollectionName("ocptest_shardsplit");
splitShardRequest.setShardName(SHARD1);
splitShardRequest.setAsyncId("1001");
splitShardRequest.process(client);
splitShardRequest = new SplitShard();
splitShardRequest.setCollectionName("ocptest_shardsplit");
splitShardRequest.setShardName(SHARD2);
splitShardRequest.setAsyncId("1002");
splitShardRequest.process(client);
int iterations = 0;
while(true) {
int runningTasks = 0;
int completedTasks = 0;
try {
Create createCollectionRequest = new Create();
createCollectionRequest.setCollectionName("ocptest_shardsplit");
createCollectionRequest.setNumShards(4);
createCollectionRequest.setConfigName("conf1");
createCollectionRequest.setAsyncId("1000");
createCollectionRequest.process(client);
SplitShard splitShardRequest = new SplitShard();
splitShardRequest.setCollectionName("ocptest_shardsplit");
splitShardRequest.setShardName(SHARD1);
splitShardRequest.setAsyncId("1001");
splitShardRequest.process(client);
splitShardRequest = new SplitShard();
splitShardRequest.setCollectionName("ocptest_shardsplit");
splitShardRequest.setShardName(SHARD2);
splitShardRequest.setAsyncId("1002");
splitShardRequest.process(client);
int iterations = 0;
while(true) {
int runningTasks = 0;
int completedTasks = 0;
for (int i=1001;i<=1002;i++) {
String state = getRequestState(i, client);
if (state.equals("running"))
runningTasks++;
if (state.equals("completed"))
completedTasks++;
assertTrue("We have a failed SPLITSHARD task", !state.equals("failed"));
}
// TODO: REQUESTSTATUS might come back with more than 1 running tasks over multiple calls.
// The only way to fix this is to support checking of multiple requestids in a single REQUESTSTATUS task.
assertTrue("Mutual exclusion failed. Found more than one task running for the same collection", runningTasks < 2);
if(completedTasks == 2 || iterations++ > REQUEST_STATUS_TIMEOUT)
break;
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
return;
}
}
for (int i=1001;i<=1002;i++) {
String state = getRequestState(i, client);
if (state.equals("running"))
runningTasks++;
if (state.equals("completed"))
completedTasks++;
assertTrue("We have a failed SPLITSHARD task", !state.equals("failed"));
String state = getRequestStateAfterCompletion(i + "", REQUEST_STATUS_TIMEOUT, client);
assertTrue("Task " + i + " did not complete, final state: " + state,state.equals("completed"));
}
// TODO: REQUESTSTATUS might come back with more than 1 running tasks over multiple calls.
// The only way to fix this is to support checking of multiple requestids in a single REQUESTSTATUS task.
assertTrue("Mutual exclusion failed. Found more than one task running for the same collection", runningTasks < 2);
if(completedTasks == 2 || iterations++ > REQUEST_STATUS_TIMEOUT)
break;
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
return;
}
}
for (int i=1001;i<=1002;i++) {
String state = getRequestStateAfterCompletion(i + "", REQUEST_STATUS_TIMEOUT, client);
assertTrue("Task " + i + " did not complete, final state: " + state,state.equals("completed"));
} finally {
client.shutdown();
}
}
private void testDeduplicationOfSubmittedTasks() throws IOException, SolrServerException {
SolrClient client = createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0)));
Create createCollectionRequest = new Create();
createCollectionRequest.setCollectionName("ocptest_shardsplit2");
createCollectionRequest.setNumShards(4);
createCollectionRequest.setConfigName("conf1");
createCollectionRequest.setAsyncId("3000");
createCollectionRequest.process(client);
SplitShard splitShardRequest = new SplitShard();
splitShardRequest.setCollectionName("ocptest_shardsplit2");
splitShardRequest.setShardName(SHARD1);
splitShardRequest.setAsyncId("3001");
splitShardRequest.process(client);
splitShardRequest = new SplitShard();
splitShardRequest.setCollectionName("ocptest_shardsplit2");
splitShardRequest.setShardName(SHARD2);
splitShardRequest.setAsyncId("3002");
splitShardRequest.process(client);
// Now submit another task with the same id. At this time, hopefully the previous 3002 should still be in the queue.
splitShardRequest = new SplitShard();
splitShardRequest.setCollectionName("ocptest_shardsplit2");
splitShardRequest.setShardName(SHARD1);
splitShardRequest.setAsyncId("3002");
CollectionAdminResponse response = splitShardRequest.process(client);
NamedList r = response.getResponse();
assertEquals("Duplicate request was supposed to exist but wasn't found. De-duplication of submitted task failed.",
"Task with the same requestid already exists.", r.get("error"));
for (int i=3001;i<=3002;i++) {
String state = getRequestStateAfterCompletion(i + "", REQUEST_STATUS_TIMEOUT, client);
assertTrue("Task " + i + " did not complete, final state: " + state,state.equals("completed"));
try {
Create createCollectionRequest = new Create();
createCollectionRequest.setCollectionName("ocptest_shardsplit2");
createCollectionRequest.setNumShards(4);
createCollectionRequest.setConfigName("conf1");
createCollectionRequest.setAsyncId("3000");
createCollectionRequest.process(client);
SplitShard splitShardRequest = new SplitShard();
splitShardRequest.setCollectionName("ocptest_shardsplit2");
splitShardRequest.setShardName(SHARD1);
splitShardRequest.setAsyncId("3001");
splitShardRequest.process(client);
splitShardRequest = new SplitShard();
splitShardRequest.setCollectionName("ocptest_shardsplit2");
splitShardRequest.setShardName(SHARD2);
splitShardRequest.setAsyncId("3002");
splitShardRequest.process(client);
// Now submit another task with the same id. At this time, hopefully the previous 3002 should still be in the queue.
splitShardRequest = new SplitShard();
splitShardRequest.setCollectionName("ocptest_shardsplit2");
splitShardRequest.setShardName(SHARD1);
splitShardRequest.setAsyncId("3002");
CollectionAdminResponse response = splitShardRequest.process(client);
NamedList r = response.getResponse();
assertEquals("Duplicate request was supposed to exist but wasn't found. De-duplication of submitted task failed.",
"Task with the same requestid already exists.", r.get("error"));
for (int i=3001;i<=3002;i++) {
String state = getRequestStateAfterCompletion(i + "", REQUEST_STATUS_TIMEOUT, client);
assertTrue("Task " + i + " did not complete, final state: " + state,state.equals("completed"));
}
} finally {
client.shutdown();
}
}
@ -221,10 +232,9 @@ public class MultiThreadedOCPTest extends AbstractFullDistribZkTestBase {
}
};
indexThread.start();
SolrClient client = createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0)));
try {
SolrClient client = createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0)));
SplitShard splitShardRequest = new SplitShard();
splitShardRequest.setCollectionName("collection1");
splitShardRequest.setShardName(SHARD1);
@ -258,6 +268,8 @@ public class MultiThreadedOCPTest extends AbstractFullDistribZkTestBase {
indexThread.join();
} catch (InterruptedException e) {
log.warn("Indexing thread interrupted.");
} finally {
client.shutdown();
}
}
}

View File

@ -71,6 +71,8 @@ public class OverseerTest extends SolrTestCaseJ4 {
private List<Overseer> overseers = new ArrayList<>();
private List<ZkStateReader> readers = new ArrayList<>();
private List<HttpShardHandlerFactory> httpShardHandlerFactorys = new ArrayList<>();
private List<UpdateShardHandler> updateShardHandlers = new ArrayList<>();
private String collection = "collection1";
@ -205,6 +207,16 @@ public class OverseerTest extends SolrTestCaseJ4 {
reader.close();
}
readers.clear();
for (HttpShardHandlerFactory handlerFactory : httpShardHandlerFactorys) {
handlerFactory.close();
}
httpShardHandlerFactorys.clear();
for (UpdateShardHandler updateShardHandler : updateShardHandlers) {
updateShardHandler.close();
}
updateShardHandlers.clear();
}
@Test
@ -1118,8 +1130,11 @@ public class OverseerTest extends SolrTestCaseJ4 {
overseers.get(overseers.size() -1).getZkStateReader().getZkClient().close();
}
UpdateShardHandler updateShardHandler = new UpdateShardHandler(null);
updateShardHandlers.add(updateShardHandler);
HttpShardHandlerFactory httpShardHandlerFactory = new HttpShardHandlerFactory();
httpShardHandlerFactorys.add(httpShardHandlerFactory);
Overseer overseer = new Overseer(
new HttpShardHandlerFactory().getShardHandler(), updateShardHandler, "/admin/cores", reader, null, new MockConfigSolr());
httpShardHandlerFactory.getShardHandler(), updateShardHandler, "/admin/cores", reader, null, new MockConfigSolr());
overseers.add(overseer);
ElectionContext ec = new OverseerElectionContext(zkClient, overseer,
address.replaceAll("/", "_"));

View File

@ -525,11 +525,14 @@ public class ShardSplitTest extends BasicDistributedZkTest {
.getBaseURL();
baseUrl = baseUrl.substring(0, baseUrl.length() - "collection1".length());
HttpSolrClient baseClient = new HttpSolrClient(baseUrl);
baseClient.setConnectionTimeout(30000);
baseClient.setSoTimeout(60000 * 5);
baseClient.request(request);
baseClient.shutdown();
HttpSolrClient baseServer = new HttpSolrClient(baseUrl);
try {
baseServer.setConnectionTimeout(30000);
baseServer.setSoTimeout(60000 * 5);
baseServer.request(request);
} finally {
baseServer.shutdown();
}
}
protected void indexAndUpdateCount(DocRouter router, List<DocRouter.Range> ranges, int[] docCounts, String id, int n) throws Exception {

View File

@ -29,6 +29,7 @@ import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.core.ConfigSolr;
import org.apache.solr.core.SolrResourceLoader;
import org.apache.solr.servlet.SolrDispatchFilter;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
@ -52,10 +53,19 @@ public class SolrXmlInZkTest extends SolrTestCaseJ4 {
private ConfigSolr cfg;
private SolrDispatchFilter solrDispatchFilter;
@Before
public void beforeClass() {
public void before() {
System.setProperty("solr.solrxml.location", "zookeeper");
}
@After
public void after() {
if (solrDispatchFilter != null) {
solrDispatchFilter.destroy();
}
}
private void setUpZkAndDiskXml(boolean toZk, boolean leaveOnLocal) throws Exception {
File tmpDir = createTempDir().toFile();
@ -96,8 +106,9 @@ public class SolrXmlInZkTest extends SolrTestCaseJ4 {
Method method = SolrDispatchFilter.class.getDeclaredMethod("loadConfigSolr", SolrResourceLoader.class);
method.setAccessible(true);
Object obj = method.invoke(new SolrDispatchFilter(), new SolrResourceLoader(null));
if (solrDispatchFilter != null) solrDispatchFilter.destroy();
solrDispatchFilter = new SolrDispatchFilter();
Object obj = method.invoke(solrDispatchFilter, new SolrResourceLoader(null));
cfg = (ConfigSolr) obj;
log.info("####SETUP_END " + getTestName());
@ -200,7 +211,9 @@ public class SolrXmlInZkTest extends SolrTestCaseJ4 {
try {
Method method = SolrDispatchFilter.class.getDeclaredMethod("loadConfigSolr", SolrResourceLoader.class);
method.setAccessible(true);
method.invoke(new SolrDispatchFilter(), new SolrResourceLoader(null));
if (solrDispatchFilter != null) solrDispatchFilter.destroy();
solrDispatchFilter = new SolrDispatchFilter();
method.invoke(solrDispatchFilter, new SolrResourceLoader(null));
fail("Should have thrown an exception");
} catch (InvocationTargetException ite) {
assertTrue("Should be catching a SolrException", ite.getTargetException() instanceof SolrException);

View File

@ -45,7 +45,6 @@ public class TestLeaderElectionZkExpiry extends SolrTestCaseJ4 {
server.run();
AbstractZkTestCase.tryCleanSolrZkNode(server.getZkHost());
AbstractZkTestCase.makeSolrZkNode(server.getZkHost());
cc.load();
final ZkController zkController = new ZkController(cc, server.getZkAddress(), 15000, 30000, "dummy.host.com", "8984", "/solr", 180000, 180000, true, new CurrentCoreDescriptorProvider() {
@Override

View File

@ -218,8 +218,10 @@ public class TestRequestStatusCollectionAPI extends BasicDistributedZkTest {
.getBaseURL();
baseUrl = baseUrl.substring(0, baseUrl.length() - "collection1".length());
HttpSolrClient baseClient = new HttpSolrClient(baseUrl);
baseClient.setConnectionTimeout(15000);
return baseClient.request(request);
HttpSolrClient baseServer = new HttpSolrClient(baseUrl);
baseServer.setConnectionTimeout(15000);
NamedList<Object> resp = baseServer.request(request);
baseServer.shutdown();
return resp;
}
}

View File

@ -35,9 +35,7 @@ import org.apache.solr.core.CoreDescriptor;
import org.apache.solr.core.CoresLocator;
import org.apache.solr.handler.admin.CoreAdminHandler;
import org.apache.solr.handler.component.HttpShardHandlerFactory;
import org.apache.solr.handler.component.ShardHandlerFactory;
import org.apache.solr.update.UpdateShardHandler;
import org.apache.solr.util.ExternalPaths;
import org.apache.zookeeper.CreateMode;
import org.junit.AfterClass;
import org.junit.BeforeClass;

View File

@ -110,7 +110,12 @@ public class TestCoreDiscovery extends SolrTestCaseJ4 {
private CoreContainer init() throws Exception {
final CoreContainer cores = new CoreContainer();
cores.load();
try {
cores.load();
} catch (Exception e) {
cores.shutdown();
throw e;
}
return cores;
}

View File

@ -26,6 +26,7 @@ import org.apache.solr.handler.TestBlobHandler;
import org.apache.solr.util.RESTfulServerProvider;
import org.apache.solr.util.RestTestHarness;
import org.apache.solr.util.SimplePostTool;
import org.junit.After;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -55,16 +56,18 @@ public class TestDynamicLoading extends AbstractFullDistribZkTestBase {
}
}
@After
public void testDown() throws Exception {
super.tearDown();
for (RestTestHarness r : restTestHarnesses) {
r.close();
}
}
@Override
public void doTest() throws Exception {
setupHarnesses();
dynamicLoading();
}
private void dynamicLoading() throws Exception {
@ -201,6 +204,4 @@ public class TestDynamicLoading extends AbstractFullDistribZkTestBase {
return bos.getByteBuffer();
}
}

View File

@ -30,7 +30,6 @@ public class TestImplicitCoreProperties extends SolrTestCaseJ4 {
CoreContainer cc = createCoreContainer(TEST_HOME(), SOLRXML);
try {
cc.load();
assertQ(req("q", "*:*")
, "//str[@name='dummy1'][.='collection1']"
, "//str[@name='dummy2'][.='data"+File.separator+"']"

View File

@ -92,6 +92,9 @@ public class TestSolrConfigHandler extends RestTestBase {
jetty = null;
}
client = null;
if (restTestHarness != null) {
restTestHarness.close();
}
restTestHarness = null;
}

View File

@ -60,46 +60,48 @@ public class TestBlobHandler extends AbstractFullDistribZkTestBase {
private void doBlobHandlerTest() throws Exception {
SolrClient client = createNewSolrClient("", getBaseUrl((HttpSolrClient) clients.get(0)));
try {
CollectionAdminResponse response1;
CollectionAdminRequest.Create createCollectionRequest = new CollectionAdminRequest.Create();
createCollectionRequest.setCollectionName(".system");
createCollectionRequest.setNumShards(1);
createCollectionRequest.setReplicationFactor(2);
response1 = createCollectionRequest.process(client);
assertEquals(0, response1.getStatus());
assertTrue(response1.isSuccess());
DocCollection sysColl = cloudClient.getZkStateReader().getClusterState().getCollection(".system");
Replica replica = sysColl.getActiveSlicesMap().values().iterator().next().getLeader();
CollectionAdminResponse response1;
CollectionAdminRequest.Create createCollectionRequest = new CollectionAdminRequest.Create();
createCollectionRequest.setCollectionName(".system");
createCollectionRequest.setNumShards(1);
createCollectionRequest.setReplicationFactor(2);
response1 = createCollectionRequest.process(client);
assertEquals(0, response1.getStatus());
assertTrue(response1.isSuccess());
DocCollection sysColl = cloudClient.getZkStateReader().getClusterState().getCollection(".system");
Replica replica = sysColl.getActiveSlicesMap().values().iterator().next().getLeader();
String baseUrl = replica.getStr(ZkStateReader.BASE_URL_PROP);
String url = baseUrl + "/.system/config/requestHandler";
Map map = TestSolrConfigHandlerConcurrent.getAsMap(url, cloudClient);
assertNotNull(map);
assertEquals("solr.BlobHandler", getObjectByPath(map, true, Arrays.asList(
"config",
"requestHandler",
"/blob",
"class")));
String baseUrl = replica.getStr(ZkStateReader.BASE_URL_PROP);
String url = baseUrl + "/.system/config/requestHandler";
Map map = TestSolrConfigHandlerConcurrent.getAsMap(url, cloudClient);
assertNotNull(map);
assertEquals("solr.BlobHandler", getObjectByPath(map, true, Arrays.asList(
"config",
"requestHandler",
"/blob",
"class")));
byte[] bytarr = new byte[1024];
for (int i = 0; i < bytarr.length; i++) bytarr[i]= (byte) (i % 127);
byte[] bytarr2 = new byte[2048];
for (int i = 0; i < bytarr2.length; i++) bytarr2[i]= (byte) (i % 127);
postAndCheck(cloudClient, baseUrl, ByteBuffer.wrap( bytarr), 1);
postAndCheck(cloudClient, baseUrl, ByteBuffer.wrap( bytarr2), 2);
byte[] bytarr = new byte[1024];
for (int i = 0; i < bytarr.length; i++) bytarr[i]= (byte) (i % 127);
byte[] bytarr2 = new byte[2048];
for (int i = 0; i < bytarr2.length; i++) bytarr2[i]= (byte) (i % 127);
postAndCheck(cloudClient, baseUrl, ByteBuffer.wrap( bytarr), 1);
postAndCheck(cloudClient, baseUrl, ByteBuffer.wrap( bytarr2), 2);
url = baseUrl + "/.system/blob/test/1";
map = TestSolrConfigHandlerConcurrent.getAsMap(url,cloudClient);
List l = (List) ConfigOverlay.getObjectByPath(map, false, Arrays.asList("response", "docs"));
assertNotNull(""+map, l);
assertTrue("" + map, l.size() > 0);
map = (Map) l.get(0);
assertEquals(""+bytarr.length,String.valueOf(map.get("size")));
compareInputAndOutput(baseUrl+"/.system/blob/test?wt=filestream", bytarr2);
compareInputAndOutput(baseUrl+"/.system/blob/test/1?wt=filestream", bytarr);
url = baseUrl + "/.system/blob/test/1";
map = TestSolrConfigHandlerConcurrent.getAsMap(url,cloudClient);
List l = (List) ConfigOverlay.getObjectByPath(map, false, Arrays.asList("response", "docs"));
assertNotNull(""+map, l);
assertTrue("" + map, l.size() > 0);
map = (Map) l.get(0);
assertEquals(""+bytarr.length,String.valueOf(map.get("size")));
compareInputAndOutput(baseUrl+"/.system/blob/test?wt=filestream", bytarr2);
compareInputAndOutput(baseUrl+"/.system/blob/test/1?wt=filestream", bytarr);
} finally {
client.shutdown();
}
}
public static void createSysColl(SolrClient client) throws SolrServerException, IOException {

View File

@ -17,6 +17,9 @@ package org.apache.solr.handler;
* limitations under the License.
*/
import static java.util.Arrays.asList;
import static org.apache.solr.core.ConfigOverlay.getObjectByPath;
import java.io.StringReader;
import java.nio.charset.StandardCharsets;
import java.text.MessageFormat;
@ -44,14 +47,12 @@ import org.apache.solr.util.RestTestHarness;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.data.Stat;
import org.junit.After;
import org.noggit.JSONParser;
import org.noggit.ObjectBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static java.util.Arrays.asList;
import static org.apache.solr.core.ConfigOverlay.getObjectByPath;
public class TestConfigReload extends AbstractFullDistribZkTestBase {
@ -69,11 +70,25 @@ public class TestConfigReload extends AbstractFullDistribZkTestBase {
restTestHarnesses.add(harness);
}
}
@After
public void tearDown() throws Exception {
super.tearDown();
for (RestTestHarness h : restTestHarnesses) {
h.close();
}
}
@Override
public void doTest() throws Exception {
setupHarnesses();
reloadTest();
try {
reloadTest();
} finally {
for (RestTestHarness h : restTestHarnesses) {
h.close();
}
}
}
private void reloadTest() throws Exception {

View File

@ -905,6 +905,9 @@ public class TestReplicationHandler extends SolrTestCaseJ4 {
repeater.tearDown();
repeaterJetty = null;
}
if (repeaterClient != null) {
repeaterClient.shutdown();
}
}
}

View File

@ -35,6 +35,7 @@ import org.apache.solr.core.ConfigOverlay;
import org.apache.solr.core.TestSolrConfigHandler;
import org.apache.solr.util.RESTfulServerProvider;
import org.apache.solr.util.RestTestHarness;
import org.junit.After;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@ -53,13 +54,20 @@ public class TestSolrConfigHandlerCloud extends AbstractFullDistribZkTestBase {
restTestHarnesses.add(harness);
}
}
@After
public void tearDown() throws Exception {
super.tearDown();
for (RestTestHarness r : restTestHarnesses) {
r.close();
}
}
@Override
public void doTest() throws Exception {
setupHarnesses();
testReqHandlerAPIs();
testReqParams();
}
private void testReqHandlerAPIs() throws Exception {

View File

@ -17,6 +17,10 @@ package org.apache.solr.handler;
* limitations under the License.
*/
import static java.util.Arrays.asList;
import static org.apache.solr.core.ConfigOverlay.getObjectByPath;
import static org.noggit.ObjectBuilder.getVal;
import java.io.IOException;
import java.io.StringReader;
import java.nio.charset.StandardCharsets;
@ -43,15 +47,12 @@ import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.core.ConfigOverlay;
import org.apache.solr.util.RESTfulServerProvider;
import org.apache.solr.util.RestTestHarness;
import org.junit.After;
import org.noggit.JSONParser;
import org.noggit.ObjectBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static java.util.Arrays.asList;
import static org.apache.solr.core.ConfigOverlay.getObjectByPath;
import static org.noggit.ObjectBuilder.getVal;
public class TestSolrConfigHandlerConcurrent extends AbstractFullDistribZkTestBase {
@ -70,6 +71,14 @@ public class TestSolrConfigHandlerConcurrent extends AbstractFullDistribZkTestBa
restTestHarnesses.add(harness);
}
}
@After
public void tearDown() throws Exception {
super.tearDown();
for (RestTestHarness h : restTestHarnesses) {
h.close();
}
}
@Override
public void doTest() throws Exception {
@ -132,15 +141,24 @@ public class TestSolrConfigHandlerConcurrent extends AbstractFullDistribZkTestBa
Set<String> errmessages = new HashSet<>();
for(int i =1;i<2;i++){//make it ahigher number
RestTestHarness publisher = restTestHarnesses.get(r.nextInt(restTestHarnesses.size()));
payload = payload.replaceAll("CACHENAME" , cacheName);
String val1 = String.valueOf(10 * i + 1);
payload = payload.replace("CACHEVAL1", val1);
String val2 = String.valueOf(10 * i + 2);
payload = payload.replace("CACHEVAL2", val2);
String val3 = String.valueOf(10 * i + 3);
payload = payload.replace("CACHEVAL3", val3);
String response = publisher.post("/config?wt=json", SolrTestCaseJ4.json(payload));
String response;
String val1;
String val2;
String val3;
try {
payload = payload.replaceAll("CACHENAME" , cacheName);
val1 = String.valueOf(10 * i + 1);
payload = payload.replace("CACHEVAL1", val1);
val2 = String.valueOf(10 * i + 2);
payload = payload.replace("CACHEVAL2", val2);
val3 = String.valueOf(10 * i + 3);
payload = payload.replace("CACHEVAL3", val3);
response = publisher.post("/config?wt=json", SolrTestCaseJ4.json(payload));
} finally {
publisher.close();
}
Map map = (Map) getVal(new JSONParser(new StringReader(response)));
Object errors = map.get("errors");
if(errors!= null){

View File

@ -73,6 +73,9 @@ public class TestBulkSchemaAPI extends RestTestBase {
jetty = null;
}
client = null;
if (restTestHarness != null) {
restTestHarness.close();
}
restTestHarness = null;
}

View File

@ -63,6 +63,9 @@ public class TestManagedSchemaDynamicFieldResource extends RestTestBase {
jetty = null;
}
client = null;
if (restTestHarness != null) {
restTestHarness.close();
}
restTestHarness = null;
}

View File

@ -63,6 +63,9 @@ public class TestManagedSchemaFieldResource extends RestTestBase {
jetty = null;
}
client = null;
if (restTestHarness != null) {
restTestHarness.close();
}
restTestHarness = null;
}

View File

@ -66,6 +66,11 @@ public class TestManagedSchemaFieldTypeResource extends RestTestBase {
jetty = null;
System.clearProperty("managed.schema.mutable");
System.clearProperty("enable.update.log");
if (restTestHarness != null) {
restTestHarness.close();
}
restTestHarness = null;
}
@Test

View File

@ -66,6 +66,11 @@ public class TestManagedStopFilterFactory extends RestTestBase {
jetty = null;
System.clearProperty("managed.schema.mutable");
System.clearProperty("enable.update.log");
if (restTestHarness != null) {
restTestHarness.close();
}
restTestHarness = null;
}

View File

@ -63,6 +63,11 @@ public class TestManagedSynonymFilterFactory extends RestTestBase {
FileUtils.deleteDirectory(tmpSolrHome);
System.clearProperty("managed.schema.mutable");
System.clearProperty("enable.update.log");
if (restTestHarness != null) {
restTestHarness.close();
}
restTestHarness = null;
}
@Test

View File

@ -14,11 +14,17 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.schema;
import java.io.File;
import java.nio.ByteBuffer;
import java.util.List;
import org.apache.commons.io.FileUtils;
import org.apache.solr.SolrJettyTestBase;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.SolrTestCaseJ4.SuppressSSL;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.beans.Field;
@ -28,10 +34,7 @@ import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrInputDocument;
import org.junit.BeforeClass;
import java.io.File;
import java.nio.ByteBuffer;
import java.util.List;
@SuppressSSL(bugUrl = "https://issues.apache.org/jira/browse/SOLR-5776")
public class TestBinaryField extends SolrJettyTestBase {
@BeforeClass
@ -86,6 +89,7 @@ public class TestBinaryField extends SolrJettyTestBase {
client.commit();
QueryResponse resp = client.query(new SolrQuery("*:*"));
client.shutdown();
SolrDocumentList res = resp.getResults();
List<Bean> beans = resp.getBeans(Bean.class);
assertEquals(3, res.size());

View File

@ -18,18 +18,9 @@ package org.apache.solr.schema;
*/
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.cloud.AbstractFullDistribZkTestBase;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.util.RESTfulServerProvider;
import org.apache.solr.util.RestTestHarness;
import org.junit.BeforeClass;
import org.noggit.JSONParser;
import org.noggit.ObjectBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static java.text.MessageFormat.format;
import static org.apache.solr.rest.schema.TestBulkSchemaAPI.getCopyFields;
import static org.apache.solr.rest.schema.TestBulkSchemaAPI.getObj;
import java.io.IOException;
import java.io.StringReader;
@ -41,9 +32,19 @@ import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import static java.text.MessageFormat.format;
import static org.apache.solr.rest.schema.TestBulkSchemaAPI.getCopyFields;
import static org.apache.solr.rest.schema.TestBulkSchemaAPI.getObj;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.apache.solr.cloud.AbstractFullDistribZkTestBase;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.util.RESTfulServerProvider;
import org.apache.solr.util.RestTestHarness;
import org.junit.After;
import org.junit.BeforeClass;
import org.noggit.JSONParser;
import org.noggit.ObjectBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TestBulkSchemaConcurrent extends AbstractFullDistribZkTestBase {
static final Logger log = LoggerFactory.getLogger(TestBulkSchemaConcurrent.class);
@ -70,6 +71,15 @@ public class TestBulkSchemaConcurrent extends AbstractFullDistribZkTestBase {
restTestHarnesses.add(harness);
}
}
@After
public void tearDown() throws Exception {
super.tearDown();
for (RestTestHarness r : restTestHarnesses) {
r.close();
}
}
@Override
public void doTest() throws Exception {
@ -105,7 +115,6 @@ public class TestBulkSchemaConcurrent extends AbstractFullDistribZkTestBase {
boolean success = true;
for (List e : collectErrors) {
if(e== null) continue;
if(!e.isEmpty()){
success = false;
log.error(e.toString());
@ -166,27 +175,31 @@ public class TestBulkSchemaConcurrent extends AbstractFullDistribZkTestBase {
}
//get another node
RestTestHarness harness = restTestHarnesses.get(r.nextInt(restTestHarnesses.size()));
long startTime = System.nanoTime();
boolean success = false;
long maxTimeoutMillis = 100000;
Set<String> errmessages = new HashSet<>();
while ( ! success
&& TimeUnit.MILLISECONDS.convert(System.nanoTime() - startTime, TimeUnit.NANOSECONDS) < maxTimeoutMillis) {
errmessages.clear();
Map m = getObj(harness, aField, "fields");
if(m== null) errmessages.add(format("field {0} not created", aField));
m = getObj(harness, dynamicFldName, "dynamicFields");
if(m== null) errmessages.add(format("dynamic field {0} not created", dynamicFldName));
List l = getCopyFields(harness, "a1");
if(!checkCopyField(l,aField,dynamicCopyFldDest))
errmessages.add(format("CopyField source={0},dest={1} not created" , aField,dynamicCopyFldDest));
m = getObj(harness, "mystr", "fieldTypes");
if(m == null) errmessages.add(format("new type {} not created" , newFieldTypeName));
Thread.sleep(10);
RestTestHarness harness = restTestHarnesses.get(r.nextInt(restTestHarnesses.size()));
try {
long startTime = System.nanoTime();
boolean success = false;
long maxTimeoutMillis = 100000;
while (!success
&& TimeUnit.MILLISECONDS.convert(System.nanoTime() - startTime, TimeUnit.NANOSECONDS) < maxTimeoutMillis) {
errmessages.clear();
Map m = getObj(harness, aField, "fields");
if (m == null) errmessages.add(format("field {0} not created", aField));
m = getObj(harness, dynamicFldName, "dynamicFields");
if (m == null) errmessages.add(format("dynamic field {0} not created", dynamicFldName));
List l = getCopyFields(harness, "a1");
if (!checkCopyField(l, aField, dynamicCopyFldDest)) errmessages
.add(format("CopyField source={0},dest={1} not created", aField, dynamicCopyFldDest));
m = getObj(harness, "mystr", "fieldTypes");
if (m == null) errmessages.add(format("new type {} not created", newFieldTypeName));
Thread.sleep(10);
}
} finally {
harness.close();
}
if(!errmessages.isEmpty()){
errs.addAll(errmessages);

View File

@ -16,6 +16,12 @@ package org.apache.solr.schema;
* limitations under the License.
*/
import java.util.ArrayList;
import java.util.List;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.concurrent.TimeUnit;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.embedded.JettySolrRunner;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
@ -25,25 +31,17 @@ import org.apache.solr.common.cloud.Replica;
import org.apache.solr.common.cloud.Slice;
import org.apache.solr.common.cloud.SolrZkClient;
import org.apache.solr.common.cloud.ZkCoreNodeProps;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.util.BaseTestHarness;
import org.apache.solr.util.RESTfulServerProvider;
import org.apache.solr.util.RestTestHarness;
import org.apache.zookeeper.data.Stat;
import org.eclipse.jetty.servlet.ServletHolder;
import org.junit.After;
import org.junit.BeforeClass;
import org.restlet.ext.servlet.ServerServlet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.zookeeper.data.Stat;
import org.junit.BeforeClass;
import java.util.ArrayList;
import java.util.List;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.concurrent.TimeUnit;
public class TestCloudManagedSchemaConcurrent extends AbstractFullDistribZkTestBase {
private static final Logger log = LoggerFactory.getLogger(TestCloudManagedSchemaConcurrent.class);
private static final String SUCCESS_XPATH = "/response/lst[@name='responseHeader']/int[@name='status'][.='0']";
@ -67,6 +65,14 @@ public class TestCloudManagedSchemaConcurrent extends AbstractFullDistribZkTestB
System.setProperty("managed.schema.mutable", "true");
System.setProperty("enable.update.log", "true");
}
@After
public void tearDown() throws Exception {
super.tearDown();
for (RestTestHarness h : restTestHarnesses) {
h.close();
}
}
@Override
protected String getCloudSolrConfig() {
@ -352,8 +358,11 @@ public class TestCloudManagedSchemaConcurrent extends AbstractFullDistribZkTestB
return coreUrl.endsWith("/") ? coreUrl.substring(0, coreUrl.length()-1) : coreUrl;
}
});
addFieldTypePut(harness, "fooInt", 15);
try {
addFieldTypePut(harness, "fooInt", 15);
} finally {
harness.close();
}
// go into ZK to get the version of the managed schema after the update
SolrZkClient zkClient = cloudClient.getZkStateReader().getZkClient();
@ -410,21 +419,24 @@ public class TestCloudManagedSchemaConcurrent extends AbstractFullDistribZkTestB
return replicaUrl.endsWith("/") ? replicaUrl.substring(0, replicaUrl.length()-1) : replicaUrl;
}
});
long waitMs = waitSecs * 1000L;
if (waitMs > 0) Thread.sleep(waitMs); // wait a moment for the zk watcher to fire
try {
testHarness.validateQuery("/schema/zkversion?wt=xml", "//zkversion=" + schemaZkVersion);
} catch (Exception exc) {
if (retry) {
// brief wait before retrying
Thread.sleep(waitMs > 0 ? waitMs : 2000L);
long waitMs = waitSecs * 1000L;
if (waitMs > 0) Thread.sleep(waitMs); // wait a moment for the zk watcher to fire
try {
testHarness.validateQuery("/schema/zkversion?wt=xml", "//zkversion=" + schemaZkVersion);
} else {
throw exc;
} catch (Exception exc) {
if (retry) {
// brief wait before retrying
Thread.sleep(waitMs > 0 ? waitMs : 2000L);
testHarness.validateQuery("/schema/zkversion?wt=xml", "//zkversion=" + schemaZkVersion);
} else {
throw exc;
}
}
} finally {
testHarness.close();
}
}

View File

@ -27,6 +27,7 @@ import org.apache.solr.util.BaseTestHarness;
import org.apache.solr.util.RESTfulServerProvider;
import org.apache.solr.util.RestTestHarness;
import org.eclipse.jetty.servlet.ServletHolder;
import org.junit.After;
import org.junit.BeforeClass;
import org.junit.Before;
import org.restlet.ext.servlet.ServerServlet;
@ -58,6 +59,14 @@ public class TestCloudSchemaless extends AbstractFullDistribZkTestBase {
System.setProperty("numShards", Integer.toString(sliceCount));
}
@After
public void teardDown() throws Exception {
super.tearDown();
for (RestTestHarness h : restTestHarnesses) {
h.close();
}
}
public TestCloudSchemaless() {
schemaString = "schema-add-schema-fields-update-processor.xml";

View File

@ -23,8 +23,6 @@ import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.NamedList;
import org.junit.BeforeClass;
import java.util.Arrays;
/**
* Test for QueryComponent's distributed querying
*

View File

@ -19,7 +19,6 @@ package org.apache.solr.search;
import static org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
@ -42,15 +41,14 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.cloud.hdfs.HdfsBasicDistributedZk2Test;
import org.apache.solr.cloud.hdfs.HdfsTestUtil;
import org.apache.solr.common.util.IOUtils;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.update.DirectUpdateHandler2;
import org.apache.solr.update.HdfsUpdateLog;
import org.apache.solr.update.UpdateHandler;
import org.apache.solr.update.UpdateLog;
import org.apache.solr.update.processor.DistributedUpdateProcessor.DistribPhase;
import org.apache.solr.util.IOUtils;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Ignore;

View File

@ -1055,7 +1055,7 @@ public class CloudSolrClient extends SolrClient {
}
if (clientIsInternal && myClient!=null) {
myClient.getConnectionManager().shutdown();
HttpClientUtil.close(myClient);
}
if(this.threadPool != null && !this.threadPool.isShutdown()) {

View File

@ -29,7 +29,7 @@ import org.apache.solr.common.params.SolrParams;
*/
public class HttpClientConfigurer {
protected void configure(DefaultHttpClient httpClient, SolrParams config) {
public void configure(DefaultHttpClient httpClient, SolrParams config) {
if (config.get(HttpClientUtil.PROP_MAX_CONNECTIONS) != null) {
HttpClientUtil.setMaxConnections(httpClient,

View File

@ -38,6 +38,7 @@ import org.apache.http.conn.scheme.Scheme;
import org.apache.http.conn.ssl.SSLSocketFactory;
import org.apache.http.conn.ssl.X509HostnameVerifier;
import org.apache.http.entity.HttpEntityWrapper;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.impl.client.DefaultHttpRequestRetryHandler;
import org.apache.http.impl.client.SystemDefaultHttpClient;
@ -54,7 +55,6 @@ import org.slf4j.LoggerFactory;
* Utility class for creating/configuring httpclient instances.
*/
public class HttpClientUtil {
// socket timeout measured in ms, closes a socket if read
// takes longer than x ms to complete. throws
// java.net.SocketTimeoutException: Read timed out exception
@ -109,7 +109,7 @@ public class HttpClientUtil {
* http client configuration, if null a client with default
* configuration (no additional configuration) is created.
*/
public static HttpClient createClient(final SolrParams params) {
public static CloseableHttpClient createClient(final SolrParams params) {
final ModifiableSolrParams config = new ModifiableSolrParams(params);
if (logger.isDebugEnabled()) {
logger.debug("Creating new http client, config:" + config);
@ -123,7 +123,7 @@ public class HttpClientUtil {
* Creates new http client by using the provided configuration.
*
*/
public static HttpClient createClient(final SolrParams params, ClientConnectionManager cm) {
public static CloseableHttpClient createClient(final SolrParams params, ClientConnectionManager cm) {
final ModifiableSolrParams config = new ModifiableSolrParams(params);
if (logger.isDebugEnabled()) {
logger.debug("Creating new http client, config:" + config);
@ -141,6 +141,14 @@ public class HttpClientUtil {
SolrParams config) {
configurer.configure(httpClient, config);
}
public static void close(HttpClient httpClient) {
if (httpClient instanceof CloseableHttpClient) {
org.apache.solr.common.util.IOUtils.closeQuietly((CloseableHttpClient) httpClient);
} else {
httpClient.getConnectionManager().shutdown();
}
}
/**
* Control HTTP payload compression.
@ -269,6 +277,14 @@ public class HttpClientUtil {
}
}
public static void setStaleCheckingEnabled(final HttpClient httpClient, boolean enabled) {
HttpConnectionParams.setStaleCheckingEnabled(httpClient.getParams(), enabled);
}
public static void setTcpNoDelay(final HttpClient httpClient, boolean tcpNoDelay) {
HttpConnectionParams.setTcpNoDelay(httpClient.getParams(), tcpNoDelay);
}
private static class UseCompressionRequestInterceptor implements
HttpRequestInterceptor {

View File

@ -754,7 +754,7 @@ public class HttpSolrClient extends SolrClient {
@Override
public void shutdown() {
if (httpClient != null && internalClient) {
httpClient.getConnectionManager().shutdown();
HttpClientUtil.close(httpClient);
}
}

View File

@ -457,7 +457,7 @@ public class LBHttpSolrClient extends SolrClient {
aliveCheckExecutor.shutdownNow();
}
if(clientIsInternal) {
httpClient.getConnectionManager().shutdown();
HttpClientUtil.close(httpClient);
}
}

View File

@ -26,7 +26,6 @@ import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicLong;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Source;
@ -40,8 +39,8 @@ import org.apache.solr.common.SolrException;
import org.apache.solr.common.StringUtils;
import org.apache.solr.common.cloud.ZkClientConnectionStrategy.ZkUpdate;
import org.apache.solr.common.util.ExecutorUtil;
import org.apache.solr.common.util.ObjectReleaseTracker;
import org.apache.solr.common.util.SolrjNamedThreadFactory;
import org.apache.zookeeper.AsyncCallback;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.KeeperException.NoNodeException;
@ -62,9 +61,6 @@ import org.slf4j.LoggerFactory;
*
*/
public class SolrZkClient implements Closeable {
// These should *only* be used for debugging or monitoring purposes
public static final AtomicLong numOpens = new AtomicLong();
public static final AtomicLong numCloses = new AtomicLong();
static final String NEWL = System.getProperty("line.separator");
@ -183,7 +179,7 @@ public class SolrZkClient implements Closeable {
}
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
}
numOpens.incrementAndGet();
assert ObjectReleaseTracker.track(this);
if (zkACLProvider == null) {
this.zkACLProvider = createZkACLProvider();
} else {
@ -644,7 +640,7 @@ public class SolrZkClient implements Closeable {
connManager.close();
closeCallbackExecutor();
}
numCloses.incrementAndGet();
assert ObjectReleaseTracker.release(this);
}
public boolean isClosed() {

View File

@ -1,8 +1,7 @@
package org.apache.solr.util;
package org.apache.solr.common.util;
import java.io.Closeable;
import org.apache.solr.core.HdfsDirectoryFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

View File

@ -0,0 +1,62 @@
package org.apache.solr.common.util;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
public class ObjectReleaseTracker {
public static Map<Object,String> OBJECTS = new ConcurrentHashMap<>();
public static boolean track(Object object) {
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
new ObjectTrackerException().printStackTrace(pw);
OBJECTS.put(object, sw.toString());
return true;
}
public static boolean release(Object object) {
OBJECTS.remove(object);
return true;
}
public static boolean clearObjectTrackerAndCheckEmpty() {
Set<Entry<Object,String>> entries = OBJECTS.entrySet();
boolean empty = entries.isEmpty();
if (entries.size() > 0) {
System.err.println("ObjectTracker found objects that were not released!!!");
}
for (Entry<Object,String> entry : entries) {
System.err.println(entry.getValue());
}
OBJECTS.clear();
return empty;
}
private static class ObjectTrackerException extends RuntimeException {
}
}

View File

@ -17,7 +17,7 @@
package org.apache.solr.client.solrj;
import org.apache.http.client.HttpClient;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.solr.client.solrj.impl.HttpClientUtil;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
@ -34,15 +34,16 @@ public class SolrExceptionTest extends LuceneTestCase {
// this is a very simple test and most of the test should be considered verified
// if the compiler won't let you by without the try/catch
boolean gotExpectedError = false;
CloseableHttpClient httpClient = null;
try {
// switched to a local address to avoid going out on the net, ns lookup issues, etc.
// set a 1ms timeout to let the connection fail faster.
HttpClient httpClient = HttpClientUtil.createClient(null);
httpClient = HttpClientUtil.createClient(null);
HttpClientUtil.setConnectionTimeout(httpClient, 1);
SolrClient client = new HttpSolrClient("http://[ff01::114]:11235/solr/", httpClient);
SolrQuery query = new SolrQuery("test123");
client.query(query);
client.shutdown();
httpClient.close();
} catch (SolrServerException sse) {
gotExpectedError = true;
/***
@ -50,6 +51,8 @@ public class SolrExceptionTest extends LuceneTestCase {
//If one is using OpenDNS, then you don't get UnknownHostException, instead you get back that the query couldn't execute
|| (sse.getRootCause().getClass() == SolrException.class && ((SolrException) sse.getRootCause()).code() == 302 && sse.getMessage().equals("Error executing query")));
***/
} finally {
if (httpClient != null) HttpClientUtil.close(httpClient);
}
assertTrue(gotExpectedError);
}

View File

@ -17,10 +17,17 @@
package org.apache.solr.client.solrj;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import junit.framework.Assert;
import org.apache.commons.io.FileUtils;
import org.apache.http.client.HttpClient;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.apache.lucene.util.QuickPatchThreadsFilter;
@ -39,12 +46,7 @@ import org.junit.BeforeClass;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
/**
* Test for LBHttpSolrClient
@ -61,7 +63,7 @@ public class TestLBHttpSolrClient extends SolrTestCaseJ4 {
private static final Logger log = LoggerFactory.getLogger(TestLBHttpSolrClient.class);
SolrInstance[] solr = new SolrInstance[3];
HttpClient httpClient;
CloseableHttpClient httpClient;
// TODO: fix this test to not require FSDirectory
static String savedFactory;
@ -123,7 +125,7 @@ public class TestLBHttpSolrClient extends SolrTestCaseJ4 {
aSolr.tearDown();
}
}
httpClient.getConnectionManager().shutdown();
httpClient.close();
super.tearDown();
}
@ -205,23 +207,26 @@ public class TestLBHttpSolrClient extends SolrTestCaseJ4 {
ModifiableSolrParams params = new ModifiableSolrParams();
params.set(HttpClientUtil.PROP_CONNECTION_TIMEOUT, 250);
params.set(HttpClientUtil.PROP_SO_TIMEOUT, 250);
HttpClient myHttpClient = HttpClientUtil.createClient(params);
LBHttpSolrClient client = new LBHttpSolrClient(myHttpClient, s);
client.setAliveCheckInterval(500);
// Kill a server and test again
solr[1].jetty.stop();
solr[1].jetty = null;
// query the servers
for (String value : s)
client.query(new SolrQuery("*:*"));
// Start the killed server once again
solr[1].startJetty();
// Wait for the alive check to complete
waitForServer(30000, client, 3, "solr1");
CloseableHttpClient myHttpClient = HttpClientUtil.createClient(params);
try {
LBHttpSolrClient client = new LBHttpSolrClient(myHttpClient, s);
client.setAliveCheckInterval(500);
// Kill a server and test again
solr[1].jetty.stop();
solr[1].jetty = null;
// query the servers
for (String value : s)
client.query(new SolrQuery("*:*"));
// Start the killed server once again
solr[1].startJetty();
// Wait for the alive check to complete
waitForServer(30000, client, 3, "solr1");
} finally {
myHttpClient.close();
}
}
// wait maximum ms for serverName to come back up

View File

@ -22,6 +22,7 @@ import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.solr.SolrJettyTestBase;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrRequest;
@ -43,6 +44,7 @@ import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.InputStream;
import java.net.Socket;
@ -449,30 +451,34 @@ public class BasicHttpSolrClientTest extends SolrJettyTestBase {
@Test
public void testCompression() throws Exception {
HttpSolrClient client = new HttpSolrClient(jetty.getBaseUrl().toString() + "/debug/foo");
SolrQuery q = new SolrQuery("*:*");
// verify request header gets set
DebugServlet.clear();
try {
client.query(q);
} catch (Throwable t) {}
assertNull(DebugServlet.headers.get("Accept-Encoding"));
client.setAllowCompression(true);
try {
client.query(q);
} catch (Throwable t) {}
assertNotNull(DebugServlet.headers.get("Accept-Encoding"));
client.setAllowCompression(false);
try {
client.query(q);
} catch (Throwable t) {}
assertNull(DebugServlet.headers.get("Accept-Encoding"));
SolrQuery q = new SolrQuery("*:*");
// verify request header gets set
DebugServlet.clear();
try {
client.query(q);
} catch (Throwable t) {}
assertNull(DebugServlet.headers.get("Accept-Encoding"));
client.setAllowCompression(true);
try {
client.query(q);
} catch (Throwable t) {}
assertNotNull(DebugServlet.headers.get("Accept-Encoding"));
client.setAllowCompression(false);
try {
client.query(q);
} catch (Throwable t) {}
assertNull(DebugServlet.headers.get("Accept-Encoding"));
} finally {
client.shutdown();
}
// verify server compresses output
HttpGet get = new HttpGet(jetty.getBaseUrl().toString() + "/collection1" +
"/select?q=foo&wt=xml");
get.setHeader("Accept-Encoding", "gzip");
HttpClient httpclient = HttpClientUtil.createClient(null);
CloseableHttpClient httpclient = HttpClientUtil.createClient(null);
HttpEntity entity = null;
try {
HttpResponse response = httpclient.execute(get);
@ -484,23 +490,28 @@ public class BasicHttpSolrClientTest extends SolrJettyTestBase {
if(entity!=null) {
entity.getContent().close();
}
httpclient.getConnectionManager().shutdown();
httpclient.close();
}
// verify compressed response can be handled
client = new HttpSolrClient(jetty.getBaseUrl().toString() + "/collection1");
client.setAllowCompression(true);
q = new SolrQuery("foo");
QueryResponse response = client.query(q);
assertEquals(0, response.getStatus());
client.shutdown();
try {
client.setAllowCompression(true);
SolrQuery q = new SolrQuery("foo");
QueryResponse response = client.query(q);
assertEquals(0, response.getStatus());
} finally {
client.shutdown();
}
}
@Test
public void testSetParametersExternalClient(){
HttpClient httpClient = HttpClientUtil.createClient(null);
public void testSetParametersExternalClient() throws IOException{
CloseableHttpClient httpClient = HttpClientUtil.createClient(null);
HttpSolrClient solrClient = new HttpSolrClient(jetty.getBaseUrl().toString(),
httpClient);
httpClient);
try {
try {
solrClient.setMaxTotalConnections(1);
fail("Operation should not succeed.");
@ -509,21 +520,27 @@ public class BasicHttpSolrClientTest extends SolrJettyTestBase {
solrClient.setDefaultMaxConnectionsPerHost(1);
fail("Operation should not succeed.");
} catch (UnsupportedOperationException e) {}
solrClient.shutdown();
httpClient.getConnectionManager().shutdown();
} finally {
solrClient.shutdown();
httpClient.close();
}
}
@Test
public void testGetRawStream() throws SolrServerException, IOException{
HttpClient client = HttpClientUtil.createClient(null);
HttpSolrClient solrClient = new HttpSolrClient(jetty.getBaseUrl().toString() + "/collection1",
client, null);
QueryRequest req = new QueryRequest();
NamedList response = solrClient.request(req);
InputStream stream = (InputStream)response.get("stream");
assertNotNull(stream);
stream.close();
client.getConnectionManager().shutdown();
CloseableHttpClient client = HttpClientUtil.createClient(null);
try {
HttpSolrClient solrClient = new HttpSolrClient(jetty.getBaseUrl().toString() + "/collection1",
client, null);
QueryRequest req = new QueryRequest();
NamedList response = solrClient.request(req);
InputStream stream = (InputStream)response.get("stream");
assertNotNull(stream);
stream.close();
} finally {
client.close();
}
}
/**
@ -599,53 +616,56 @@ public class BasicHttpSolrClientTest extends SolrJettyTestBase {
public void testQueryString() throws Exception {
HttpSolrClient client = new HttpSolrClient(jetty.getBaseUrl().toString() +
"/debug/foo");
// test without request query params
DebugServlet.clear();
client.setQueryParams(setOf("serverOnly"));
UpdateRequest req = new UpdateRequest();
setReqParamsOf(req, "serverOnly", "notServer");
try {
client.request(req);
} catch (Throwable t) {}
verifyServletState(client, req);
// test without server query params
DebugServlet.clear();
client.setQueryParams(setOf());
req = new UpdateRequest();
req.setQueryParams(setOf("requestOnly"));
setReqParamsOf(req, "requestOnly", "notRequest");
try {
client.request(req);
} catch (Throwable t) {}
verifyServletState(client, req);
// test with both request and server query params
DebugServlet.clear();
req = new UpdateRequest();
client.setQueryParams(setOf("serverOnly", "both"));
req.setQueryParams(setOf("requestOnly", "both"));
setReqParamsOf(req, "serverOnly", "requestOnly", "both", "neither");
try {
client.request(req);
} catch (Throwable t) {}
verifyServletState(client, req);
// test with both request and server query params with single stream
DebugServlet.clear();
req = new UpdateRequest();
req.add(new SolrInputDocument());
client.setQueryParams(setOf("serverOnly", "both"));
req.setQueryParams(setOf("requestOnly", "both"));
setReqParamsOf(req, "serverOnly", "requestOnly", "both", "neither");
try {
client.request(req);
} catch (Throwable t) {}
// NOTE: single stream requests send all the params
// as part of the query string. So add "neither" to the request
// so it passes the verification step.
req.setQueryParams(setOf("requestOnly", "both", "neither"));
verifyServletState(client, req);
// test without request query params
DebugServlet.clear();
client.setQueryParams(setOf("serverOnly"));
UpdateRequest req = new UpdateRequest();
setReqParamsOf(req, "serverOnly", "notServer");
try {
client.request(req);
} catch (Throwable t) {}
verifyServletState(client, req);
// test without server query params
DebugServlet.clear();
client.setQueryParams(setOf());
req = new UpdateRequest();
req.setQueryParams(setOf("requestOnly"));
setReqParamsOf(req, "requestOnly", "notRequest");
try {
client.request(req);
} catch (Throwable t) {}
verifyServletState(client, req);
// test with both request and server query params
DebugServlet.clear();
req = new UpdateRequest();
client.setQueryParams(setOf("serverOnly", "both"));
req.setQueryParams(setOf("requestOnly", "both"));
setReqParamsOf(req, "serverOnly", "requestOnly", "both", "neither");
try {
client.request(req);
} catch (Throwable t) {}
verifyServletState(client, req);
// test with both request and server query params with single stream
DebugServlet.clear();
req = new UpdateRequest();
req.add(new SolrInputDocument());
client.setQueryParams(setOf("serverOnly", "both"));
req.setQueryParams(setOf("requestOnly", "both"));
setReqParamsOf(req, "serverOnly", "requestOnly", "both", "neither");
try {
client.request(req);
} catch (Throwable t) {}
// NOTE: single stream requests send all the params
// as part of the query string. So add "neither" to the request
// so it passes the verification step.
req.setQueryParams(setOf("requestOnly", "both", "neither"));
verifyServletState(client, req);
} finally {
client.shutdown();
}
}
}

View File

@ -22,6 +22,7 @@ import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.apache.http.client.HttpClient;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
@ -223,9 +224,13 @@ public class CloudSolrClientTest extends AbstractFullDistribZkTestBase {
params.add("distrib", "false");
QueryRequest queryRequest = new QueryRequest(params);
HttpSolrClient solrClient = new HttpSolrClient(url);
QueryResponse queryResponse = queryRequest.process(solrClient);
SolrDocumentList docList = queryResponse.getResults();
assertTrue(docList.getNumFound() == 1);
try {
QueryResponse queryResponse = queryRequest.process(solrClient);
SolrDocumentList docList = queryResponse.getResults();
assertTrue(docList.getNumFound() == 1);
} finally {
solrClient.shutdown();
}
}
// Test the deleteById routing for UpdateRequest
@ -266,9 +271,13 @@ public class CloudSolrClientTest extends AbstractFullDistribZkTestBase {
params.add("distrib", "false");
QueryRequest queryRequest = new QueryRequest(params);
HttpSolrClient solrClient = new HttpSolrClient(url);
QueryResponse queryResponse = queryRequest.process(solrClient);
SolrDocumentList docList = queryResponse.getResults();
assertTrue(docList.getNumFound() == 1);
try {
QueryResponse queryResponse = queryRequest.process(solrClient);
SolrDocumentList docList = queryResponse.getResults();
assertTrue(docList.getNumFound() == 1);
} finally {
solrClient.shutdown();
}
}
} finally {
threadedClient.shutdown();
@ -368,16 +377,21 @@ public class CloudSolrClientTest extends AbstractFullDistribZkTestBase {
private Long getNumRequests(String baseUrl, String collectionName) throws
SolrServerException, IOException {
HttpSolrClient client = new HttpSolrClient(baseUrl + "/"+ collectionName);
client.setConnectionTimeout(15000);
client.setSoTimeout(60000);
ModifiableSolrParams params = new ModifiableSolrParams();
params.set("qt", "/admin/mbeans");
params.set("stats", "true");
params.set("key", "standard");
params.set("cat", "QUERYHANDLER");
// use generic request to avoid extra processing of queries
QueryRequest req = new QueryRequest(params);
NamedList<Object> resp = client.request(req);
NamedList<Object> resp;
try {
client.setConnectionTimeout(15000);
client.setSoTimeout(60000);
ModifiableSolrParams params = new ModifiableSolrParams();
params.set("qt", "/admin/mbeans");
params.set("stats", "true");
params.set("key", "standard");
params.set("cat", "QUERYHANDLER");
// use generic request to avoid extra processing of queries
QueryRequest req = new QueryRequest(params);
resp = client.request(req);
} finally {
client.shutdown();
}
return (Long) resp.findRecursive("solr-mbeans", "QUERYHANDLER",
"standard", "stats", "requests");
}
@ -488,11 +502,11 @@ public class CloudSolrClientTest extends AbstractFullDistribZkTestBase {
// in the afterClass method of the base class
}
public void customHttpClientTest() {
public void customHttpClientTest() throws IOException {
CloudSolrClient solrClient = null;
ModifiableSolrParams params = new ModifiableSolrParams();
params.set(HttpClientUtil.PROP_SO_TIMEOUT, 1000);
HttpClient client = null;
CloseableHttpClient client = null;
try {
client = HttpClientUtil.createClient(params);
@ -500,7 +514,7 @@ public class CloudSolrClientTest extends AbstractFullDistribZkTestBase {
assertTrue(solrClient.getLbClient().getHttpClient() == client);
} finally {
solrClient.shutdown();
client.getConnectionManager().shutdown();
client.close();
}
}
}

View File

@ -19,6 +19,7 @@ package org.apache.solr.client.solrj.impl;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.http.auth.AuthScope;
@ -28,6 +29,7 @@ import org.apache.http.conn.ssl.AllowAllHostnameVerifier;
import org.apache.http.conn.ssl.BrowserCompatHostnameVerifier;
import org.apache.http.conn.ssl.SSLSocketFactory;
import org.apache.http.conn.ssl.X509HostnameVerifier;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.impl.conn.PoolingClientConnectionManager;
import org.apache.http.params.HttpConnectionParams;
@ -39,9 +41,9 @@ import org.junit.Test;
public class HttpClientUtilTest {
@Test
public void testNoParamsSucceeds() {
HttpClient clien = HttpClientUtil.createClient(null);
clien.getConnectionManager().shutdown();
public void testNoParamsSucceeds() throws IOException {
CloseableHttpClient client = HttpClientUtil.createClient(null);
client.close();
}
@Test
@ -57,26 +59,29 @@ public class HttpClientUtilTest {
params.set(HttpClientUtil.PROP_SO_TIMEOUT, 42345);
params.set(HttpClientUtil.PROP_USE_RETRY, false);
DefaultHttpClient client = (DefaultHttpClient) HttpClientUtil.createClient(params);
assertEquals(12345, HttpConnectionParams.getConnectionTimeout(client.getParams()));
assertEquals(PoolingClientConnectionManager.class, client.getConnectionManager().getClass());
assertEquals(22345, ((PoolingClientConnectionManager)client.getConnectionManager()).getMaxTotal());
assertEquals(32345, ((PoolingClientConnectionManager)client.getConnectionManager()).getDefaultMaxPerRoute());
assertEquals(42345, HttpConnectionParams.getSoTimeout(client.getParams()));
assertEquals(HttpClientUtil.NO_RETRY, client.getHttpRequestRetryHandler());
assertEquals("pass", client.getCredentialsProvider().getCredentials(new AuthScope("127.0.0.1", 1234)).getPassword());
assertEquals("user", client.getCredentialsProvider().getCredentials(new AuthScope("127.0.0.1", 1234)).getUserPrincipal().getName());
assertEquals(true, client.getParams().getParameter(ClientPNames.HANDLE_REDIRECTS));
client.getConnectionManager().shutdown();
try {
assertEquals(12345, HttpConnectionParams.getConnectionTimeout(client.getParams()));
assertEquals(PoolingClientConnectionManager.class, client.getConnectionManager().getClass());
assertEquals(22345, ((PoolingClientConnectionManager)client.getConnectionManager()).getMaxTotal());
assertEquals(32345, ((PoolingClientConnectionManager)client.getConnectionManager()).getDefaultMaxPerRoute());
assertEquals(42345, HttpConnectionParams.getSoTimeout(client.getParams()));
assertEquals(HttpClientUtil.NO_RETRY, client.getHttpRequestRetryHandler());
assertEquals("pass", client.getCredentialsProvider().getCredentials(new AuthScope("127.0.0.1", 1234)).getPassword());
assertEquals("user", client.getCredentialsProvider().getCredentials(new AuthScope("127.0.0.1", 1234)).getUserPrincipal().getName());
assertEquals(true, client.getParams().getParameter(ClientPNames.HANDLE_REDIRECTS));
} finally {
client.close();
}
}
@Test
public void testReplaceConfigurer(){
public void testReplaceConfigurer() throws IOException{
try {
final AtomicInteger counter = new AtomicInteger();
HttpClientConfigurer custom = new HttpClientConfigurer(){
@Override
protected void configure(DefaultHttpClient httpClient, SolrParams config) {
public void configure(DefaultHttpClient httpClient, SolrParams config) {
super.configure(httpClient, config);
counter.set(config.getInt("custom-param", -1));
}
@ -87,7 +92,7 @@ public class HttpClientUtilTest {
ModifiableSolrParams params = new ModifiableSolrParams();
params.set("custom-param", 5);
HttpClientUtil.createClient(params).getConnectionManager().shutdown();
HttpClientUtil.createClient(params).close();
assertEquals(5, counter.get());
} finally {
//restore default configurer
@ -98,26 +103,36 @@ public class HttpClientUtilTest {
@Test
@SuppressWarnings("deprecation")
public void testSSLSystemProperties() {
public void testSSLSystemProperties() throws IOException {
CloseableHttpClient client = HttpClientUtil.createClient(null);
try {
SSLTestConfig.setSSLSystemProperties();
assertNotNull("HTTPS scheme could not be created using the javax.net.ssl.* system properties.",
HttpClientUtil.createClient(null).getConnectionManager().getSchemeRegistry().get("https"));
client.getConnectionManager().getSchemeRegistry().get("https"));
System.clearProperty(HttpClientUtil.SYS_PROP_CHECK_PEER_NAME);
assertEquals(BrowserCompatHostnameVerifier.class, getHostnameVerifier(HttpClientUtil.createClient(null)).getClass());
client.close();
client = HttpClientUtil.createClient(null);
assertEquals(BrowserCompatHostnameVerifier.class, getHostnameVerifier(client).getClass());
System.setProperty(HttpClientUtil.SYS_PROP_CHECK_PEER_NAME, "true");
assertEquals(BrowserCompatHostnameVerifier.class, getHostnameVerifier(HttpClientUtil.createClient(null)).getClass());
client.close();
client = HttpClientUtil.createClient(null);
assertEquals(BrowserCompatHostnameVerifier.class, getHostnameVerifier(client).getClass());
System.setProperty(HttpClientUtil.SYS_PROP_CHECK_PEER_NAME, "");
assertEquals(BrowserCompatHostnameVerifier.class, getHostnameVerifier(HttpClientUtil.createClient(null)).getClass());
client.close();
client = HttpClientUtil.createClient(null);
assertEquals(BrowserCompatHostnameVerifier.class, getHostnameVerifier(client).getClass());
System.setProperty(HttpClientUtil.SYS_PROP_CHECK_PEER_NAME, "false");
assertEquals(AllowAllHostnameVerifier.class, getHostnameVerifier(HttpClientUtil.createClient(null)).getClass());
client.close();
client = HttpClientUtil.createClient(null);
assertEquals(AllowAllHostnameVerifier.class, getHostnameVerifier(client).getClass());
} finally {
SSLTestConfig.clearSSLSystemProperties();
System.clearProperty(HttpClientUtil.SYS_PROP_CHECK_PEER_NAME);
client.close();
}
}

View File

@ -3,15 +3,16 @@
*/
package org.apache.solr.client.solrj.impl;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import java.io.IOException;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.solr.client.solrj.ResponseParser;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.junit.Test;
import java.net.MalformedURLException;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
@ -38,19 +39,30 @@ public class LBHttpSolrClientTest {
* Test method for {@link LBHttpSolrClient#LBHttpSolrClient(org.apache.http.client.HttpClient, org.apache.solr.client.solrj.ResponseParser, java.lang.String[])}.
*
* Validate that the parser passed in is used in the <code>HttpSolrClient</code> instances created.
*
* @throws MalformedURLException If URL is invalid, no URL passed, so won't happen.
*/
@Test
public void testLBHttpSolrClientHttpClientResponseParserStringArray() throws MalformedURLException {
LBHttpSolrClient testClient = new LBHttpSolrClient(HttpClientUtil.createClient(new ModifiableSolrParams()), (ResponseParser) null);
public void testLBHttpSolrClientHttpClientResponseParserStringArray() throws IOException {
CloseableHttpClient httpClient = HttpClientUtil.createClient(new ModifiableSolrParams());
LBHttpSolrClient testClient = new LBHttpSolrClient(httpClient, (ResponseParser) null);
HttpSolrClient httpSolrClient = testClient.makeSolrClient("http://127.0.0.1:8080");
assertNull("Generated server should have null parser.", httpSolrClient.getParser());
ResponseParser parser = new BinaryResponseParser();
testClient = new LBHttpSolrClient(HttpClientUtil.createClient(new ModifiableSolrParams()), parser);
httpSolrClient = testClient.makeSolrClient("http://127.0.0.1:8080");
assertEquals("Invalid parser passed to generated server.", parser, httpSolrClient.getParser());
try {
assertNull("Generated server should have null parser.", httpSolrClient.getParser());
} finally {
httpSolrClient.shutdown();
testClient.shutdown();
httpClient.close();
}
try {
ResponseParser parser = new BinaryResponseParser();
httpClient = HttpClientUtil.createClient(new ModifiableSolrParams());
testClient = new LBHttpSolrClient(httpClient, parser);
httpSolrClient = testClient.makeSolrClient("http://127.0.0.1:8080");
assertEquals("Invalid parser passed to generated server.", parser, httpSolrClient.getParser());
} finally {
httpSolrClient.shutdown();
testClient.shutdown();
httpClient.close();
}
}
}

View File

@ -340,8 +340,8 @@ public abstract class BaseDistributedSearchTestCase extends SolrTestCaseJ4 {
}
protected void destroyServers() throws Exception {
controlJetty.stop();
((HttpSolrClient) controlClient).shutdown();
if (controlJetty != null) controlJetty.stop();
if (controlClient != null) ((HttpSolrClient) controlClient).shutdown();
for (JettySolrRunner jetty : jettys) jetty.stop();
for (SolrClient client : clients) ((HttpSolrClient) client).shutdown();
clients.clear();

View File

@ -17,6 +17,11 @@ package org.apache.solr;
* limitations under the License.
*/
import java.io.File;
import java.io.OutputStreamWriter;
import java.util.Properties;
import java.util.SortedMap;
import org.apache.commons.io.FileUtils;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.solr.client.solrj.SolrClient;
@ -30,11 +35,6 @@ import org.junit.BeforeClass;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.OutputStreamWriter;
import java.util.Properties;
import java.util.SortedMap;
abstract public class SolrJettyTestBase extends SolrTestCaseJ4
{

View File

@ -17,9 +17,40 @@
package org.apache.solr;
import com.carrotsearch.randomizedtesting.RandomizedContext;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule;
import static com.google.common.base.Preconditions.checkNotNull;
import java.io.File;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.io.StringWriter;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.net.URL;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.logging.ConsoleHandler;
import java.util.logging.Handler;
import java.util.logging.Level;
import java.util.regex.Pattern;
import javax.xml.xpath.XPathExpressionException;
import org.apache.commons.codec.Charsets;
import org.apache.commons.io.FileUtils;
import org.apache.lucene.analysis.MockAnalyzer;
@ -40,10 +71,10 @@ import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.SolrInputField;
import org.apache.solr.common.cloud.SolrZkClient;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.ObjectReleaseTracker;
import org.apache.solr.common.util.XML;
import org.apache.solr.core.ConfigSolr;
import org.apache.solr.core.ConfigSolrXmlOld;
@ -78,38 +109,9 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xml.sax.SAXException;
import javax.xml.xpath.XPathExpressionException;
import java.io.File;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.io.StringWriter;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.net.URL;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.logging.ConsoleHandler;
import java.util.logging.Handler;
import java.util.logging.Level;
import java.util.regex.Pattern;
import static com.google.common.base.Preconditions.checkNotNull;
import com.carrotsearch.randomizedtesting.RandomizedContext;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule;
/**
* A junit4 Solr test harness that extends LuceneTestCaseJ4. To change which core is used when loading the schema and solrconfig.xml, simply
@ -165,7 +167,6 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
System.setProperty("solr.clustering.enabled", "false");
setupLogging();
startTrackingSearchers();
startTrackingZkClients();
ignoreException("ignore_exception");
newRandomConfig();
@ -185,7 +186,7 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
deleteCore();
resetExceptionIgnores();
endTrackingSearchers();
endTrackingZkClients();
assertTrue("Some resources were not closed, shutdown, or released.", ObjectReleaseTracker.clearObjectTrackerAndCheckEmpty());
resetFactory();
coreName = ConfigSolrXmlOld.DEFAULT_DEFAULT_CORE_NAME;
} finally {
@ -410,12 +411,6 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
numOpens = numCloses = 0;
}
}
static long zkClientNumOpens;
static long zkClientNumCloses;
public static void startTrackingZkClients() {
zkClientNumOpens = SolrZkClient.numOpens.get();
zkClientNumCloses = SolrZkClient.numCloses.get();
}
public static void endTrackingSearchers() {
long endNumOpens = SolrIndexSearcher.numOpens.get();
@ -449,20 +444,6 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase {
}
}
public static void endTrackingZkClients() {
long endNumOpens = SolrZkClient.numOpens.get();
long endNumCloses = SolrZkClient.numCloses.get();
SolrZkClient.numOpens.getAndSet(0);
SolrZkClient.numCloses.getAndSet(0);
if (endNumOpens-zkClientNumOpens != endNumCloses-zkClientNumCloses) {
String msg = "ERROR: SolrZkClient opens=" + (endNumOpens-zkClientNumOpens) + " closes=" + (endNumCloses-zkClientNumCloses);
log.error(msg);
fail(msg);
}
}
/** Causes an exception matching the regex pattern to not be logged. */
public static void ignoreException(String pattern) {
if (SolrException.ignorePatterns == null)

View File

@ -265,6 +265,8 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
if (defaultCollection != null) client.setDefaultCollection(defaultCollection);
client.getLbClient().getHttpClient().getParams()
.setParameter(CoreConnectionPNames.CONNECTION_TIMEOUT, 30000);
client.getLbClient().getHttpClient().getParams()
.setParameter(CoreConnectionPNames.SO_TIMEOUT, 60000);
return client;
}
@ -1475,7 +1477,7 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
retry = false;
}
cnt++;
if (cnt > 20) break;
if (cnt > 30) break;
Thread.sleep(2000);
} while (retry);
}
@ -1498,7 +1500,12 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
Set<String> theShards = shardToJetty.keySet();
String failMessage = null;
for (String shard : theShards) {
failMessage = checkShardConsistency(shard, true, false);
try {
failMessage = checkShardConsistency(shard, true, true);
} catch (Exception e) {
// we might hit a node we just stopped
failMessage="hit exception:" + e.getMessage();
}
}
if (failMessage != null) {
@ -1508,7 +1515,7 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
retry = false;
}
cnt++;
if (cnt > 20) break;
if (cnt > 40) break;
Thread.sleep(2000);
} while (retry);
}
@ -1574,8 +1581,7 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
log.error("", e);
}
}
clients.clear();
jettys.clear();
super.destroyServers();
}
protected CollectionAdminResponse createCollection(String collectionName, int numShards, int replicationFactor, int maxShardsPerNode) throws SolrServerException, IOException {
@ -1857,10 +1863,14 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
baseUrl = baseUrl.substring(0, baseUrl.length() - "collection1".length());
HttpSolrClient baseClient = new HttpSolrClient(baseUrl);
baseClient.setConnectionTimeout(15000);
baseClient.setSoTimeout(60000 * 5);
NamedList r = baseClient.request(request);
baseClient.shutdown();
NamedList r;
try {
baseClient.setConnectionTimeout(15000);
baseClient.setSoTimeout(60000 * 5);
r = baseClient.request(request);
} finally {
baseClient.shutdown();
}
return r;
}
@ -1905,7 +1915,8 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes
assertNotNull("No Slice for "+shardId, shard);
allReplicasUp = true; // assume true
Collection<Replica> replicas = shard.getReplicas();
assertTrue(replicas.size() == rf);
assertTrue("Did not find correct number of replicas. Expected:" + rf + " Found:" + replicas.size(), replicas.size() == rf);
leader = shard.getLeader();
assertNotNull(leader);
log.info("Found "+replicas.size()+" replicas and leader on "+

View File

@ -218,9 +218,11 @@ public class ChaosMonkey {
if (filter != null) {
CoreContainer cores = ((SolrDispatchFilter) filter).getCores();
if (cores != null) {
int zklocalport = ((InetSocketAddress) cores.getZkController()
.getZkClient().getSolrZooKeeper().getSocketAddress()).getPort();
IpTables.blockPort(zklocalport);
if (cores.isZooKeeperAware()) {
int zklocalport = ((InetSocketAddress) cores.getZkController()
.getZkClient().getSolrZooKeeper().getSocketAddress()).getPort();
IpTables.blockPort(zklocalport);
}
}
}
}
@ -591,9 +593,11 @@ public class ChaosMonkey {
if (filter != null) {
CoreContainer cores = ((SolrDispatchFilter) filter).getCores();
if (cores != null) {
int zklocalport = ((InetSocketAddress) cores.getZkController()
.getZkClient().getSolrZooKeeper().getSocketAddress()).getPort();
IpTables.unblockPort(zklocalport);
if (cores.isZooKeeperAware()) {
int zklocalport = ((InetSocketAddress) cores.getZkController()
.getZkClient().getSolrZooKeeper().getSocketAddress()).getPort();
IpTables.unblockPort(zklocalport);
}
}
}
}

View File

@ -40,7 +40,10 @@ abstract public class RestTestBase extends SolrJettyTestBase {
protected static RestTestHarness restTestHarness;
@AfterClass
public static void cleanUpHarness() {
public static void cleanUpHarness() throws IOException {
if (restTestHarness != null) {
restTestHarness.close();
}
restTestHarness = null;
}

View File

@ -16,6 +16,7 @@ package org.apache.solr.util;
* limitations under the License.
*/
import java.io.Closeable;
import java.io.IOException;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
@ -24,7 +25,6 @@ import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import org.apache.http.HttpEntity;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
@ -32,6 +32,7 @@ import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.util.EntityUtils;
import org.apache.solr.client.solrj.impl.HttpClientUtil;
import org.apache.solr.common.params.ModifiableSolrParams;
@ -39,9 +40,9 @@ import org.apache.solr.common.params.ModifiableSolrParams;
/**
* Facilitates testing Solr's REST API via a provided embedded Jetty
*/
public class RestTestHarness extends BaseTestHarness {
public class RestTestHarness extends BaseTestHarness implements Closeable {
private RESTfulServerProvider serverProvider;
private HttpClient httpClient = HttpClientUtil.createClient(new
private CloseableHttpClient httpClient = HttpClientUtil.createClient(new
ModifiableSolrParams());
public RestTestHarness(RESTfulServerProvider serverProvider) {
@ -195,4 +196,9 @@ public class RestTestHarness extends BaseTestHarness {
EntityUtils.consumeQuietly(entity);
}
}
@Override
public void close() throws IOException {
httpClient.close();
}
}

View File

@ -90,7 +90,7 @@ public class SSLTestConfig extends SSLConfig {
private class SSLHttpClientConfigurer extends HttpClientConfigurer {
@SuppressWarnings("deprecation")
protected void configure(DefaultHttpClient httpClient, SolrParams config) {
public void configure(DefaultHttpClient httpClient, SolrParams config) {
super.configure(httpClient, config);
SchemeRegistry registry = httpClient.getConnectionManager().getSchemeRegistry();
// Make sure no tests cheat by using HTTP