mirror of https://github.com/apache/lucene.git
Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/lucene-solr
commit 78a57f75cb
TotalHits.java
@@ -35,7 +35,7 @@ public final class TotalHits {
    */
   EQUAL_TO,
   /**
-   * The total hit count is greater than or eual to {@link TotalHits#value}.
+   * The total hit count is greater than or equal to {@link TotalHits#value}.
    */
   GREATER_THAN_OR_EQUAL_TO
 }

solr/CHANGES.txt
@@ -86,6 +86,8 @@ Improvements

 * SOLR-12121: JWT Token authentication plugin with OpenID Connect implicit flow login through Admin UI (janhoy)

+* SOLR-13227: Optimizing facet.range.other by avoiding expensive exceptions (Nikolay Khitrin via Mikhail Khludnev)
+
 Other Changes
 ----------------------

@@ -105,6 +107,10 @@ Other Changes

 * SOLR-13060: Improve HdfsAutoAddReplicasIntegrationTest and HdfsCollectionsAPIDistributedZkTest (Kevin Risden)

+* SOLR-13074: MoveReplicaHDFSTest leaks threads, falls into an endless loop, logging like crazy (Kevin Risden)
+
+* SOLR-9762: Remove the workaround implemented for HADOOP-13346 (Kevin Risden)
+
 ================== 8.0.0 ==================

 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.

RangeFacetProcessor.java
@@ -178,10 +178,8 @@ public class RangeFacetProcessor extends SimpleFacets {
     IntervalFacets.FacetInterval after = null;

     for (RangeFacetRequest.FacetRange range : rfr.getFacetRanges()) {
-      try {
-        FacetRangeOther other = FacetRangeOther.get(range.name);
-        if (other != null) {
-          switch (other) {
+      if (range.other != null) {
+        switch (range.other) {
           case BEFORE:
             assert range.lower == null;
             intervals.set(0, new IntervalFacets.FacetInterval(sf, "*", range.upper, range.includeLower,
@@ -200,14 +198,10 @@ public class RangeFacetProcessor extends SimpleFacets {
           case NONE:
             break;
         }
-        }
-        continue;
-      } catch (SolrException e) {
-        // safe to ignore
-      }
-
+      } else {
         intervals.add(new IntervalFacets.FacetInterval(sf, range.lower, range.upper, range.includeLower, range.includeUpper, range.lower));
       }
+    }

     if (includeAfter) {
       assert after != null;

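The hunks above implement the SOLR-13227 entry from CHANGES.txt: the old code re-parsed range.name through FacetRangeOther.get() inside a try/catch on every facet range and used the thrown SolrException as control flow, while the new code switches on the already-parsed range.other field and handles the absent case with an ordinary branch. A standalone sketch of the difference, with a hypothetical enum and methods standing in for the real Solr classes:

import java.util.Locale;

public class EnumDispatchSketch {
  enum Other { BEFORE, AFTER, BETWEEN, ALL, NONE }

  // Old shape: re-parse the name each time and treat the exception as a branch.
  // Every miss pays for filling in a stack trace.
  static int dispatchByName(String name) {
    try {
      switch (Other.valueOf(name.toUpperCase(Locale.ROOT))) {
        case BEFORE: return -1;
        case AFTER:  return 1;
        default:     return 0;
      }
    } catch (IllegalArgumentException e) {
      return 0; // "safe to ignore" -- but throwing was the expensive part
    }
  }

  // New shape: the enum was resolved once up front; a null check replaces the catch.
  static int dispatchByEnum(Other other) {
    if (other == null) {
      return 0;
    }
    switch (other) {
      case BEFORE: return -1;
      case AFTER:  return 1;
      default:     return 0;
    }
  }

  public static void main(String[] args) {
    System.out.println(dispatchByName("before"));    // -1
    System.out.println(dispatchByEnum(Other.AFTER)); // 1
  }
}

Constructing an exception fills in a stack trace each time, so exception-driven dispatch inside a per-range loop is exactly the cost the change removes.
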
HadoopAuthPlugin.java
@@ -20,7 +20,6 @@ import static org.apache.solr.security.RequestContinuesRecorderAuthenticationHan
 import static org.apache.solr.security.HadoopAuthFilter.DELEGATION_TOKEN_ZK_CLIENT;

 import java.io.IOException;
-import java.io.PrintWriter;
 import java.lang.invoke.MethodHandles;
 import java.util.Collection;
 import java.util.Collections;
@@ -37,15 +36,15 @@ import javax.servlet.ServletRequest;
 import javax.servlet.ServletResponse;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
-import javax.servlet.http.HttpServletResponseWrapper;

+import com.fasterxml.jackson.core.JsonGenerator;
 import org.apache.commons.collections.iterators.IteratorEnumeration;
 import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
+import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticationHandler;
 import org.apache.solr.client.solrj.impl.Krb5HttpClientBuilder;
 import org.apache.solr.cloud.ZkController;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
-import org.apache.solr.common.util.SuppressForbidden;
 import org.apache.solr.core.CoreContainer;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -189,6 +188,10 @@ public class HadoopAuthPlugin extends AuthenticationPlugin {
     // Configure proxy user settings.
     params.putAll(proxyUserConfigs);

+    // Needed to work around HADOOP-13346
+    params.put(DelegationTokenAuthenticationHandler.JSON_MAPPER_PREFIX + JsonGenerator.Feature.AUTO_CLOSE_TARGET,
+        "false");
+
     final ServletContext servletContext = new AttributeOnlyServletContext();
     log.info("Params: "+params);

@@ -244,20 +247,7 @@ public class HadoopAuthPlugin extends AuthenticationPlugin {
       log.info("-------------------------------");
     }

-    // Workaround until HADOOP-13346 is fixed.
-    HttpServletResponse rspCloseShield = new HttpServletResponseWrapper(frsp) {
-      @SuppressForbidden(reason = "Hadoop DelegationTokenAuthenticationFilter uses response writer, this" +
-          "is providing a CloseShield on top of that")
-      @Override
-      public PrintWriter getWriter() throws IOException {
-        final PrintWriter pw = new PrintWriterWrapper(frsp.getWriter()) {
-          @Override
-          public void close() {};
-        };
-        return pw;
-      }
-    };
-    authFilter.doFilter(request, rspCloseShield, filterChain);
+    authFilter.doFilter(request, frsp, filterChain);

     switch (frsp.getStatus()) {
       case HttpServletResponse.SC_UNAUTHORIZED:

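The two halves of this file's change belong together (SOLR-9762): rather than shielding the response writer from the Hadoop filter closing it, the plugin now passes a JSON_MAPPER_PREFIX-scoped setting that, as I read the diff, Hadoop's DelegationTokenAuthenticationHandler applies when building its Jackson ObjectMapper, so the JSON generator stops closing the response stream in the first place. A minimal, self-contained sketch of that Jackson feature; the class and variable names here are mine, and it only assumes jackson-databind on the classpath:

import java.io.ByteArrayOutputStream;
import java.io.FilterOutputStream;
import java.io.IOException;
import java.util.Collections;

import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;

public class AutoCloseTargetSketch {
  public static void main(String[] args) throws IOException {
    // A stream that records whether anyone closed it.
    class CloseTracking extends FilterOutputStream {
      boolean closed;
      CloseTracking() { super(new ByteArrayOutputStream()); }
      @Override public void close() throws IOException { closed = true; super.close(); }
    }

    ObjectMapper mapper = new ObjectMapper();
    // The feature the plugins now disable through Hadoop's config prefix:
    mapper.getFactory().disable(JsonGenerator.Feature.AUTO_CLOSE_TARGET);

    CloseTracking out = new CloseTracking();
    mapper.writeValue(out, Collections.singletonMap("ok", true));
    System.out.println("closed by Jackson? " + out.closed); // false: stream left open
  }
}
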
KerberosPlugin.java
@@ -16,8 +16,6 @@
  */
 package org.apache.solr.security;

-import java.io.IOException;
-import java.io.PrintWriter;
 import java.lang.invoke.MethodHandles;
 import java.util.Enumeration;
 import java.util.HashMap;
@@ -30,11 +28,11 @@ import javax.servlet.ServletContext;
 import javax.servlet.ServletException;
 import javax.servlet.ServletRequest;
 import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletResponse;
-import javax.servlet.http.HttpServletResponseWrapper;

+import com.fasterxml.jackson.core.JsonGenerator;
 import com.google.common.annotations.VisibleForTesting;
 import org.apache.commons.collections.iterators.IteratorEnumeration;
+import org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticationHandler;
 import org.apache.solr.client.solrj.impl.Http2SolrClient;
 import org.apache.solr.client.solrj.impl.Krb5HttpClientBuilder;
 import org.apache.solr.client.solrj.impl.SolrHttpClientBuilder;
@@ -42,7 +40,6 @@ import org.apache.solr.cloud.ZkController;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.cloud.SecurityAwareZkACLProvider;
-import org.apache.solr.common.util.SuppressForbidden;
 import org.apache.solr.core.CoreContainer;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -166,6 +163,11 @@ public class KerberosPlugin extends AuthenticationPlugin implements HttpClientBu
         params.put(key, System.getProperty(key));
       }
     }

+    // Needed to work around HADOOP-13346
+    params.put(DelegationTokenAuthenticationHandler.JSON_MAPPER_PREFIX + JsonGenerator.Feature.AUTO_CLOSE_TARGET,
+        "false");
+
     final ServletContext servletContext = new AttributeOnlyServletContext();
     if (controller != null) {
       servletContext.setAttribute(DELEGATION_TOKEN_ZK_CLIENT, controller.getZkClient());
@@ -223,25 +225,7 @@ public class KerberosPlugin extends AuthenticationPlugin implements HttpClientBu
   public boolean doAuthenticate(ServletRequest req, ServletResponse rsp,
       FilterChain chain) throws Exception {
     log.debug("Request to authenticate using kerberos: "+req);
-    final HttpServletResponse frsp = (HttpServletResponse)rsp;
-
-    // kerberosFilter may close the stream and write to closed streams,
-    // see HADOOP-13346. To work around, pass a PrintWriter that ignores
-    // closes
-    HttpServletResponse rspCloseShield = new HttpServletResponseWrapper(frsp) {
-      @SuppressForbidden(reason = "Hadoop DelegationTokenAuthenticationFilter uses response writer, this" +
-          "is providing a CloseShield on top of that")
-      @Override
-      public PrintWriter getWriter() throws IOException {
-        final PrintWriter pw = new PrintWriterWrapper(frsp.getWriter()) {
-          @Override
-          public void close() {};
-        };
-        return pw;
-      }
-    };
-    kerberosFilter.doFilter(req, rspCloseShield, chain);
+    kerberosFilter.doFilter(req, rsp, chain);

     String requestContinuesAttr = (String)req.getAttribute(RequestContinuesRecorderAuthenticationHandler.REQUEST_CONTINUES_ATTR);
     if (requestContinuesAttr == null) {
       log.warn("Could not find " + RequestContinuesRecorderAuthenticationHandler.REQUEST_CONTINUES_ATTR);

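KerberosPlugin receives the identical cleanup: the mapper setting is applied once at init, and doAuthenticate hands the raw response straight to kerberosFilter. For reference, the idiom deleted from both plugins is a close shield: a writer whose close() is a no-op, protecting a stream from callee code that closes what it does not own. A self-contained sketch of that idiom, using a plain PrintWriter subclass where the Solr code used PrintWriterWrapper:

import java.io.PrintWriter;
import java.io.StringWriter;

public class CloseShieldSketch {
  // Wrap a writer so that close() only flushes, never closes.
  static PrintWriter closeShield(PrintWriter delegate) {
    return new PrintWriter(delegate) {
      @Override public void close() { flush(); /* swallow the close */ }
    };
  }

  public static void main(String[] args) {
    StringWriter buf = new StringWriter();
    PrintWriter real = new PrintWriter(buf);
    PrintWriter shielded = closeShield(real);

    shielded.println("written through the shield");
    shielded.close();             // ignored by the shield
    real.println("still usable"); // the underlying writer survived
    real.flush();
    System.out.print(buf);
  }
}

With HADOOP-13346 handled by configuration instead, the wrapper, its @SuppressForbidden annotation, and the PrintWriter/IOException imports all become dead weight, which is what these hunks delete.
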
MoveReplicaHDFSFailoverTest.java
@@ -40,8 +40,7 @@ import org.junit.BeforeClass;
 import org.junit.Test;

 @ThreadLeakFilters(defaultFilters = true, filters = {
-    BadHdfsThreadsFilter.class, // hdfs currently leaks thread(s)
-    MoveReplicaHDFSTest.ForkJoinThreadsFilter.class
+    BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
 })
 @Nightly // test is too long for non nightly
 public class MoveReplicaHDFSFailoverTest extends SolrCloudTestCase {

MoveReplicaHDFSTest.java
@@ -16,75 +16,51 @@
  */
 package org.apache.solr.cloud;

-import java.io.IOException;
-
-import com.carrotsearch.randomizedtesting.ThreadFilter;
 import com.carrotsearch.randomizedtesting.annotations.Nightly;
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
 import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.lucene.util.IOUtils;
-import org.apache.lucene.util.LuceneTestCase.AwaitsFix;
+import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.lucene.util.TimeUnits;
 import org.apache.solr.cloud.hdfs.HdfsTestUtil;
-import org.apache.solr.common.cloud.ZkConfigManager;
 import org.apache.solr.util.BadHdfsThreadsFilter;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;

-/**
- *
- */
+@Slow
+@Nightly
 @ThreadLeakFilters(defaultFilters = true, filters = {
-    BadHdfsThreadsFilter.class, // hdfs currently leaks thread(s)
-    MoveReplicaHDFSTest.ForkJoinThreadsFilter.class
+    BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
 })
-@Nightly // test is too long for non nightly
 @TimeoutSuite(millis = TimeUnits.HOUR)
-@AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/SOLR-13060")
 public class MoveReplicaHDFSTest extends MoveReplicaTest {

   private static MiniDFSCluster dfsCluster;

   @BeforeClass
   public static void setupClass() throws Exception {
-    System.setProperty("solr.hdfs.blockcache.enabled", "false");
+    System.setProperty("solr.hdfs.blockcache.blocksperbank", "512");
+    System.setProperty("tests.hdfs.numdatanodes", "1");
     dfsCluster = HdfsTestUtil.setupClass(createTempDir().toFile().getAbsolutePath());
-
-    ZkConfigManager configManager = new ZkConfigManager(zkClient());
-    configManager.uploadConfigDir(configset("cloud-hdfs"), "conf1");
-
-    System.setProperty("solr.hdfs.home", HdfsTestUtil.getDataDir(dfsCluster, "data"));
   }

   @AfterClass
   public static void teardownClass() throws Exception {
     try {
-      IOUtils.close(
-          () -> {
-            try {
-              if (cluster != null) cluster.shutdown();
-            } catch (Exception e) {
-              throw new IOException("Could not shut down the cluster.", e);
-            }
-          },
-          () -> {
-            try {
-              if (dfsCluster != null) HdfsTestUtil.teardownClass(dfsCluster);
-            } catch (Exception e) {
-              throw new IOException("Could not shut down dfs cluster.", e);
-            }
-          }
-      );
+      HdfsTestUtil.teardownClass(dfsCluster);
     } finally {
-      cluster = null;
       dfsCluster = null;
+      System.setProperty("solr.hdfs.blockcache.blocksperbank", "512");
+      System.setProperty("tests.hdfs.numdatanodes", "1");
     }
   }

+  @Override
+  protected String getConfigSet() {
+    return "cloud-hdfs";
+  }
+
   @Test
-  // 12-Jun-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") //2018-03-10
   public void testNormalMove() throws Exception {
     inPlaceMove = false;
     test();
@@ -108,13 +84,5 @@ public class MoveReplicaHDFSTest extends MoveReplicaTest {
   public void testFailedMove() throws Exception {
     super.testFailedMove();
   }
-
-  public static class ForkJoinThreadsFilter implements ThreadFilter {
-    @Override
-    public boolean reject(Thread t) {
-      String name = t.getName();
-      return name.startsWith("ForkJoinPool.commonPool");
-    }
-  }
 }

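The deleted inner class doubles as documentation for how randomizedtesting's leak filtering works: after a suite ends, the runner offers every thread still alive to reject(Thread), and returning true excuses that thread from failing the leak check (my reading of the ThreadFilter contract). Once SOLR-13074 fixed the underlying leak, the whitelist entry could be dropped from all three test classes. The essentials, as a standalone sketch:

import com.carrotsearch.randomizedtesting.ThreadFilter;

public class ForkJoinThreadsFilterSketch implements ThreadFilter {
  @Override
  public boolean reject(Thread t) {
    // JDK common-pool workers are JVM-wide daemons that can outlive any one test.
    return t.getName().startsWith("ForkJoinPool.commonPool");
  }
}
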
MoveReplicaTest.java
@@ -22,7 +22,6 @@ import java.lang.invoke.MethodHandles;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -41,46 +40,38 @@ import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.cloud.DocCollection;
 import org.apache.solr.common.cloud.Replica;
 import org.apache.solr.common.cloud.Slice;
-import org.apache.solr.common.params.CollectionParams;
-import org.apache.solr.common.params.ModifiableSolrParams;
-import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.util.IdUtils;
-import org.apache.solr.util.LogLevel;
 import org.junit.After;
 import org.junit.Before;
-import org.junit.BeforeClass;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

-@LogLevel("org.apache.solr.cloud=DEBUG;org.apache.solr.cloud.autoscaling=DEBUG;")
 public class MoveReplicaTest extends SolrCloudTestCase {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

   // used by MoveReplicaHDFSTest
   protected boolean inPlaceMove = true;

-  @BeforeClass
-  public static void setupCluster() throws Exception {
-
-  }
-
-  protected String getSolrXml() {
-    return "solr.xml";
+  protected String getConfigSet() {
+    return "cloud-dynamic";
   }

   @Before
   public void beforeTest() throws Exception {
     inPlaceMove = true;

     configureCluster(4)
-        .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-dynamic").resolve("conf"))
+        .addConfig("conf1", configset(getConfigSet()))
+        .addConfig("conf2", configset(getConfigSet()))
+        .withSolrXml(TEST_PATH().resolve("solr.xml"))
         .configure();

     NamedList<Object> overSeerStatus = cluster.getSolrClient().request(CollectionAdminRequest.getOverseerStatus());
     JettySolrRunner overseerJetty = null;
     String overseerLeader = (String) overSeerStatus.get("leader");
-    for (int i = 0; i < cluster.getJettySolrRunners().size(); i++) {
-      JettySolrRunner jetty = cluster.getJettySolrRunner(i);
+    for (JettySolrRunner jetty : cluster.getJettySolrRunners()) {
       if (jetty.getNodeName().equals(overseerLeader)) {
         overseerJetty = jetty;
         break;
@@ -93,7 +84,11 @@ public class MoveReplicaTest extends SolrCloudTestCase {

   @After
   public void afterTest() throws Exception {
-    cluster.shutdown();
+    try {
+      shutdownCluster();
+    } finally {
+      super.tearDown();
+    }
   }

   @Test
@@ -147,7 +142,7 @@ public class MoveReplicaTest extends SolrCloudTestCase {
         success = true;
         break;
       }
-      assertFalse(rsp.getRequestStatus() == RequestStatusState.FAILED);
+      assertNotSame(rsp.getRequestStatus(), RequestStatusState.FAILED);
       Thread.sleep(500);
     }
     assertTrue(success);
@@ -292,7 +287,7 @@ public class MoveReplicaTest extends SolrCloudTestCase {
     boolean success = true;
     for (int i = 0; i < 200; i++) {
       CollectionAdminRequest.RequestStatusResponse rsp = requestStatus.process(cloudClient);
-      assertTrue(rsp.getRequestStatus().toString(), rsp.getRequestStatus() != RequestStatusState.COMPLETED);
+      assertNotSame(rsp.getRequestStatus().toString(), rsp.getRequestStatus(), RequestStatusState.COMPLETED);
       if (rsp.getRequestStatus() == RequestStatusState.FAILED) {
         success = false;
         break;
@@ -306,46 +301,11 @@ public class MoveReplicaTest extends SolrCloudTestCase {
   }

   private CollectionAdminRequest.MoveReplica createMoveReplicaRequest(String coll, Replica replica, String targetNode, String shardId) {
-    if (random().nextBoolean()) {
       return new CollectionAdminRequest.MoveReplica(coll, shardId, targetNode, replica.getNodeName());
-    } else {
-      // for backcompat testing of SOLR-11068
-      // todo remove in solr 8.0
-      return new BackCompatMoveReplicaRequest(coll, shardId, targetNode, replica.getNodeName());
-    }
   }

   private CollectionAdminRequest.MoveReplica createMoveReplicaRequest(String coll, Replica replica, String targetNode) {
-    if (random().nextBoolean()) {
       return new CollectionAdminRequest.MoveReplica(coll, replica.getName(), targetNode);
-    } else {
-      // for backcompat testing of SOLR-11068
-      // todo remove in solr 8.0
-      return new BackCompatMoveReplicaRequest(coll, replica.getName(), targetNode);
-    }
-  }
-
-  /**
-   * Added for backcompat testing
-   * todo remove in solr 8.0
-   */
-  static class BackCompatMoveReplicaRequest extends CollectionAdminRequest.MoveReplica {
-    public BackCompatMoveReplicaRequest(String collection, String replica, String targetNode) {
-      super(collection, replica, targetNode);
-    }
-
-    public BackCompatMoveReplicaRequest(String collection, String shard, String sourceNode, String targetNode) {
-      super(collection, shard, sourceNode, targetNode);
-    }
-
-    @Override
-    public SolrParams getParams() {
-      ModifiableSolrParams params = (ModifiableSolrParams) super.getParams();
-      if (randomlyMoveReplica) {
-        params.set(CollectionParams.FROM_NODE, sourceNode);
-      }
-      return params;
-    }
   }

   private Replica getRandomReplica(String coll, CloudSolrClient cloudClient) {
@@ -369,9 +329,8 @@ public class MoveReplicaTest extends SolrCloudTestCase {
       return status.getCoreStatus().size();
     } else {
       int size = 0;
-      Iterator<Map.Entry<String, NamedList<Object>>> it = status.getCoreStatus().iterator();
-      while (it.hasNext()) {
-        String coll = (String)it.next().getValue().findRecursive("cloud", "collection");
+      for (Map.Entry<String, NamedList<Object>> stringNamedListEntry : status.getCoreStatus()) {
+        String coll = (String) stringNamedListEntry.getValue().findRecursive("cloud", "collection");
         if (collectionName.equals(coll)) {
           size++;
         }

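Two small patterns in these hunks are worth a note: afterTest() now runs shutdownCluster() under try/finally so the superclass teardown executes even when cluster shutdown throws, and the hand-rolled ==/!= assertions become assertNotSame, which states the intent directly. A toy version of the teardown pattern, with illustrative method names rather than the real test API:

public class TeardownSketch {
  static void shutdownCluster() throws Exception {
    throw new Exception("simulated shutdown failure");
  }

  static void superTearDown() {
    System.out.println("super.tearDown() still ran");
  }

  public static void main(String[] args) {
    try {
      try {
        shutdownCluster();
      } finally {
        superTearDown(); // guaranteed even though shutdownCluster() threw
      }
    } catch (Exception e) {
      System.out.println("propagated: " + e.getMessage());
    }
  }
}
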
HDFSCollectionsAPITest.java
@@ -17,14 +17,12 @@

 package org.apache.solr.cloud.hdfs;

-
 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.solr.client.solrj.SolrQuery;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.client.solrj.response.QueryResponse;
-import org.apache.solr.cloud.MoveReplicaHDFSTest;
 import org.apache.solr.cloud.SolrCloudTestCase;
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.cloud.Replica;
@@ -34,8 +32,7 @@ import org.junit.AfterClass;
 import org.junit.BeforeClass;

 @ThreadLeakFilters(defaultFilters = true, filters = {
-    BadHdfsThreadsFilter.class, // hdfs currently leaks thread(s)
-    MoveReplicaHDFSTest.ForkJoinThreadsFilter.class
+    BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
 })
 public class HDFSCollectionsAPITest extends SolrCloudTestCase {