SOLR-12823: fix TestZKPropertiesWriter (#1575)

SOLR-12823: remove clusterstate.json in Lucene/Solr 9.0 - fix TestZKPropertiesWriter

TestZKPropertiesWriter relied on legacy SolrCloud features that have since been removed.
Start a MiniSolrCloudCluster (which implies a config set and other test resource configuration) and have the test use the core of a created collection.
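
The gist of the new setup, as a condensed sketch assembled from the diff below (not a verbatim excerpt; the names and imports are the ones added to the test):

// One-node SolrCloud cluster with the test config set uploaded to Zookeeper.
MiniSolrCloudCluster minicluster = configureCluster(1)
    .addConfig("conf", configset("dihconfigset"))
    .configure();
// Create a collection, then resolve the SolrCore backing its single replica,
// since DIH talks core while SolrCloud talks collection.
CollectionAdminRequest.createCollectionWithImplicitRouter("collection1", "conf", "1", 1)
    .setMaxShardsPerNode(1)
    .process(cluster.getSolrClient());
DocCollection coll = getCollectionState("collection1");
Replica replica = coll.getReplicas().iterator().next();
JettySolrRunner jetty = minicluster.getReplicaJetty(replica);
SolrCore core = jetty.getCoreContainer().getCore(replica.getCoreName()); // caller must close it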
Ilan Ginzburg 2020-06-22 18:02:26 +02:00 committed by GitHub
parent aa5b26f6c4
commit 62dd94d26b
7 changed files with 614 additions and 48 deletions


@@ -0,0 +1,69 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<!-- Configuration for asynchronous logging -->
<Configuration>
<Appenders>
<Console name="STDERR" target="SYSTEM_ERR">
<PatternLayout>
<Pattern>
%maxLen{%-4r %-5p (%t) [%X{node_name} %X{collection} %X{shard} %X{replica} %X{core} %X{trace_id}] %c{1.} %m%notEmpty{
=>%ex{short}}}{10240}%n
</Pattern>
</PatternLayout>
</Console>
</Appenders>
<Loggers>
<AsyncLogger name="org.apache.zookeeper" level="WARN"/>
<AsyncLogger name="org.apache.hadoop" level="WARN"/>
<AsyncLogger name="org.apache.directory" level="WARN"/>
<AsyncLogger name="org.apache.solr.hadoop" level="INFO"/>
<AsyncLogger name="org.eclipse.jetty" level="INFO"/>
<AsyncRoot level="INFO">
<AppenderRef ref="STDERR"/>
</AsyncRoot>
</Loggers>
</Configuration>
<!-- Configuration for synchronous logging
there _may_ be a very small window where log messages will not be flushed
to the log file on abnormal shutdown. If even this risk is unacceptable, use
the configuration below
-->
<!--Configuration>
<Appenders>
<Console name="STDERR" target="SYSTEM_ERR">
<PatternLayout>
<Pattern>
%-4r %-5p (%t) [%X{node_name} %X{collection} %X{shard} %X{replica} %X{core}] %c{1.} %m%n
</Pattern>
</PatternLayout>
</Console>
</Appenders>
<Loggers>
<Logger name="org.apache.zookeeper" level="WARN"/>
<Logger name="org.apache.hadoop" level="WARN"/>
<Logger name="org.apache.directory" level="WARN"/>
<Logger name="org.apache.solr.hadoop" level="INFO"/>
<Logger name="org.eclipse.jetty" level="INFO"/>
<Root level="INFO">
<AppenderRef ref="STDERR"/>
</Root>
</Loggers>
</Configuration-->


@@ -0,0 +1 @@
The collection1 directory is needed because it is used as a marker in SolrTestCaseJ4.TEST_PATH() to find the configsets.
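
For illustration, roughly how the marker is assumed to work (a hedged sketch, not the exact SolrTestCaseJ4 source): the solr/collection1 directory is located on the test classpath, and test resources such as configsets are resolved relative to its parent.

// Hedged sketch (assumes java.nio.file.Path; getFile() is the SolrTestCaseJ4 helper
// also used elsewhere in this commit to locate test resources).
Path testPath = getFile("solr/collection1").getParentFile().toPath(); // roughly what TEST_PATH() returns
Path configsetDir = testPath.resolve("configsets").resolve("dihconfigset").resolve("conf");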


@@ -0,0 +1,2 @@
The files here are copies of "dataimport-solrconfig.xml" and "dataimport-schema.xml".
This config set is used by the test org.apache.solr.handler.dataimport.TestZKPropertiesWriter, which starts a SolrCloud mini cluster.


@@ -0,0 +1,70 @@
<schema name="dih_test" version="4.0">
<fieldType name="string" class="solr.StrField" sortMissingLast="true" omitNorms="true"/>
<fieldType name="boolean" class="solr.BoolField" sortMissingLast="true" omitNorms="true"/>
<fieldType name="tint" class="${solr.tests.IntegerFieldType}" docValues="${solr.tests.numeric.dv}" precisionStep="8" positionIncrementGap="0"/>
<fieldType name="tfloat" class="${solr.tests.FloatFieldType}" docValues="${solr.tests.numeric.dv}" precisionStep="8" positionIncrementGap="0"/>
<fieldType name="tlong" class="${solr.tests.LongFieldType}" docValues="${solr.tests.numeric.dv}" precisionStep="8" positionIncrementGap="0"/>
<fieldType name="tdouble" class="${solr.tests.DoubleFieldType}" docValues="${solr.tests.numeric.dv}" precisionStep="8" positionIncrementGap="0"/>
<fieldType name="date" class="${solr.tests.DateFieldType}" docValues="${solr.tests.numeric.dv}" sortMissingLast="true" omitNorms="true"/>
<fieldType name="text" class="solr.TextField" positionIncrementGap="100">
<analyzer type="index">
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.WordDelimiterGraphFilterFactory" generateWordParts="1" generateNumberParts="1" catenateWords="1"
catenateNumbers="1" catenateAll="0" splitOnCaseChange="1"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.RemoveDuplicatesTokenFilterFactory"/>
<filter class="solr.FlattenGraphFilterFactory" />
</analyzer>
<analyzer type="query">
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.WordDelimiterGraphFilterFactory" generateWordParts="1" generateNumberParts="1" catenateWords="0"
catenateNumbers="0" catenateAll="0" splitOnCaseChange="1"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.RemoveDuplicatesTokenFilterFactory"/>
</analyzer>
</fieldType>
<fieldType name="textTight" class="solr.TextField" positionIncrementGap="100">
<analyzer type="index">
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.WordDelimiterGraphFilterFactory" generateWordParts="0" generateNumberParts="0" catenateWords="1"
catenateNumbers="1" catenateAll="0"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.RemoveDuplicatesTokenFilterFactory"/>
<filter class="solr.FlattenGraphFilterFactory" />
</analyzer>
<analyzer type="query">
<tokenizer class="solr.MockTokenizerFactory"/>
<filter class="solr.WordDelimiterGraphFilterFactory" generateWordParts="0" generateNumberParts="0" catenateWords="1"
catenateNumbers="1" catenateAll="0"/>
<filter class="solr.LowerCaseFilterFactory"/>
<filter class="solr.RemoveDuplicatesTokenFilterFactory"/>
</analyzer>
</fieldType>
<fieldType name="ignored" stored="false" indexed="false" class="solr.StrField"/>
<field name="id" type="string" indexed="true" stored="true" required="true"/>
<field name="desc" type="string" indexed="true" stored="true" multiValued="true"/>
<field name="date" type="date" indexed="true" stored="true"/>
<field name="timestamp" type="date" indexed="true" stored="true" default="NOW" multiValued="false"/>
<field name="NAME" type="text" indexed="true" stored="true" multiValued="false"/>
<field name="COUNTRY_NAME" type="text" indexed="true" stored="true" multiValued="true"/>
<field name="SPORT_NAME" type="text" indexed="true" stored="true" multiValued="true"/>
<field name="DO_NOT_INDEX" type="ignored"/>
<field name="_version_" type="tlong" indexed="true" stored="true" multiValued="false"/>
<field name="_root_" type="string" indexed="true" stored="true" multiValued="false"/>
<dynamicField name="*_i" type="tint" indexed="true" stored="true"/>
<dynamicField name="*_s" type="string" indexed="true" stored="true"/>
<dynamicField name="*_mult_s" type="string" indexed="true" stored="true" multiValued="true"/>
<dynamicField name="*_l" type="tlong" indexed="true" stored="true"/>
<dynamicField name="*_t" type="text" indexed="true" stored="true"/>
<dynamicField name="*_b" type="boolean" indexed="true" stored="true"/>
<dynamicField name="*_f" type="tfloat" indexed="true" stored="true"/>
<dynamicField name="*_d" type="tdouble" indexed="true" stored="true"/>
<dynamicField name="*_dt" type="date" indexed="true" stored="true"/>
<uniqueKey>id</uniqueKey>
</schema>
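
A note on the ${solr.tests.*} placeholders in the schema above: they are resolved through system property substitution when the core loads its schema. A minimal sketch of what the test framework is assumed to set beforehand (the property names come from the schema; the concrete field type values are illustrative, as the framework picks them per run):

// Illustrative values only; the test framework chooses the actual numeric field types.
System.setProperty("solr.tests.IntegerFieldType", "solr.IntPointField");
System.setProperty("solr.tests.LongFieldType", "solr.LongPointField");
System.setProperty("solr.tests.FloatFieldType", "solr.FloatPointField");
System.setProperty("solr.tests.DoubleFieldType", "solr.DoublePointField");
System.setProperty("solr.tests.DateFieldType", "solr.DatePointField");
System.setProperty("solr.tests.numeric.dv", "true");
// With these set, ${solr.tests.IntegerFieldType} in the schema resolves to solr.IntPointField, etc.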


@@ -0,0 +1,287 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<config>
<luceneMatchVersion>${tests.luceneMatchVersion:LATEST}</luceneMatchVersion>
<indexConfig>
<useCompoundFile>${useCompoundFile:false}</useCompoundFile>
</indexConfig>
<!-- Used to specify an alternate directory to hold all index data
other than the default ./data under the Solr home.
If replication is in use, this should match the replication configuration. -->
<dataDir>${solr.data.dir:}</dataDir>
<directoryFactory name="DirectoryFactory" class="${solr.directoryFactory:solr.RAMDirectoryFactory}"/>
<schemaFactory class="ClassicIndexSchemaFactory"/>
<!-- the default high-performance update handler -->
<updateHandler class="solr.DirectUpdateHandler2">
<!-- A prefix of "solr." for class names is an alias that
causes solr to search appropriate packages, including
org.apache.solr.(search|update|request|core|analysis)
-->
<!-- Limit the number of deletions Solr will buffer during doc updating.
Setting this lower can help bound memory use during indexing.
-->
<maxPendingDeletes>100000</maxPendingDeletes>
</updateHandler>
<query>
<!-- Maximum number of clauses in a boolean query... can affect
range or prefix queries that expand to big boolean
queries. An exception is thrown if exceeded. -->
<maxBooleanClauses>${solr.max.booleanClauses:1024}</maxBooleanClauses>
<!-- Cache used by SolrIndexSearcher for filters (DocSets),
unordered sets of *all* documents that match a query.
When a new searcher is opened, its caches may be prepopulated
or "autowarmed" using data from caches in the old searcher.
autowarmCount is the number of items to prepopulate. For CaffeineCache,
the autowarmed items will be the most recently accessed items.
Parameters:
class - the SolrCache implementation (currently only CaffeineCache)
size - the maximum number of entries in the cache
initialSize - the initial capacity (number of entries) of
the cache. (see java.util.HashMap)
autowarmCount - the number of entries to prepopulate from
an old cache.
-->
<filterCache
class="solr.CaffeineCache"
size="512"
initialSize="512"
autowarmCount="256"/>
<!-- queryResultCache caches results of searches - ordered lists of
document ids (DocList) based on a query, a sort, and the range
of documents requested. -->
<queryResultCache
class="solr.CaffeineCache"
size="512"
initialSize="512"
autowarmCount="256"/>
<!-- documentCache caches Lucene Document objects (the stored fields for each document).
Since Lucene internal document ids are transient, this cache will not be autowarmed. -->
<documentCache
class="solr.CaffeineCache"
size="512"
initialSize="512"
autowarmCount="0"/>
<!-- If true, stored fields that are not requested will be loaded lazily.
This can result in a significant speed improvement if the usual case is to
not load all stored fields, especially if the skipped fields are large compressed
text fields.
-->
<enableLazyFieldLoading>true</enableLazyFieldLoading>
<!-- Example of a generic cache. These caches may be accessed by name
through SolrIndexSearcher.getCache(), cacheLookup(), and cacheInsert().
The purpose is to enable easy caching of user/application level data.
The regenerator argument should be specified as an implementation
of solr.search.CacheRegenerator if autowarming is desired. -->
<!--
<cache name="myUserCache"
class="solr.CaffeineCache"
size="4096"
initialSize="1024"
autowarmCount="1024"
regenerator="org.mycompany.mypackage.MyRegenerator"
/>
-->
<!-- An optimization that attempts to use a filter to satisfy a search.
If the requested sort does not include score, then the filterCache
will be checked for a filter matching the query. If found, the filter
will be used as the source of document ids, and then the sort will be
applied to that.
<useFilterForSortedQuery>true</useFilterForSortedQuery>
-->
<!-- An optimization for use with the queryResultCache. When a search
is requested, a superset of the requested number of document ids
are collected. For example, if a search for a particular query
requests matching documents 10 through 19, and queryResultWindowSize is 50,
then documents 0 through 49 will be collected and cached. Any further
requests in that range can be satisfied via the cache. -->
<queryResultWindowSize>50</queryResultWindowSize>
<!-- Maximum number of documents to cache for any entry in the
queryResultCache. -->
<queryResultMaxDocsCached>200</queryResultMaxDocsCached>
<!-- a newSearcher event is fired whenever a new searcher is being prepared
and there is a current searcher handling requests (aka registered). -->
<!-- QuerySenderListener takes an array of NamedList and executes a
local query request for each NamedList in sequence. -->
<!--<listener event="newSearcher" class="solr.QuerySenderListener">-->
<!--<arr name="queries">-->
<!--<lst> <str name="q">solr</str> <str name="start">0</str> <str name="rows">10</str> </lst>-->
<!--<lst> <str name="q">rocks</str> <str name="start">0</str> <str name="rows">10</str> </lst>-->
<!--<lst><str name="q">static newSearcher warming query from solrconfig.xml</str></lst>-->
<!--</arr>-->
<!--</listener>-->
<!-- a firstSearcher event is fired whenever a new searcher is being
prepared but there is no current registered searcher to handle
requests or to gain autowarming data from. -->
<!--<listener event="firstSearcher" class="solr.QuerySenderListener">-->
<!--<arr name="queries">-->
<!--</arr>-->
<!--</listener>-->
<!-- If a search request comes in and there is no current registered searcher,
then immediately register the still warming searcher and use it. If
"false" then all requests will block until the first searcher is done
warming. -->
<useColdSearcher>false</useColdSearcher>
<!-- Maximum number of searchers that may be warming in the background
concurrently. An error is returned if this limit is exceeded. Recommend
1-2 for read-only slaves, higher for masters w/o cache warming. -->
<maxWarmingSearchers>4</maxWarmingSearchers>
</query>
<requestDispatcher>
<!--Make sure your system has some authentication before enabling remote streaming!
<requestParsers enableRemoteStreaming="false" multipartUploadLimitInKB="-1" />
-->
<!-- Set HTTP caching related parameters (for proxy caches and clients).
To get the behaviour of Solr 1.2 (ie: no caching related headers)
use the never304="true" option and do not specify a value for
<cacheControl>
-->
<httpCaching never304="true">
<!--httpCaching lastModifiedFrom="openTime"
etagSeed="Solr"-->
<!-- lastModFrom="openTime" is the default, the Last-Modified value
(and validation against If-Modified-Since requests) will all be
relative to when the current Searcher was opened.
You can change it to lastModFrom="dirLastMod" if you want the
value to exactly correspond to when the physical index was last
modified.
etagSeed="..." is an option you can change to force the ETag
header (and validation against If-None-Match requests) to be
different even if the index has not changed (ie: when making
significant changes to your config file)
lastModifiedFrom and etagSeed are both ignored if you use the
never304="true" option.
-->
<!-- If you include a <cacheControl> directive, it will be used to
generate a Cache-Control header, as well as an Expires header
if the value contains "max-age="
By default, no Cache-Control header is generated.
You can use the <cacheControl> option even if you have set
never304="true"
-->
<!-- <cacheControl>max-age=30, public</cacheControl> -->
</httpCaching>
</requestDispatcher>
<requestHandler name="/select" class="solr.SearchHandler">
<!-- default values for query parameters -->
<lst name="defaults">
<str name="echoParams">explicit</str>
<str name="df">desc</str>
<!--
<int name="rows">10</int>
<str name="fl">*</str>
<str name="version">2.1</str>
-->
</lst>
</requestHandler>
<requestHandler name="/dataimport" class="org.apache.solr.handler.dataimport.DataImportHandler">
<lst name="defaults">
<str name="dots.in.hsqldb.driver">org.hsqldb.jdbcDriver</str>
</lst>
</requestHandler>
<!--
Search components are registered to SolrCore and used by Search Handlers
By default, the following components are available:
<searchComponent name="query" class="org.apache.solr.handler.component.QueryComponent" />
<searchComponent name="facet" class="org.apache.solr.handler.component.FacetComponent" />
<searchComponent name="mlt" class="org.apache.solr.handler.component.MoreLikeThisComponent" />
<searchComponent name="highlight" class="org.apache.solr.handler.component.HighlightComponent" />
<searchComponent name="debug" class="org.apache.solr.handler.component.DebugComponent" />
If you register a searchComponent to one of the standard names, that will be used instead.
-->
<requestHandler name="/search" class="org.apache.solr.handler.component.SearchHandler">
<lst name="defaults">
<str name="echoParams">explicit</str>
</lst>
<!--
By default, this will register the following components:
<arr name="components">
<str>query</str>
<str>facet</str>
<str>mlt</str>
<str>highlight</str>
<str>debug</str>
</arr>
To insert handlers before or after the 'standard' components, use:
<arr name="first-components">
<str>first</str>
</arr>
<arr name="last-components">
<str>last</str>
</arr>
-->
</requestHandler>
<!-- config for the admin interface -->
<admin>
<defaultQuery>*:*</defaultQuery>
</admin>
<updateRequestProcessorChain key="dataimport" default="true">
<processor class="org.apache.solr.handler.dataimport.AbstractDataImportHandlerTestCase$TestUpdateRequestProcessorFactory"/>
<processor class="solr.RunUpdateProcessorFactory"/>
<processor class="solr.LogUpdateProcessorFactory"/>
</updateRequestProcessorChain>
</config>


@@ -0,0 +1,27 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<!--
solr.xml mimicking the old default solr.xml
-->
<solr>
<shardHandlerFactory name="shardHandlerFactory" class="HttpShardHandlerFactory">
<str name="urlScheme">${urlScheme:}</str>
</shardHandlerFactory>
</solr>


@@ -16,9 +16,12 @@
*/
package org.apache.solr.handler.dataimport;
import javax.xml.xpath.XPathExpressionException;
import java.io.ByteArrayOutputStream;
import java.io.StringWriter;
import java.lang.invoke.MethodHandles;
import java.nio.file.Path;
import java.nio.charset.StandardCharsets;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
@@ -27,51 +30,63 @@ import java.util.List;
import java.util.Locale;
import java.util.Map;
import org.apache.solr.client.solrj.embedded.JettySolrRunner;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.cloud.MiniSolrCloudCluster;
import org.apache.solr.cloud.SolrCloudTestCase;
import org.apache.solr.cloud.ZkTestServer;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.cloud.DocCollection;
import org.apache.solr.common.cloud.Replica;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SuppressForbidden;
import org.apache.solr.core.CoreContainer;
import org.apache.solr.core.SolrCore;
import org.apache.solr.request.LocalSolrQueryRequest;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrRequestInfo;
import org.apache.solr.response.BinaryQueryResponseWriter;
import org.apache.solr.response.QueryResponseWriter;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.util.BaseTestHarness;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TestZKPropertiesWriter extends AbstractDataImportHandlerTestCase {
/**
* Tests that the DIH properties writer works when using Zookeeper. Zookeeper is used by virtue of starting a SolrCloud cluster.<p>
*
* Note this test is an inelegant bridge between code that assumes a non-SolrCloud environment (and that would normally
* use test infra not meant to work in a SolrCloud environment: {@link org.apache.solr.util.TestHarness} and some methods in
* {@link org.apache.solr.SolrTestCaseJ4}) and a test running SolrCloud (extending {@link SolrCloudTestCase} and
* using {@link MiniSolrCloudCluster}).<p>
*
* These changes were introduced when https://issues.apache.org/jira/browse/SOLR-12823 got fixed and the legacy
* behaviour of SolrCloud that allowed a cluster (Zookeeper active) to function like standalone Solr (in which the
* cluster would adopt cores contributed by the nodes even if they were unknown to Zookeeper) was removed.
*/
public class TestZKPropertiesWriter extends SolrCloudTestCase {
private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
protected static ZkTestServer zkServer;
protected static Path zkDir;
private static CoreContainer cc;
private static MiniSolrCloudCluster minicluster;
private String dateFormat = "yyyy-MM-dd HH:mm:ss.SSSSSS";
@BeforeClass
public static void dihZk_beforeClass() throws Exception {
zkDir = createTempDir("zkData");
zkServer = new ZkTestServer(zkDir);
zkServer.run();
System.setProperty(DataImportHandler.ENABLE_DIH_DATA_CONFIG_PARAM, "true");
System.setProperty("solrcloud.skip.autorecovery", "true");
System.setProperty("zkHost", zkServer.getZkAddress());
System.setProperty("jetty.port", "0000");
zkServer.buildZooKeeper(getFile("dih/solr"),
"dataimport-solrconfig.xml", "dataimport-schema.xml");
//initCore("solrconfig.xml", "schema.xml", getFile("dih/solr").getAbsolutePath());
cc = createDefaultCoreContainer(getFile("dih/solr").toPath());
}
@Before
public void beforeDihZKTest() throws Exception {
minicluster = configureCluster(1)
.addConfig("conf", configset("dihconfigset"))
.configure();
zkServer = minicluster.getZkServer();
}
@After
@@ -79,36 +94,31 @@ public class TestZKPropertiesWriter extends AbstractDataImportHandlerTestCase {
MockDataSource.clearCache();
}
@AfterClass
public static void dihZk_afterClass() throws Exception {
if (null != cc) {
cc.shutdown();
cc = null;
}
if (null != zkServer) {
zkServer.shutdown();
zkServer = null;
}
zkDir = null;
shutdownCluster();
}
@SuppressForbidden(reason = "Needs currentTimeMillis to construct date stamps")
@Test
@SuppressWarnings({"unchecked"})
@AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/SOLR-12823")
public void testZKPropertiesWriter() throws Exception {
// test using ZooKeeper
assertTrue("Not using ZooKeeper", h.getCoreContainer().isZooKeeperAware());
CollectionAdminRequest.createCollectionWithImplicitRouter("collection1", "conf", "1", 1)
.setMaxShardsPerNode(1)
.process(cluster.getSolrClient());
// for the really slow/busy computer, we wait to make sure we have a leader before starting
h.getCoreContainer().getZkController().getZkStateReader().getLeaderUrl("collection1", "shard1", 30000);
// DIH talks core, SolrCloud talks collection.
DocCollection coll = getCollectionState("collection1");
Replica replica = coll.getReplicas().iterator().next();
JettySolrRunner jetty = minicluster.getReplicaJetty(replica);
SolrCore core = jetty.getCoreContainer().getCore(replica.getCoreName());
assertQ("test query on empty index", request("qlkciyopsbgzyvkylsjhchghjrdf"),
"//result[@numFound='0']");
localAssertQ("test query on empty index", request(core, "qlkciyopsbgzyvkylsjhchghjrdf"), "//result[@numFound='0']");
SimpleDateFormat errMsgFormat = new SimpleDateFormat(dateFormat, Locale.ROOT);
// These two calls are from SolrTestCaseJ4 and end up in TestHarness... That's ok: they are static and do not reference
// the variables that were not initialized (which is why they are not copied to this test class, unlike some other methods at the bottom).
delQ("*:*");
commit();
SimpleDateFormat df = new SimpleDateFormat(dateFormat, Locale.ROOT);
@@ -117,7 +127,7 @@
Map<String, String> init = new HashMap<>();
init.put("dateFormat", dateFormat);
ZKPropertiesWriter spw = new ZKPropertiesWriter();
spw.init(new DataImporter(h.getCore(), "dataimport"), init);
spw.init(new DataImporter(core, "dataimport"), init);
Map<String, Object> props = new HashMap<>();
props.put("SomeDates.last_index_time", oneSecondAgo);
props.put("last_index_time", oneSecondAgo);
@@ -125,10 +135,10 @@
@SuppressWarnings({"rawtypes"})
List rows = new ArrayList();
rows.add(createMap("id", "1", "year_s", "2013"));
rows.add(AbstractDataImportHandlerTestCase.createMap("id", "1", "year_s", "2013"));
MockDataSource.setIterator("select " + df.format(oneSecondAgo) + " from dummy", rows.iterator());
h.query("/dataimport", lrf.makeRequest("command", "full-import", "dataConfig",
localQuery("/dataimport", localMakeRequest(core, "command", "full-import", "dataConfig",
generateConfig(), "clean", "true", "commit", "true", "synchronous",
"true", "indent", "true"));
props = spw.readIndexerProperties();
@@ -137,12 +147,13 @@
Assert.assertTrue("This date: " + errMsgFormat.format(oneSecondAgo) + " should be prior to the document date: " + errMsgFormat.format(docDate), docDate.getTime() - oneSecondAgo.getTime() > 0);
Assert.assertTrue("This date: " + errMsgFormat.format(oneSecondAgo) + " should be prior to the entity date: " + errMsgFormat.format(entityDate), entityDate.getTime() - oneSecondAgo.getTime() > 0);
assertQ(request("*:*"), "//*[@numFound='1']", "//doc/str[@name=\"year_s\"]=\"2013\"");
localAssertQ("Should have found 1 doc, year 2013", request(core, "*:*"), "//*[@numFound='1']", "//doc/str[@name=\"year_s\"]=\"2013\"");
core.close();
}
public SolrQueryRequest request(String... q) {
LocalSolrQueryRequest req = lrf.makeRequest(q);
private static SolrQueryRequest request(SolrCore core, String... q) {
LocalSolrQueryRequest req = localMakeRequest(core, q);
ModifiableSolrParams params = new ModifiableSolrParams();
params.add(req.getParams());
params.set("distrib", true);
@@ -150,10 +161,10 @@
return req;
}
protected String generateConfig() {
private String generateConfig() {
StringBuilder sb = new StringBuilder();
sb.append("<dataConfig> \n");
sb.append("<propertyWriter dateFormat=\"" + dateFormat + "\" type=\"ZKPropertiesWriter\" />\n");
sb.append("<propertyWriter dateFormat=\"").append(dateFormat).append("\" type=\"ZKPropertiesWriter\" />\n");
sb.append("<dataSource name=\"mock\" type=\"MockDataSource\"/>\n");
sb.append("<document name=\"TestSimplePropertiesWriter\"> \n");
sb.append("<entity name=\"SomeDates\" processor=\"SqlEntityProcessor\" dataSource=\"mock\" ");
@@ -166,4 +177,103 @@
log.debug(config);
return config;
}
/**
* Code copied with some adaptations from {@link org.apache.solr.util.TestHarness.LocalRequestFactory#makeRequest(String...)}.
*/
private static LocalSolrQueryRequest localMakeRequest(SolrCore core, String ... q) {
if (q.length==1) {
Map<String, String> args = new HashMap<>();
args.put(CommonParams.VERSION,"2.2");
return new LocalSolrQueryRequest(core, q[0], "", 0, 20, args);
}
if (q.length%2 != 0) {
throw new RuntimeException("The length of the string array (query arguments) needs to be even");
}
@SuppressWarnings({"rawtypes"})
Map.Entry<String, String> [] entries = new NamedList.NamedListEntry[q.length / 2];
for (int i = 0; i < q.length; i += 2) {
entries[i/2] = new NamedList.NamedListEntry<>(q[i], q[i+1]);
}
@SuppressWarnings({"rawtypes"})
NamedList nl = new NamedList(entries);
if(nl.get("wt" ) == null) nl.add("wt","xml");
return new LocalSolrQueryRequest(core, nl);
}
/**
* Code copied from {@link org.apache.solr.util.TestHarness#query(String, SolrQueryRequest)} because it is not
* <code>static</code> there (it could have been) and we do not have an instance of {@link org.apache.solr.util.TestHarness}.
*/
private static String localQuery(String handler, SolrQueryRequest req) throws Exception {
try {
SolrCore core = req.getCore();
SolrQueryResponse rsp = new SolrQueryResponse();
SolrRequestInfo.setRequestInfo(new SolrRequestInfo(req, rsp));
core.execute(core.getRequestHandler(handler),req,rsp); // TODO the core doesn't have the request handler
if (rsp.getException() != null) {
throw rsp.getException();
}
QueryResponseWriter responseWriter = core.getQueryResponseWriter(req);
if (responseWriter instanceof BinaryQueryResponseWriter) {
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(32000);
BinaryQueryResponseWriter writer = (BinaryQueryResponseWriter) responseWriter;
writer.write(byteArrayOutputStream, req, rsp);
return new String(byteArrayOutputStream.toByteArray(), StandardCharsets.UTF_8);
} else {
StringWriter sw = new StringWriter(32000);
responseWriter.write(sw,req,rsp);
return sw.toString();
}
} finally {
req.close();
SolrRequestInfo.clearRequestInfo();
}
}
/**
* Code copied from {@link org.apache.solr.SolrTestCaseJ4#assertQ(String, SolrQueryRequest, String...)} in order not to
* use the instance of the {@link org.apache.solr.util.TestHarness}.
*/
private static void localAssertQ(String message, SolrQueryRequest req, String... tests) {
try {
String m = (null == message) ? "" : message + " "; // TODO log 'm' !!!
//since the default (standard) response format is now JSON
//need to explicitly request XML since this class uses XPath
ModifiableSolrParams xmlWriterTypeParams = new ModifiableSolrParams(req.getParams());
xmlWriterTypeParams.set(CommonParams.WT,"xml");
//for tests, let's turn indentation off so we don't have to handle extraneous spaces
xmlWriterTypeParams.set("indent", xmlWriterTypeParams.get("indent", "off"));
req.setParams(xmlWriterTypeParams);
String response = localQuery(req.getParams().get(CommonParams.QT), req);
if (req.getParams().getBool("facet", false)) {
// add a test to ensure that faceting did not throw an exception
// internally, where it would be added to facet_counts/exception
String[] allTests = new String[tests.length+1];
System.arraycopy(tests,0,allTests,1,tests.length);
allTests[0] = "*[count(//lst[@name='facet_counts']/*[@name='exception'])=0]";
tests = allTests;
}
String results = BaseTestHarness.validateXPath(response, tests);
if (null != results) {
String msg = "REQUEST FAILED: xpath=" + results
+ "\n\txml response was: " + response
+ "\n\trequest was:" + req.getParamString();
log.error(msg);
throw new RuntimeException(msg);
}
} catch (XPathExpressionException e1) {
throw new RuntimeException("XPath is invalid", e1);
} catch (Exception e2) {
SolrException.log(log,"REQUEST FAILED: " + req.getParamString(), e2);
throw new RuntimeException("Exception during query", e2);
}
}
}