From 10fc0d75d784a620822079f09783891f5931e4c9 Mon Sep 17 00:00:00 2001 From: Christine Poerschke Date: Tue, 25 Jul 2017 11:05:24 +0100 Subject: [PATCH 01/95] SOLR-2715: turn @Ignore into @AwaitsFix --- .../core/src/test/org/apache/solr/core/TestJmxIntegration.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/solr/core/src/test/org/apache/solr/core/TestJmxIntegration.java b/solr/core/src/test/org/apache/solr/core/TestJmxIntegration.java index 70df3c52177..a0f4071d482 100644 --- a/solr/core/src/test/org/apache/solr/core/TestJmxIntegration.java +++ b/solr/core/src/test/org/apache/solr/core/TestJmxIntegration.java @@ -23,7 +23,6 @@ import org.apache.solr.metrics.reporters.SolrJmxReporter; import org.apache.solr.util.AbstractSolrTestCase; import org.junit.AfterClass; import org.junit.BeforeClass; -import org.junit.Ignore; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -156,7 +155,7 @@ public class TestJmxIntegration extends AbstractSolrTestCase { numDocs > oldNumDocs); } - @Test @Ignore("timing problem? https://issues.apache.org/jira/browse/SOLR-2715") + @Test @AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/SOLR-2715") // timing problem? 
public void testJmxOnCoreReload() throws Exception { String coreName = h.getCore().getName(); From aa1d5feba01a50d4bc002e346e54ff26f6abc664 Mon Sep 17 00:00:00 2001 From: yonik Date: Tue, 25 Jul 2017 10:49:24 -0400 Subject: [PATCH 02/95] SOLR-11093: add Points to GraphQuery --- solr/CHANGES.txt | 2 + .../apache/solr/search/facet/UniqueAgg.java | 82 +----- .../apache/solr/search/join/GraphQuery.java | 89 ++---- .../solr/search/join/GraphQueryParser.java | 1 + .../solr/search/join/GraphTermsCollector.java | 260 +++++++++++++++--- .../solr/util/{hll => }/LongIterator.java | 6 +- .../java/org/apache/solr/util/LongSet.java | 135 +++++++++ .../org/apache/solr/util/hll/BitVector.java | 2 + .../java/org/apache/solr/util/hll/HLL.java | 1 + .../solr/collection1/conf/schema_latest.xml | 17 ++ .../solr/search/join/GraphQueryTest.java | 23 +- .../apache/solr/util/hll/BitVectorTest.java | 1 + .../org/apache/solr/util/hll/FullHLLTest.java | 1 + 13 files changed, 433 insertions(+), 187 deletions(-) rename solr/core/src/java/org/apache/solr/util/{hll => }/LongIterator.java (86%) create mode 100644 solr/core/src/java/org/apache/solr/util/LongSet.java diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index b123477c92e..bc1b2b66587 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -316,6 +316,8 @@ New Features * SOLR-10282: bin/solr support for enabling Kerberos authentication (Ishan Chattopadhyaya, Jason Gerlowski) +* SOLR-11093: Add support for PointFields for {!graph} query. (yonik) + Bug Fixes ---------------------- * SOLR-9262: Connection and read timeouts are being ignored by UpdateShardHandler after SOLR-4509. 
diff --git a/solr/core/src/java/org/apache/solr/search/facet/UniqueAgg.java b/solr/core/src/java/org/apache/solr/search/facet/UniqueAgg.java index dba410ac3c4..454197595b7 100644 --- a/solr/core/src/java/org/apache/solr/search/facet/UniqueAgg.java +++ b/solr/core/src/java/org/apache/solr/search/facet/UniqueAgg.java @@ -29,6 +29,8 @@ import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.search.DocIdSetIterator; import org.apache.solr.common.util.SimpleOrderedMap; import org.apache.solr.schema.SchemaField; +import org.apache.solr.util.LongIterator; +import org.apache.solr.util.LongSet; public class UniqueAgg extends StrAggValueSource { public static String UNIQUE = "unique"; @@ -122,75 +124,6 @@ public class UniqueAgg extends StrAggValueSource { } - - static class LongSet { - - static final float LOAD_FACTOR = 0.7f; - - long[] vals; - int cardinality; - int mask; - int threshold; - int zeroCount; // 1 if a 0 was collected - - /** sz must be a power of two */ - LongSet(int sz) { - vals = new long[sz]; - mask = sz - 1; - threshold = (int) (sz * LOAD_FACTOR); - } - - void add(long val) { - if (val == 0) { - zeroCount = 1; - return; - } - if (cardinality >= threshold) { - rehash(); - } - - // For floats: exponent bits start at bit 23 for single precision, - // and bit 52 for double precision. - // Many values will only have significant bits just to the right of that, - // and the leftmost bits will all be zero. - - // For now, lets just settle to get first 8 significant mantissa bits of double or float in the lowest bits of our hash - // The upper bits of our hash will be irrelevant. 
- int h = (int) (val + (val >>> 44) + (val >>> 15)); - for (int slot = h & mask; ;slot = (slot + 1) & mask) { - long v = vals[slot]; - if (v == 0) { - vals[slot] = val; - cardinality++; - break; - } else if (v == val) { - // val is already in the set - break; - } - } - } - - private void rehash() { - long[] oldVals = vals; - int newCapacity = vals.length << 1; - vals = new long[newCapacity]; - mask = newCapacity - 1; - threshold = (int) (newCapacity * LOAD_FACTOR); - cardinality = 0; - - for (long val : oldVals) { - if (val != 0) { - add(val); - } - } - } - - int cardinality() { - return cardinality + zeroCount; - } - } - - static abstract class BaseNumericAcc extends SlotAcc { SchemaField sf; LongSet[] sets; @@ -259,16 +192,11 @@ public class UniqueAgg extends StrAggValueSource { if (unique <= maxExplicit) { List lst = new ArrayList( Math.min(unique, maxExplicit) ); if (set != null) { - if (set.zeroCount > 0) { - lst.add(0); - } - for (long val : set.vals) { - if (val != 0) { - lst.add(val); - } + LongIterator iter = set.iterator(); + while (iter.hasNext()) { + lst.add( iter.next() ); } } - map.add("vals", lst); } diff --git a/solr/core/src/java/org/apache/solr/search/join/GraphQuery.java b/solr/core/src/java/org/apache/solr/search/join/GraphQuery.java index db416519602..7e52f0c9edb 100644 --- a/solr/core/src/java/org/apache/solr/search/join/GraphQuery.java +++ b/solr/core/src/java/org/apache/solr/search/join/GraphQuery.java @@ -25,16 +25,15 @@ import java.util.TreeSet; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.Term; -import org.apache.lucene.search.AutomatonQuery; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.DocIdSet; import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.IndexSearcher; import 
org.apache.lucene.search.Query; import org.apache.lucene.search.Scorer; -import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.Weight; import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.util.BytesRef; @@ -42,6 +41,7 @@ import org.apache.lucene.util.BytesRefHash; import org.apache.lucene.util.FixedBitSet; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.DaciukMihovAutomatonBuilder; +import org.apache.solr.schema.SchemaField; import org.apache.solr.search.BitDocSet; import org.apache.solr.search.DocSet; import org.apache.solr.search.Filter; @@ -130,17 +130,22 @@ public class GraphQuery extends Query { protected class GraphQueryWeight extends Weight { final SolrIndexSearcher fromSearcher; - private final float boost; - private int frontierSize = 0; private int currentDepth = -1; private Filter filter; private DocSet resultSet; - + SchemaField fromSchemaField; + SchemaField toSchemaField; + public GraphQueryWeight(SolrIndexSearcher searcher, float boost) { // Grab the searcher so we can run additional searches. super(null); this.fromSearcher = searcher; - this.boost = boost; + this.fromSchemaField = searcher.getSchema().getField(fromField); + this.toSchemaField = searcher.getSchema().getField(toField); + } + + GraphQuery getGraphQuery() { + return GraphQuery.this; } @Override @@ -175,7 +180,7 @@ public class GraphQuery extends Query { // the initial query for the frontier for the first query Query frontierQuery = q; // Find all documents in this graph that are leaf nodes to speed traversal - DocSet leafNodes = resolveLeafNodes(toField); + DocSet leafNodes = resolveLeafNodes(); // Start the breadth first graph traversal. do { @@ -187,25 +192,17 @@ public class GraphQuery extends Query { // if we've reached the max depth, don't worry about collecting edges. 
fromSet = fromSearcher.getDocSetBits(frontierQuery); // explicitly the frontier size is zero now so we can break - frontierSize = 0; + frontierQuery = null; } else { // when we're not at the max depth level, we need to collect edges // Create the graph result collector for this level - GraphTermsCollector graphResultCollector = new GraphTermsCollector(toField,capacity, resultBits, leafNodes); + GraphEdgeCollector graphResultCollector = toSchemaField.getType().isPointField() + ? new GraphPointsCollector(this, capacity, resultBits, leafNodes) + : new GraphTermsCollector(this, capacity, resultBits, leafNodes); + fromSearcher.search(frontierQuery, graphResultCollector); fromSet = graphResultCollector.getDocSet(); - // All edge ids on the frontier. - BytesRefHash collectorTerms = graphResultCollector.getCollectorTerms(); - frontierSize = collectorTerms.size(); - // The resulting doc set from the frontier. - FrontierQuery fq = buildFrontierQuery(collectorTerms, frontierSize); - if (fq == null) { - // in case we get null back, make sure we know we're done at this level. - frontierSize = 0; - } else { - frontierQuery = fq.getQuery(); - frontierSize = fq.getFrontierSize(); - } + frontierQuery = graphResultCollector.getFrontierQuery(); } if (currentDepth == 0 && !returnRoot) { // grab a copy of the root bits but only if we need it. 
@@ -217,7 +214,7 @@ public class GraphQuery extends Query { if ((maxDepth != -1 && currentDepth >= maxDepth)) { break; } - } while (frontierSize > 0); + } while (frontierQuery != null); // helper bit set operations on the final result set if (!returnRoot) { resultBits.andNot(rootBits); @@ -232,9 +229,10 @@ public class GraphQuery extends Query { } } - private DocSet resolveLeafNodes(String field) throws IOException { + private DocSet resolveLeafNodes() throws IOException { + String field = toSchemaField.getName(); BooleanQuery.Builder leafNodeQuery = new BooleanQuery.Builder(); - WildcardQuery edgeQuery = new WildcardQuery(new Term(field, "*")); + Query edgeQuery = toSchemaField.hasDocValues() ? new DocValuesFieldExistsQuery(field) : new WildcardQuery(new Term(field, "*")); leafNodeQuery.add(edgeQuery, Occur.MUST_NOT); DocSet leafNodes = fromSearcher.getDocSet(leafNodeQuery.build()); return leafNodes; @@ -252,50 +250,7 @@ public class GraphQuery extends Query { final Automaton a = DaciukMihovAutomatonBuilder.build(terms); return a; } - - /** - * This return a query that represents the documents that match the next hop in the query. - * - * collectorTerms - the terms that represent the edge ids for the current frontier. - * frontierSize - the size of the frontier query (number of unique edges) - * - */ - public FrontierQuery buildFrontierQuery(BytesRefHash collectorTerms, Integer frontierSize) { - if (collectorTerms == null || collectorTerms.size() == 0) { - // return null if there are no terms (edges) to traverse. - return null; - } else { - // Create a query - Query q = null; - // TODO: see if we should dynamically select this based on the frontier size. - if (useAutn) { - // build an automaton based query for the frontier. 
- Automaton autn = buildAutomaton(collectorTerms); - AutomatonQuery autnQuery = new AutomatonQuery(new Term(fromField), autn); - q = autnQuery; - } else { - List termList = new ArrayList<>(collectorTerms.size()); - for (int i = 0 ; i < collectorTerms.size(); i++) { - BytesRef ref = new BytesRef(); - collectorTerms.get(i, ref); - termList.add(ref); - } - q = new TermInSetQuery(fromField, termList); - } - - // If there is a filter to be used while crawling the graph, add that. - if (traversalFilter != null) { - BooleanQuery.Builder builder = new BooleanQuery.Builder(); - builder.add(q, Occur.MUST); - builder.add(traversalFilter, Occur.MUST); - q = builder.build(); - } - // return the new query. - FrontierQuery frontier = new FrontierQuery(q, frontierSize); - return frontier; - } - } @Override public Scorer scorer(LeafReaderContext context) throws IOException { diff --git a/solr/core/src/java/org/apache/solr/search/join/GraphQueryParser.java b/solr/core/src/java/org/apache/solr/search/join/GraphQueryParser.java index 0ef9e6cf637..9c9b85234fc 100644 --- a/solr/core/src/java/org/apache/solr/search/join/GraphQueryParser.java +++ b/solr/core/src/java/org/apache/solr/search/join/GraphQueryParser.java @@ -41,6 +41,7 @@ public class GraphQueryParser extends QParser { String traversalFilterS = localParams.get("traversalFilter"); Query traversalFilter = traversalFilterS == null ? 
null : subQuery(traversalFilterS, null).getQuery(); + // NOTE: the from/to are reversed from {!join} String fromField = localParams.get("from", "node_id"); String toField = localParams.get("to", "edge_ids"); diff --git a/solr/core/src/java/org/apache/solr/search/join/GraphTermsCollector.java b/solr/core/src/java/org/apache/solr/search/join/GraphTermsCollector.java index 377f71be6bd..f32c83b9c99 100644 --- a/solr/core/src/java/org/apache/solr/search/join/GraphTermsCollector.java +++ b/solr/core/src/java/org/apache/solr/search/join/GraphTermsCollector.java @@ -17,19 +17,40 @@ package org.apache.solr.search.join; import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.TreeSet; +import org.apache.lucene.document.DoublePoint; +import org.apache.lucene.document.FloatPoint; +import org.apache.lucene.document.IntPoint; +import org.apache.lucene.document.LongPoint; import org.apache.lucene.index.DocValues; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.AutomatonQuery; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.Collector; +import org.apache.lucene.search.Query; import org.apache.lucene.search.SimpleCollector; +import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.util.BitSet; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefHash; import org.apache.lucene.util.FixedBitSet; +import org.apache.lucene.util.NumericUtils; +import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.DaciukMihovAutomatonBuilder; +import org.apache.solr.schema.NumberType; +import org.apache.solr.schema.SchemaField; import org.apache.solr.search.BitDocSet; import 
org.apache.solr.search.DocSet; +import org.apache.solr.util.LongIterator; +import org.apache.solr.util.LongSet; /** * A graph hit collector. This accumulates the edges for a given graph traversal. @@ -37,17 +58,14 @@ import org.apache.solr.search.DocSet; * already traversed. * @lucene.internal */ -class GraphTermsCollector extends SimpleCollector implements Collector { - - // the field to collect edge ids from - private String field; - // all the collected terms - private BytesRefHash collectorTerms; - private SortedSetDocValues docTermOrds; +abstract class GraphEdgeCollector extends SimpleCollector implements Collector { + + GraphQuery.GraphQueryWeight weight; + // the result set that is being collected. - private Bits currentResult; + Bits currentResult; // known leaf nodes - private DocSet leafNodes; + DocSet leafNodes; // number of hits discovered at this level. int numHits=0; BitSet bits; @@ -56,11 +74,10 @@ class GraphTermsCollector extends SimpleCollector implements Collector { int baseInParent; // if we care to track this. boolean hasCycles = false; - - GraphTermsCollector(String field,int maxDoc, Bits currentResult, DocSet leafNodes) { - this.field = field; + + GraphEdgeCollector(GraphQuery.GraphQueryWeight weight, int maxDoc, Bits currentResult, DocSet leafNodes) { + this.weight = weight; this.maxDoc = maxDoc; - this.collectorTerms = new BytesRefHash(); this.currentResult = currentResult; this.leafNodes = leafNodes; if (bits==null) { @@ -80,29 +97,14 @@ class GraphTermsCollector extends SimpleCollector implements Collector { // collect the docs addDocToResult(doc); // Optimization to not look up edges for a document that is a leaf node - if (!leafNodes.exists(doc)) { + if (leafNodes == null || !leafNodes.exists(doc)) { addEdgeIdsToResult(doc-base); } // Note: tracking links in for each result would be a huge memory hog... so not implementing at this time. 
} - private void addEdgeIdsToResult(int doc) throws IOException { - // set the doc to pull the edges ids for. - if (doc > docTermOrds.docID()) { - docTermOrds.advance(doc); - } - if (doc == docTermOrds.docID()) { - BytesRef edgeValue = new BytesRef(); - long ord; - while ((ord = docTermOrds.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) { - // TODO: handle non string type fields. - edgeValue = docTermOrds.lookupOrd(ord); - // add the edge id to the collector terms. - collectorTerms.add(edgeValue); - } - } - } + abstract void addEdgeIdsToResult(int doc) throws IOException; private void addDocToResult(int docWithBase) { // this document is part of the traversal. mark it in our bitmap. @@ -121,14 +123,25 @@ class GraphTermsCollector extends SimpleCollector implements Collector { @Override public void doSetNextReader(LeafReaderContext context) throws IOException { - // Grab the updated doc values. - docTermOrds = DocValues.getSortedSet(context.reader(), field); base = context.docBase; baseInParent = context.docBaseInParent; } - - public BytesRefHash getCollectorTerms() { - return collectorTerms; + + protected abstract Query getResultQuery(); + + public Query getFrontierQuery() { + Query q = getResultQuery(); + if (q == null) return null; + + // If there is a filter to be used while crawling the graph, add that. 
+ if (weight.getGraphQuery().getTraversalFilter() != null) { + BooleanQuery.Builder builder = new BooleanQuery.Builder(); + builder.add(q, BooleanClause.Occur.MUST); + builder.add(weight.getGraphQuery().getTraversalFilter(), BooleanClause.Occur.MUST); + q = builder.build(); + } + + return q; } @Override @@ -137,3 +150,180 @@ class GraphTermsCollector extends SimpleCollector implements Collector { } } + +class GraphTermsCollector extends GraphEdgeCollector { + // all the collected terms + private BytesRefHash collectorTerms; + private SortedSetDocValues docTermOrds; + + + GraphTermsCollector(GraphQuery.GraphQueryWeight weight, int maxDoc, Bits currentResult, DocSet leafNodes) { + super(weight, maxDoc, currentResult, leafNodes); + this.collectorTerms = new BytesRefHash(); + } + + @Override + public void doSetNextReader(LeafReaderContext context) throws IOException { + super.doSetNextReader(context); + // Grab the updated doc values. + docTermOrds = DocValues.getSortedSet(context.reader(), weight.getGraphQuery().getToField()); + } + + @Override + void addEdgeIdsToResult(int doc) throws IOException { + // set the doc to pull the edges ids for. + if (doc > docTermOrds.docID()) { + docTermOrds.advance(doc); + } + if (doc == docTermOrds.docID()) { + BytesRef edgeValue = new BytesRef(); + long ord; + while ((ord = docTermOrds.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) { + edgeValue = docTermOrds.lookupOrd(ord); + // add the edge id to the collector terms. + collectorTerms.add(edgeValue); + } + } + } + + @Override + protected Query getResultQuery() { + if (collectorTerms == null || collectorTerms.size() == 0) { + // return null if there are no terms (edges) to traverse. + return null; + } else { + // Create a query + Query q = null; + + GraphQuery gq = weight.getGraphQuery(); + // TODO: see if we should dynamically select this based on the frontier size. + if (gq.isUseAutn()) { + // build an automaton based query for the frontier. 
+ Automaton autn = buildAutomaton(collectorTerms); + AutomatonQuery autnQuery = new AutomatonQuery(new Term(gq.getFromField()), autn); + q = autnQuery; + } else { + List termList = new ArrayList<>(collectorTerms.size()); + for (int i = 0 ; i < collectorTerms.size(); i++) { + BytesRef ref = new BytesRef(); + collectorTerms.get(i, ref); + termList.add(ref); + } + q = new TermInSetQuery(gq.getFromField(), termList); + } + + return q; + } + } + + + /** Build an automaton to represent the frontier query */ + private Automaton buildAutomaton(BytesRefHash termBytesHash) { + // need top pass a sorted set of terms to the autn builder (maybe a better way to avoid this?) + final TreeSet terms = new TreeSet(); + for (int i = 0 ; i < termBytesHash.size(); i++) { + BytesRef ref = new BytesRef(); + termBytesHash.get(i, ref); + terms.add(ref); + } + final Automaton a = DaciukMihovAutomatonBuilder.build(terms); + return a; + } +} + + +class GraphPointsCollector extends GraphEdgeCollector { + final LongSet set = new LongSet(256); + + SortedNumericDocValues values = null; + + GraphPointsCollector(GraphQuery.GraphQueryWeight weight, int maxDoc, Bits currentResult, DocSet leafNodes) { + super(weight, maxDoc, currentResult, leafNodes); + } + + @Override + public void doSetNextReader(LeafReaderContext context) throws IOException { + super.doSetNextReader(context); + values = DocValues.getSortedNumeric(context.reader(), weight.getGraphQuery().getToField()); + } + + @Override + void addEdgeIdsToResult(int doc) throws IOException { + // set the doc to pull the edges ids for. 
+ int valuesDoc = values.docID(); + if (valuesDoc < doc) { + valuesDoc = values.advance(doc); + } + if (valuesDoc == doc) { + int count = values.docValueCount(); + for (int i = 0; i < count; i++) { + long v = values.nextValue(); + set.add(v); + } + } + } + + @Override + protected Query getResultQuery() { + if (set.cardinality() == 0) return null; + + Query q = null; + SchemaField sfield = weight.fromSchemaField; + NumberType ntype = sfield.getType().getNumberType(); + boolean multiValued = sfield.multiValued(); + + if (ntype == NumberType.LONG || ntype == NumberType.DATE) { + long[] vals = new long[set.cardinality()]; + int i = 0; + for (LongIterator iter = set.iterator(); iter.hasNext(); ) { + long bits = iter.next(); + long v = bits; + vals[i++] = v; + } + q = LongPoint.newSetQuery(sfield.getName(), vals); + } else if (ntype == NumberType.INTEGER) { + int[] vals = new int[set.cardinality()]; + int i = 0; + for (LongIterator iter = set.iterator(); iter.hasNext(); ) { + long bits = iter.next(); + int v = (int)bits; + vals[i++] = v; + } + q = IntPoint.newSetQuery(sfield.getName(), vals); + } else if (ntype == NumberType.DOUBLE) { + double[] vals = new double[set.cardinality()]; + int i = 0; + for (LongIterator iter = set.iterator(); iter.hasNext(); ) { + long bits = iter.next(); + double v = multiValued ? NumericUtils.sortableLongToDouble(bits) : Double.longBitsToDouble(bits); + vals[i++] = v; + } + q = DoublePoint.newSetQuery(sfield.getName(), vals); + } else if (ntype == NumberType.FLOAT) { + float[] vals = new float[set.cardinality()]; + int i = 0; + for (LongIterator iter = set.iterator(); iter.hasNext(); ) { + long bits = iter.next(); + float v = multiValued ? 
NumericUtils.sortableIntToFloat((int) bits) : Float.intBitsToFloat((int) bits); + vals[i++] = v; + } + q = FloatPoint.newSetQuery(sfield.getName(), vals); + } + + return q; + } + + + /** Build an automaton to represent the frontier query */ + private Automaton buildAutomaton(BytesRefHash termBytesHash) { + // need top pass a sorted set of terms to the autn builder (maybe a better way to avoid this?) + final TreeSet terms = new TreeSet(); + for (int i = 0 ; i < termBytesHash.size(); i++) { + BytesRef ref = new BytesRef(); + termBytesHash.get(i, ref); + terms.add(ref); + } + final Automaton a = DaciukMihovAutomatonBuilder.build(terms); + return a; + } +} diff --git a/solr/core/src/java/org/apache/solr/util/hll/LongIterator.java b/solr/core/src/java/org/apache/solr/util/LongIterator.java similarity index 86% rename from solr/core/src/java/org/apache/solr/util/hll/LongIterator.java rename to solr/core/src/java/org/apache/solr/util/LongIterator.java index a584ccca814..654c9a597c0 100644 --- a/solr/core/src/java/org/apache/solr/util/hll/LongIterator.java +++ b/solr/core/src/java/org/apache/solr/util/LongIterator.java @@ -14,13 +14,13 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.solr.util.hll; +package org.apache.solr.util; /** * A long-based iterator. This is not is-a {@link java.util.Iterator} * to prevent autoboxing between Long and long. */ -interface LongIterator { +public interface LongIterator { /** * @return true if and only if there are more elements to * iterate over. false otherwise. @@ -28,7 +28,7 @@ interface LongIterator { boolean hasNext(); /** - * @return the next long in the collection. + * @return the next long in the collection. Only valid after hasNext() has been called and returns true. 
*/ long next(); } \ No newline at end of file diff --git a/solr/core/src/java/org/apache/solr/util/LongSet.java b/solr/core/src/java/org/apache/solr/util/LongSet.java new file mode 100644 index 00000000000..e649e04177e --- /dev/null +++ b/solr/core/src/java/org/apache/solr/util/LongSet.java @@ -0,0 +1,135 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.solr.util; + + +/** Collects long values in a hash set (closed hashing on power-of-two sized long[]) + * @lucene.internal + */ +public class LongSet { + + private static final float LOAD_FACTOR = 0.7f; + + private long[] vals; + private int cardinality; + private int mask; + private int threshold; + private int zeroCount; // 1 if a 0 was collected + + public LongSet(int sz) { + sz = Math.max(org.apache.lucene.util.BitUtil.nextHighestPowerOfTwo(sz), 2); + vals = new long[sz]; + mask = sz - 1; + threshold = (int) (sz * LOAD_FACTOR); + } + + /** Returns the long[] array that has entries filled in with values or "0" for empty. 
+ * To see if "0" itself is in the set, call containsZero() + */ + public long[] getBackingArray() { + return vals; + } + + public boolean containsZero() { + return zeroCount != 0; + } + + /** Adds an additional value to the set */ + public void add(long val) { + if (val == 0) { + zeroCount = 1; + return; + } + if (cardinality >= threshold) { + rehash(); + } + + // For floats: exponent bits start at bit 23 for single precision, + // and bit 52 for double precision. + // Many values will only have significant bits just to the right of that. + + // For now, lets just settle to get first 8 significant mantissa bits of double or float in the lowest bits of our hash + // The upper bits of our hash will be irrelevant. + int h = (int) (val + (val >>> 44) + (val >>> 15)); + for (int slot = h & mask; ; slot = (slot + 1) & mask) { + long v = vals[slot]; + if (v == 0) { + vals[slot] = val; + cardinality++; + break; + } else if (v == val) { + // val is already in the set + break; + } + } + } + + private void rehash() { + long[] oldVals = vals; + int newCapacity = vals.length << 1; + vals = new long[newCapacity]; + mask = newCapacity - 1; + threshold = (int) (newCapacity * LOAD_FACTOR); + cardinality = 0; + + for (long val : oldVals) { + if (val != 0) { + add(val); + } + } + } + + /** The number of values in the set */ + public int cardinality() { + return cardinality + zeroCount; + } + + + /** Returns an iterator over the values in the set. + * hasNext() must return true for next() to return a valid value. 
+ */ + public LongIterator iterator() { + return new LongIterator() { + private boolean hasNext = zeroCount > 0; + private int i = -1; + private long value = 0; + + @Override + public boolean hasNext() { + if (hasNext) { + // this is only executed the first time for the special case 0 value + return true; + } + while (++i < vals.length) { + value = vals[i]; + if (value != 0) { + return hasNext = true; + } + } + return false; + } + + @Override + public long next() { + hasNext = false; + return value; + } + + }; + } +} diff --git a/solr/core/src/java/org/apache/solr/util/hll/BitVector.java b/solr/core/src/java/org/apache/solr/util/hll/BitVector.java index 2545e43432a..6c62b1ef518 100644 --- a/solr/core/src/java/org/apache/solr/util/hll/BitVector.java +++ b/solr/core/src/java/org/apache/solr/util/hll/BitVector.java @@ -16,6 +16,8 @@ */ package org.apache.solr.util.hll; +import org.apache.solr.util.LongIterator; + /** * A vector (array) of bits that is accessed in units ("registers") of width * bits which are stored as 64bit "words" (longs). In this context diff --git a/solr/core/src/java/org/apache/solr/util/hll/HLL.java b/solr/core/src/java/org/apache/solr/util/hll/HLL.java index 6bcaee46987..26bfa89472a 100644 --- a/solr/core/src/java/org/apache/solr/util/hll/HLL.java +++ b/solr/core/src/java/org/apache/solr/util/hll/HLL.java @@ -22,6 +22,7 @@ import com.carrotsearch.hppc.IntByteHashMap; import com.carrotsearch.hppc.LongHashSet; import com.carrotsearch.hppc.cursors.IntByteCursor; import com.carrotsearch.hppc.cursors.LongCursor; +import org.apache.solr.util.LongIterator; /** * A probabilistic set of hashed long elements. 
Useful for computing diff --git a/solr/core/src/test-files/solr/collection1/conf/schema_latest.xml b/solr/core/src/test-files/solr/collection1/conf/schema_latest.xml index 127b291f4cb..1135d20485f 100644 --- a/solr/core/src/test-files/solr/collection1/conf/schema_latest.xml +++ b/solr/core/src/test-files/solr/collection1/conf/schema_latest.xml @@ -240,6 +240,16 @@ + + + + + + + + + + @@ -354,6 +364,13 @@ field first in an ascending sort and last in a descending sort. --> + + + + + + + - + @@ -123,52 +125,28 @@ EXAMPLE: name="*_i" will match any field ending in _i (like myid_i, z_i) RESTRICTION: the glob-like pattern in the name attribute must have a "*" only at the start or the end. --> - - + + - - + + - - - - + + + + - - + + - - - - - - - - - - - - - - - - - - - - - - - - @@ -199,8 +177,7 @@ @@ -238,42 +214,6 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/solr/example/example-DIH/solr/db/conf/managed-schema b/solr/example/example-DIH/solr/db/conf/managed-schema index b96ff4c0cd6..df01bc02d3d 100644 --- a/solr/example/example-DIH/solr/db/conf/managed-schema +++ b/solr/example/example-DIH/solr/db/conf/managed-schema @@ -65,21 +65,21 @@ --> - + --> - + - - + + - - - + + + - - - - + + + + - - + + - - - - - - - @@ -310,8 +303,7 @@ + --> - - - - - - - - - - + + + + + + + + + - - - - - - + + + + diff --git a/solr/example/example-DIH/solr/mail/conf/managed-schema b/solr/example/example-DIH/solr/mail/conf/managed-schema index e027717cfca..06c8e7d0262 100644 --- a/solr/example/example-DIH/solr/mail/conf/managed-schema +++ b/solr/example/example-DIH/solr/mail/conf/managed-schema @@ -65,21 +65,21 @@ --> - + --> - + @@ -121,7 +121,7 @@ - + @@ -135,38 +135,31 @@ RESTRICTION: the glob-like pattern in the name attribute must have a "*" only at the start or the end. 
--> - - + + - - - + + + - - - - + + + + - - + + - - - - - - - @@ -229,8 +222,7 @@ + --> - - - - - - - - - - + + + + + + + + + - - - - - - + + + + @@ -419,7 +395,6 @@ diff --git a/solr/example/example-DIH/solr/solr/conf/managed-schema b/solr/example/example-DIH/solr/solr/conf/managed-schema index 338aa358ba4..d6de5f5daaf 100644 --- a/solr/example/example-DIH/solr/solr/conf/managed-schema +++ b/solr/example/example-DIH/solr/solr/conf/managed-schema @@ -65,21 +65,21 @@ --> - + --> - + - - + + - - - + + + - - - - + + + + - - + + - - - - - - - @@ -278,17 +271,17 @@ - - - + - + + --> - - - - - - - - - - + + + + + + + + + - - - - - - + + + + diff --git a/solr/example/files/conf/managed-schema b/solr/example/files/conf/managed-schema index fe91a5d8a07..97742740076 100644 --- a/solr/example/files/conf/managed-schema +++ b/solr/example/files/conf/managed-schema @@ -14,8 +14,6 @@ - - @@ -24,17 +22,9 @@ - - - - - - - - @@ -61,10 +51,6 @@ - - - - @@ -413,12 +399,6 @@ - - - - - - @@ -449,7 +429,7 @@ - + @@ -501,46 +481,30 @@ - - - - - - - - - - - - - - + + - + - - - + + + - - - - - + - + - - + + diff --git a/solr/server/solr/configsets/_default/conf/managed-schema b/solr/server/solr/configsets/_default/conf/managed-schema index 27c7bf395a1..72b138f8d8e 100644 --- a/solr/server/solr/configsets/_default/conf/managed-schema +++ b/solr/server/solr/configsets/_default/conf/managed-schema @@ -67,9 +67,11 @@ recommended (required, if you are using *Point fields) for faceting, grouping, sorting and function queries. Doc Values will make the index faster to load, more NRT-friendly and more memory-efficient. - They are currently only supported by StrField, UUIDField, all Trie*Fields and *PointFields. 
- Some field types may have limitations on using Doc Values (check the - documentation of the field type you're interested in for more information) + They are currently only supported by StrField, UUIDField, all + *PointFields, and depending on the field type, they might require + the field to be single-valued, be required or have a default value + (check the documentation of the field type you're interested in for + more information) multiValued: true if this field may contain multiple values per document omitNorms: (expert) set to true to omit the norms associated with this field (this disables length normalization and index-time @@ -110,7 +112,7 @@ - + @@ -123,52 +125,28 @@ EXAMPLE: name="*_i" will match any field ending in _i (like myid_i, z_i) RESTRICTION: the glob-like pattern in the name attribute must have a "*" only at the start or the end. --> - - + + - - + + - - - - + + + + - - + + - - - - - - - - - - - - - - - - - - - - - - - - @@ -199,8 +177,7 @@ @@ -238,42 +214,6 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/solr/server/solr/configsets/sample_techproducts_configs/conf/managed-schema b/solr/server/solr/configsets/sample_techproducts_configs/conf/managed-schema index cd33c4f1068..5d1986c910d 100644 --- a/solr/server/solr/configsets/sample_techproducts_configs/conf/managed-schema +++ b/solr/server/solr/configsets/sample_techproducts_configs/conf/managed-schema @@ -76,10 +76,10 @@ grouping, sorting and function queries. Doc values will make the index faster to load, more NRT-friendly and more memory-efficient. 
They however come with some limitations: they are currently only - supported by StrField, UUIDField, all Trie*Fields and *PointFields, - and depending on the field type, they might require the field to be - single-valued, be required or have a default value (check the - documentation of the field type you're interested in for more information) + supported by StrField, UUIDField, all *PointFields, and depending + on the field type, they might require the field to be single-valued, + be required or have a default value (check the documentation + of the field type you're interested in for more information) multiValued: true if this field may contain multiple values per document omitNorms: (expert) set to true to omit the norms associated with this field (this disables length normalization and index-time @@ -113,7 +113,7 @@ or Solr won't start. _version_ and update log are required for SolrCloud --> - + - - + + - - - + + + - - - - + + + + - - + + - - - - - - - - - - - - - - - - - - - @@ -249,8 +230,6 @@ alternately, change the type="ignored" to some other type e.g. "text" if you want unknown fields indexed and/or stored by default --> - - - - - - - - - - - - - @@ -328,8 +295,7 @@ @@ -356,32 +321,6 @@ - - - - - - - - - - - - - - - - - @@ -757,8 +691,7 @@ special relevancy modes: score=overlapRatio|area|area2D (local-param to the query). DocValues is recommended for relevancy. 
--> - + geo="true" distanceUnits="kilometers" numberType="pdouble" /> + + + + + + + + + diff --git a/solr/core/src/test/org/apache/solr/schema/TestPointFields.java b/solr/core/src/test/org/apache/solr/schema/TestPointFields.java index 04d704d9d74..8192406c6a7 100644 --- a/solr/core/src/test/org/apache/solr/schema/TestPointFields.java +++ b/solr/core/src/test/org/apache/solr/schema/TestPointFields.java @@ -2648,8 +2648,9 @@ public class TestPointFields extends SolrTestCaseJ4 { private void doTestPointFieldMultiValuedRangeQuery(String fieldName, String type, String[] numbers) throws Exception { assert numbers != null && numbers.length == 20; - assertTrue(h.getCore().getLatestSchema().getField(fieldName).multiValued()); - assertTrue(h.getCore().getLatestSchema().getField(fieldName).getType() instanceof PointField); + SchemaField sf = h.getCore().getLatestSchema().getField(fieldName); + assertTrue(sf.multiValued()); + assertTrue(sf.getType() instanceof PointField); for (int i=9; i >= 0; i--) { assertU(adoc("id", String.valueOf(i), fieldName, numbers[i], fieldName, numbers[i+10])); } @@ -2723,6 +2724,11 @@ public class TestPointFields extends SolrTestCaseJ4 { "fl", "id, " + fieldName, "sort", "id asc"), "//*[@numFound='1']", "//result/doc[1]/arr[@name='" + fieldName + "']/" + type + "[1][.='" + numbers[0] + "']"); + + if (sf.getType().getNumberType() == NumberType.FLOAT || sf.getType().getNumberType() == NumberType.DOUBLE) { + doTestDoubleFloatRangeLimits(fieldName, sf.getType().getNumberType() == NumberType.DOUBLE); + } + } private void doTestPointFieldMultiValuedFacetField(String nonDocValuesField, String dvFieldName, String[] numbers) throws Exception { @@ -3191,45 +3197,98 @@ public class TestPointFields extends SolrTestCaseJ4 { assertQ(req("q", fieldName + ":{" + arr[0] + " TO " + arr[i] + "}", "fl", "id, " + fieldName), "//*[@numFound='" + (Math.max(0, i-1)) + "']"); } - - clearIndex(); - assertU(adoc("id", "1", fieldName, String.valueOf(Float.MAX_VALUE))); - 
assertU(adoc("id", "2", fieldName, String.valueOf(Float.MIN_VALUE))); - assertU(adoc("id", "3", fieldName, String.valueOf(Float.NEGATIVE_INFINITY))); - assertU(adoc("id", "4", fieldName, String.valueOf(Float.POSITIVE_INFINITY))); - assertU(commit()); - assertQ(req("q", fieldName + ":[* TO *]", "fl", "id, " + fieldName), - "//*[@numFound='4']"); -// TODO: Awaits fix: SOLR-11070 -// assertQ(req("q", fieldName + ":{* TO *}", "fl", "id, " + fieldName), -// "//*[@numFound='4']"); - assertQ(req("q", fieldName + ":[" + Float.MIN_VALUE + " TO " + Float.MAX_VALUE + "]", "fl", "id, " + fieldName), - "//*[@numFound='2']"); - assertQ(req("q", fieldName + ":{" + Float.MIN_VALUE + " TO " + Float.MAX_VALUE + "]", "fl", "id, " + fieldName), - "//*[@numFound='1']"); - assertQ(req("q", fieldName + ":[" + Float.MIN_VALUE + " TO " + Float.MAX_VALUE + "}", "fl", "id, " + fieldName), - "//*[@numFound='1']"); - if (testDouble) { - assertQ(req("q", fieldName + ":[" + Double.MIN_VALUE + " TO " + Double.MIN_VALUE + "}", "fl", "id, " + fieldName), - "//*[@numFound='0']"); - assertQ(req("q", fieldName + ":{" + Double.MAX_VALUE + " TO " + Double.MAX_VALUE + "]", "fl", "id, " + fieldName), - "//*[@numFound='0']"); - assertQ(req("q", fieldName + ":{" + Double.NEGATIVE_INFINITY + " TO " + Double.NEGATIVE_INFINITY + "]", "fl", "id, " + fieldName), - "//*[@numFound='0']"); - assertQ(req("q", fieldName + ":[" + Double.POSITIVE_INFINITY + " TO " + Double.POSITIVE_INFINITY + "}", "fl", "id, " + fieldName), - "//*[@numFound='0']"); - } else { - assertQ(req("q", fieldName + ":[" + Float.MIN_VALUE + " TO " + Float.MIN_VALUE + "}", "fl", "id, " + fieldName), - "//*[@numFound='0']"); - assertQ(req("q", fieldName + ":{" + Float.MAX_VALUE + " TO " + Float.MAX_VALUE + "]", "fl", "id, " + fieldName), - "//*[@numFound='0']"); - assertQ(req("q", fieldName + ":{" + Float.NEGATIVE_INFINITY + " TO " + Float.NEGATIVE_INFINITY + "]", "fl", "id, " + fieldName), - "//*[@numFound='0']"); - assertQ(req("q", fieldName + 
":[" + Float.POSITIVE_INFINITY + " TO " + Float.POSITIVE_INFINITY + "}", "fl", "id, " + fieldName), - "//*[@numFound='0']"); - } + doTestDoubleFloatRangeLimits(fieldName, testDouble); } + private void doTestDoubleFloatRangeLimits(String fieldName, boolean testDouble) { + // POSITIVE/NEGATIVE_INFINITY toString is the same for Double and Float, it's OK to use this code for both cases + String positiveInfinity = String.valueOf(Double.POSITIVE_INFINITY); + String negativeInfinity = String.valueOf(Double.NEGATIVE_INFINITY); + String minVal = String.valueOf(testDouble?Double.MIN_VALUE:Float.MIN_VALUE); + String maxVal = String.valueOf(testDouble?Double.MAX_VALUE:Float.MAX_VALUE); + String negativeMinVal = "-" + minVal; + String negativeMaxVal = "-" + maxVal; + clearIndex(); + assertU(adoc("id", "1", fieldName, minVal)); + assertU(adoc("id", "2", fieldName, maxVal)); + assertU(adoc("id", "3", fieldName, negativeInfinity)); + assertU(adoc("id", "4", fieldName, positiveInfinity)); + assertU(adoc("id", "5", fieldName, negativeMinVal)); + assertU(adoc("id", "6", fieldName, negativeMaxVal)); + assertU(commit()); + //negative to negative + assertAllInclusiveExclusiveVariations(fieldName, "*", "-1", 2, 2, 2, 2); + assertAllInclusiveExclusiveVariations(fieldName, negativeInfinity, "-1", 1, 2, 1, 2); + assertAllInclusiveExclusiveVariations(fieldName, negativeMaxVal, negativeMinVal, 0, 1, 1, 2); + //negative to cero + assertAllInclusiveExclusiveVariations(fieldName, "*", "-0.0f", 3, 3, 3, 3); + assertAllInclusiveExclusiveVariations(fieldName, negativeInfinity, "-0.0f", 2, 3, 2, 3); + assertAllInclusiveExclusiveVariations(fieldName, negativeMinVal, "-0.0f", 0, 1, 0, 1); + + assertAllInclusiveExclusiveVariations(fieldName, "*", "0", 3, 3, 3, 3); + assertAllInclusiveExclusiveVariations(fieldName, negativeInfinity, "0", 2, 3, 2, 3); + assertAllInclusiveExclusiveVariations(fieldName, negativeMinVal, "0", 0, 1, 0, 1); + //negative to positive + 
assertAllInclusiveExclusiveVariations(fieldName, "*", "1", 4, 4, 4, 4); + assertAllInclusiveExclusiveVariations(fieldName, "-1", "*", 4, 4, 4, 4); + assertAllInclusiveExclusiveVariations(fieldName, "-1", "1", 2, 2, 2, 2); + assertAllInclusiveExclusiveVariations(fieldName, "*", "*", 6, 6, 6, 6); + + assertAllInclusiveExclusiveVariations(fieldName, "-1", positiveInfinity, 3, 3, 4, 4); + assertAllInclusiveExclusiveVariations(fieldName, negativeInfinity, "1", 3, 4, 3, 4); + assertAllInclusiveExclusiveVariations(fieldName, negativeInfinity, positiveInfinity, 4, 5, 5, 6); + + assertAllInclusiveExclusiveVariations(fieldName, negativeMinVal, minVal, 0, 1, 1, 2); + assertAllInclusiveExclusiveVariations(fieldName, negativeMaxVal, maxVal, 2, 3, 3, 4); + //cero to positive + assertAllInclusiveExclusiveVariations(fieldName, "-0.0f", "*", 3, 3, 3, 3); + assertAllInclusiveExclusiveVariations(fieldName, "-0.0f", positiveInfinity, 2, 2, 3, 3); + assertAllInclusiveExclusiveVariations(fieldName, "-0.0f", minVal, 0, 0, 1, 1); + + assertAllInclusiveExclusiveVariations(fieldName, "0", "*", 3, 3, 3, 3); + assertAllInclusiveExclusiveVariations(fieldName, "0", positiveInfinity, 2, 2, 3, 3); + assertAllInclusiveExclusiveVariations(fieldName, "0", minVal, 0, 0, 1, 1); + //positive to positive + assertAllInclusiveExclusiveVariations(fieldName, "1", "*", 2, 2, 2, 2); + assertAllInclusiveExclusiveVariations(fieldName, "1", positiveInfinity, 1, 1, 2, 2); + assertAllInclusiveExclusiveVariations(fieldName, minVal, maxVal, 0, 1, 1, 2); + + // inverted limits + assertAllInclusiveExclusiveVariations(fieldName, "1", "-1", 0, 0, 0, 0); + assertAllInclusiveExclusiveVariations(fieldName, positiveInfinity, negativeInfinity, 0, 0, 0, 0); + assertAllInclusiveExclusiveVariations(fieldName, minVal, negativeMinVal, 0, 0, 0, 0); + + // MatchNoDocs cases + assertAllInclusiveExclusiveVariations(fieldName, negativeInfinity, negativeInfinity, 0, 0, 0, 1); + assertAllInclusiveExclusiveVariations(fieldName, 
positiveInfinity, positiveInfinity, 0, 0, 0, 1); + + clearIndex(); + assertU(adoc("id", "1", fieldName, "0.0")); + assertU(adoc("id", "2", fieldName, "-0.0")); + assertU(commit()); + assertAllInclusiveExclusiveVariations(fieldName, "*", "*", 2, 2, 2, 2); + assertAllInclusiveExclusiveVariations(fieldName, "*", "0", 1, 1, 2, 2); + assertAllInclusiveExclusiveVariations(fieldName, "0", "*", 0, 1, 0, 1); + assertAllInclusiveExclusiveVariations(fieldName, "*", "-0.0f", 0, 0, 1, 1); + assertAllInclusiveExclusiveVariations(fieldName, "-0.0f", "*", 1, 2, 1, 2); + assertAllInclusiveExclusiveVariations(fieldName, "-0.0f", "0", 0, 1, 1, 2); + } + + private void assertAllInclusiveExclusiveVariations(String fieldName, String min, String max, + int countExclusiveExclusive, + int countInclusiveExclusive, + int countExclusiveInclusive, + int countInclusiveInclusive) { + assertQ(req("q", fieldName + ":{" + min + " TO " + max + "}", "fl", "id, " + fieldName), + "//*[@numFound='" + countExclusiveExclusive +"']"); + assertQ(req("q", fieldName + ":[" + min + " TO " + max + "}", "fl", "id, " + fieldName), + "//*[@numFound='" + countInclusiveExclusive +"']"); + assertQ(req("q", fieldName + ":{" + min + " TO " + max + "]", "fl", "id, " + fieldName), + "//*[@numFound='" + countExclusiveInclusive +"']"); + assertQ(req("q", fieldName + ":[" + min + " TO " + max + "]", "fl", "id, " + fieldName), + "//*[@numFound='" + countInclusiveInclusive +"']"); + } + private void doTestFloatPointFunctionQuery(String field) throws Exception { assertTrue(h.getCore().getLatestSchema().getField(field).getType() instanceof PointField); int numVals = 10 * RANDOM_MULTIPLIER; diff --git a/solr/core/src/test/org/apache/solr/search/TestRangeQuery.java b/solr/core/src/test/org/apache/solr/search/TestRangeQuery.java index 6591218024b..763c8d5380e 100644 --- a/solr/core/src/test/org/apache/solr/search/TestRangeQuery.java +++ b/solr/core/src/test/org/apache/solr/search/TestRangeQuery.java @@ -16,20 +16,33 @@ */ package 
org.apache.solr.search; +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; import org.apache.lucene.util.TestUtil; - import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrInputDocument; +import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.response.ResultContext; import org.apache.solr.response.SolrQueryResponse; +import org.apache.solr.schema.FieldType; +import org.apache.solr.schema.NumberType; +import org.apache.solr.schema.StrField; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; -import java.util.*; - public class TestRangeQuery extends SolrTestCaseJ4 { + + private final static long DATE_START_TIME_RANDOM_TEST = 1499797224224L; + private final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'", Locale.ROOT); @BeforeClass public static void beforeClass() throws Exception { @@ -385,6 +398,292 @@ public class TestRangeQuery extends SolrTestCaseJ4 { expectThrows(SyntaxError.class, () -> QParser.getParser("[A TO]", req("df", "text")).getQuery()); } + public void testCompareTypesRandomRangeQueries() throws Exception { + int cardinality = 10000; + Map types = new HashMap<>(); //single and multivalued field types + Map typesMv = new HashMap<>(); // multivalued field types only + types.put(NumberType.INTEGER, new String[]{"ti", "ti_dv", "ti_ni_dv", "i_p", "i_ni_p", "i_ndv_p", "tis", "tis_dv", "tis_ni_dv", "is_p", "is_ni_p", "is_ndv_p"}); + types.put(NumberType.LONG, new String[]{"tl", "tl_dv", "tl_ni_dv", "l_p", "l_ni_p", "l_ndv_p", "tls", "tls_dv", "tls_ni_dv", "ls_p", "ls_ni_p", "ls_ndv_p"}); + types.put(NumberType.FLOAT, new String[]{"tf", "tf_dv", "tf_ni_dv", "f_p", "f_ni_p", "f_ndv_p", "tfs", "tfs_dv", "tfs_ni_dv", 
"fs_p", "fs_ni_p", "fs_ndv_p"}); + types.put(NumberType.DOUBLE, new String[]{"td", "td_dv", "td_ni_dv", "d_p", "d_ni_p", "d_ndv_p", "tds", "tds_dv", "tds_ni_dv", "ds_p", "ds_ni_p", "ds_ndv_p"}); + types.put(NumberType.DATE, new String[]{"tdt", "tdt_dv", "tdt_ni_dv", "dt_p", "dt_ni_p", "dt_ndv_p", "tdts", "tdts_dv", "tdts_ni_dv", "dts_p", "dts_ni_p", "dts_ndv_p"}); + typesMv.put(NumberType.INTEGER, new String[]{"tis", "tis_dv", "tis_ni_dv", "is_p", "is_ni_p", "is_ndv_p"}); + typesMv.put(NumberType.LONG, new String[]{"tls", "tls_dv", "tls_ni_dv", "ls_p", "ls_ni_p", "ls_ndv_p"}); + typesMv.put(NumberType.FLOAT, new String[]{"tfs", "tfs_dv", "tfs_ni_dv", "fs_p", "fs_ni_p", "fs_ndv_p"}); + typesMv.put(NumberType.DOUBLE, new String[]{"tds", "tds_dv", "tds_ni_dv", "ds_p", "ds_ni_p", "ds_ndv_p"}); + typesMv.put(NumberType.DATE, new String[]{"tdts", "tdts_dv", "tdts_ni_dv", "dts_p", "dts_ni_p", "dts_ndv_p"}); + + for (int i = 0; i < atLeast(500); i++) { + if (random().nextInt(50) == 0) { + //have some empty docs + assertU(adoc("id", String.valueOf(i))); + continue; + } + + if (random().nextInt(100) == 0 && i > 0) { + //delete some docs + assertU(delI(String.valueOf(i - 1))); + } + SolrInputDocument document = new SolrInputDocument(); + document.setField("id", i); + for (Map.Entry entry:types.entrySet()) { + NumberType type = entry.getKey(); + String val = null; + List vals = null; + switch (type) { + case DATE: + val = randomDate(cardinality); + vals = getRandomDates(random().nextInt(10), cardinality); + break; + case DOUBLE: + val = String.valueOf(randomDouble(cardinality)); + vals = toStringList(getRandomDoubles(random().nextInt(10), cardinality)); + break; + case FLOAT: + val = String.valueOf(randomFloat(cardinality)); + vals = toStringList(getRandomFloats(random().nextInt(10), cardinality)); + break; + case INTEGER: + val = String.valueOf(randomInt(cardinality)); + vals = toStringList(getRandomInts(random().nextInt(10), cardinality)); + break; + case LONG: + val = 
String.valueOf(randomLong(cardinality)); + vals = toStringList(getRandomLongs(random().nextInt(10), cardinality)); + break; + default: + throw new AssertionError(); + + } + // SingleValue + for (String fieldSuffix:entry.getValue()) { + document.setField("field_sv_" + fieldSuffix, val); + } + // MultiValue + for (String fieldSuffix:typesMv.get(type)) { + for (String value:vals) { + document.addField("field_mv_" + fieldSuffix, value); + } + } + } + + assertU(adoc(document)); + if (random().nextInt(50) == 0) { + assertU(commit()); + } + } + assertU(commit()); + + String[][] possibleTypes = new String[types.size()][]; + types.values().toArray(possibleTypes); + String[][] possibleTypesMv = new String[typesMv.size()][]; + typesMv.values().toArray(possibleTypesMv); + for (int i = 0; i < atLeast(1000); i++) { + doTestQuery(cardinality, false, pickRandom(possibleTypes)); + doTestQuery(cardinality, true, pickRandom(possibleTypesMv)); + } + } + + private void doTestQuery(int cardinality, boolean mv, String[] types) throws Exception { + String[] startOptions = new String[]{"{", "["}; + String[] endOptions = new String[]{"}", "]"}; + String[] qRange = getRandomRange(cardinality, types[0]); + String start = pickRandom(startOptions); + String end = pickRandom(endOptions); + long expectedHits = doRangeQuery(mv, start, end, types[0], qRange); + for (int i = 1; i < types.length; i++) { + assertEquals("Unexpected results from query when comparing " + types[0] + " with " + types[i] + " and query: " + + start + qRange[0] + " TO " + qRange[1] + end + "\n", + expectedHits, doRangeQuery(mv, start, end, types[i], qRange)); + } + } + + private long doRangeQuery(boolean mv, String start, String end, String field, String[] qRange) throws Exception { + ModifiableSolrParams params = new ModifiableSolrParams(); + params.set("q", "field_" + (mv?"mv_":"sv_") + field + ":" + start + qRange[0] + " TO " + qRange[1] + end); + SolrQueryRequest req = req(params); + try { + return (long) 
h.queryAndResponse("", req).getToLog().get("hits"); + } finally { + req.close(); + } + + } + + private String[] getRandomRange(int max, String fieldName) { + Number[] values = new Number[2]; + FieldType ft = h.getCore().getLatestSchema().getField("field_" + fieldName).getType(); + if (ft.getNumberType() == null) { + assert ft instanceof StrField; + values[0] = randomInt(max); + values[1] = randomInt(max); + Arrays.sort(values, (o1, o2) -> String.valueOf(o1).compareTo(String.valueOf(o2))); + } else { + switch (ft.getNumberType()) { + case DOUBLE: + values[0] = randomDouble(max); + values[1] = randomDouble(max); + break; + case FLOAT: + values[0] = randomFloat(max); + values[1] = randomFloat(max); + break; + case INTEGER: + values[0] = randomInt(max); + values[1] = randomInt(max); + break; + case LONG: + values[0] = randomLong(max); + values[1] = randomLong(max); + break; + case DATE: + values[0] = randomMs(max); + values[1] = randomMs(max); + break; + default: + throw new AssertionError("Unexpected number type"); + + } + if (random().nextInt(100) >= 1) {// sometimes don't sort the values. 
Should result in 0 hits + Arrays.sort(values); + } + } + String[] stringValues = new String[2]; + if (rarely()) { + stringValues[0] = "*"; + } else { + if (ft.getNumberType() == NumberType.DATE) { + stringValues[0] = dateFormat.format(values[0]); + } else { + stringValues[0] = String.valueOf(values[0]); + } + } + if (rarely()) { + stringValues[1] = "*"; + } else { + if (ft.getNumberType() == NumberType.DATE) { + stringValues[1] = dateFormat.format(values[1]); + } else { + stringValues[1] = String.valueOf(values[1]); + } + } + return stringValues; + } + + + // Helper methods + private String randomDate(int cardinality) { + return dateFormat.format(new Date(randomMs(cardinality))); + } + + private List getRandomDates(int numValues, int cardinality) { + List vals = new ArrayList<>(numValues); + for (int i = 0; i < numValues;i++) { + vals.add(randomDate(cardinality)); + } + return vals; + } + + private List getRandomDoubles(int numValues, int cardinality) { + List vals = new ArrayList<>(numValues); + for (int i = 0; i < numValues;i++) { + vals.add(randomDouble(cardinality)); + } + return vals; + } + + private List getRandomFloats(int numValues, int cardinality) { + List vals = new ArrayList<>(numValues); + for (int i = 0; i < numValues;i++) { + vals.add(randomFloat(cardinality)); + } + return vals; + } + + private List getRandomInts(int numValues, int cardinality) { + List vals = new ArrayList<>(numValues); + for (int i = 0; i < numValues;i++) { + vals.add(randomInt(cardinality)); + } + return vals; + } + + private List getRandomLongs(int numValues, int cardinality) { + List vals = new ArrayList<>(numValues); + for (int i = 0; i < numValues;i++) { + vals.add(randomLong(cardinality)); + } + return vals; + } + + List toStringList(List input) { + List newList = new ArrayList<>(input.size()); + for (T element:input) { + newList.add(String.valueOf(element)); + } + return newList; + } + + long randomMs(int cardinality) { + return DATE_START_TIME_RANDOM_TEST + 
random().nextInt(cardinality) * 1000 * (random().nextBoolean()?1:-1); + } + + double randomDouble(int cardinality) { + if (rarely()) { + int num = random().nextInt(8); + if (num == 0) return Double.NEGATIVE_INFINITY; + if (num == 1) return Double.POSITIVE_INFINITY; + if (num == 2) return Double.MIN_VALUE; + if (num == 3) return Double.MAX_VALUE; + if (num == 4) return -Double.MIN_VALUE; + if (num == 5) return -Double.MAX_VALUE; + if (num == 6) return 0.0d; + if (num == 7) return -0.0d; + } + Double d = Double.NaN; + while (d.isNaN()) { + d = random().nextDouble(); + } + return d * cardinality * (random().nextBoolean()?1:-1); + } + + float randomFloat(int cardinality) { + if (rarely()) { + int num = random().nextInt(8); + if (num == 0) return Float.NEGATIVE_INFINITY; + if (num == 1) return Float.POSITIVE_INFINITY; + if (num == 2) return Float.MIN_VALUE; + if (num == 3) return Float.MAX_VALUE; + if (num == 4) return -Float.MIN_VALUE; + if (num == 5) return -Float.MAX_VALUE; + if (num == 6) return 0.0f; + if (num == 7) return -0.0f; + } + Float f = Float.NaN; + while (f.isNaN()) { + f = random().nextFloat(); + } + return f * cardinality * (random().nextBoolean()?1:-1); + } + + int randomInt(int cardinality) { + if (rarely()) { + int num = random().nextInt(2); + if (num == 0) return Integer.MAX_VALUE; + if (num == 1) return Integer.MIN_VALUE; + } + return random().nextInt(cardinality) * (random().nextBoolean()?1:-1); + } + + long randomLong(int cardinality) { + if (rarely()) { + int num = random().nextInt(2); + if (num == 0) return Long.MAX_VALUE; + if (num == 1) return Long.MIN_VALUE; + } + return randomInt(cardinality); + } + static boolean sameDocs(String msg, DocSet a, DocSet b) { DocIterator i = a.iterator(); // System.out.println("SIZES="+a.size() + "," + b.size()); From 42c6703726f52752ad9273acf61361504b4141c1 Mon Sep 17 00:00:00 2001 From: Steve Rowe Date: Wed, 26 Jul 2017 15:46:04 -0400 Subject: [PATCH 13/95] SOLR-11151: remove unused imports --- 
.../apache/solr/handler/admin/MBeansHandlerTest.java | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/solr/core/src/test/org/apache/solr/handler/admin/MBeansHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/MBeansHandlerTest.java index e47dae5fcd2..37ea5bd5183 100644 --- a/solr/core/src/test/org/apache/solr/handler/admin/MBeansHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/admin/MBeansHandlerTest.java @@ -16,13 +16,9 @@ */ package org.apache.solr.handler.admin; -import javax.management.MBeanServer; -import javax.management.ObjectName; -import java.lang.management.ManagementFactory; import java.util.ArrayList; import java.util.Collections; import java.util.List; -import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -31,12 +27,6 @@ import org.apache.solr.common.params.CommonParams; import org.apache.solr.common.util.ContentStream; import org.apache.solr.common.util.ContentStreamBase; import org.apache.solr.common.util.NamedList; -import org.apache.solr.core.SolrCore; -import org.apache.solr.metrics.SolrCoreMetricManager; -import org.apache.solr.metrics.SolrMetricManager; -import org.apache.solr.metrics.SolrMetricReporter; -import org.apache.solr.metrics.reporters.JmxObjectNameFactory; -import org.apache.solr.metrics.reporters.SolrJmxReporter; import org.apache.solr.request.LocalSolrQueryRequest; import org.junit.BeforeClass; import org.junit.Test; From f6b0f965681b43387c0c4e37952d89ac731ce782 Mon Sep 17 00:00:00 2001 From: Steve Rowe Date: Wed, 26 Jul 2017 18:24:34 -0400 Subject: [PATCH 14/95] SOLR-10760: fix Solrj tests that depended on Trie fields --- .../solr/client/solrj/SolrExampleTests.java | 26 +++++++++---------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java index 68392bf275d..3a57e5a220c 100644 --- 
a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java @@ -412,15 +412,15 @@ abstract public class SolrExampleTests extends SolrExampleTestsBase SolrQuery query = new SolrQuery(); query.set(CommonParams.QT, "/analysis/field"); - query.set(AnalysisParams.FIELD_TYPE, "int"); + query.set(AnalysisParams.FIELD_TYPE, "pint"); query.set(AnalysisParams.FIELD_VALUE, "ignore_exception"); try { client.query( query ); - Assert.fail("should have a number format exception"); + Assert.fail("should have a server exception"); } catch(SolrException ex) { - assertEquals(400, ex.code()); - assertThat(ex.getMessage(), containsString("Invalid Number: ignore_exception")); + assertEquals(500, ex.code()); + assertThat(ex.getMessage(), containsString(" Can't generate internal string in PointField. use PointField.toInternalByteRef")); } catch(Throwable t) { t.printStackTrace(); @@ -1937,14 +1937,14 @@ abstract public class SolrExampleTests extends SolrExampleTestsBase server.deleteByQuery("*:*"); ArrayList docs = new ArrayList<>(); - docs.add( makeTestDoc("id","1", "term_s", "YYYY", "group_s", "group1", "test_ti", "5", "test_tl", "10", "test_tf", "2000", "type_s", "parent")); - docs.add( makeTestDoc("id","2", "term_s","YYYY", "group_s", "group1", "test_ti", "50", "test_tl", "100", "test_tf", "200", "type_s", "child")); - docs.add( makeTestDoc("id","3", "term_s", "YYYY", "test_ti", "5000", "test_tl", "100", "test_tf", "200")); - docs.add( makeTestDoc("id","4", "term_s", "YYYY", "test_ti", "500", "test_tl", "1000", "test_tf", "2000")); - docs.add( makeTestDoc("id","5", "term_s", "YYYY", "group_s", "group2", "test_ti", "4", "test_tl", "10", "test_tf", "2000", "type_s", "parent")); - docs.add( makeTestDoc("id","6", "term_s","YYYY", "group_s", "group2", "test_ti", "10", "test_tl", "100", "test_tf", "200", "type_s", "child")); - docs.add( makeTestDoc("id","7", "term_s", "YYYY", "group_s", "group1", 
"test_ti", "1", "test_tl", "100000", "test_tf", "2000", "type_s", "child")); - docs.add( makeTestDoc("id","8", "term_s","YYYY", "group_s", "group2", "test_ti", "2", "test_tl", "100000", "test_tf", "200", "type_s", "child")); + docs.add( makeTestDoc("id","1", "term_s", "YYYY", "group_s", "group1", "test_i", "5", "test_l", "10", "test_f", "2000", "type_s", "parent")); + docs.add( makeTestDoc("id","2", "term_s","YYYY", "group_s", "group1", "test_i", "50", "test_l", "100", "test_f", "200", "type_s", "child")); + docs.add( makeTestDoc("id","3", "term_s", "YYYY", "test_i", "5000", "test_l", "100", "test_f", "200")); + docs.add( makeTestDoc("id","4", "term_s", "YYYY", "test_i", "500", "test_l", "1000", "test_f", "2000")); + docs.add( makeTestDoc("id","5", "term_s", "YYYY", "group_s", "group2", "test_i", "4", "test_l", "10", "test_f", "2000", "type_s", "parent")); + docs.add( makeTestDoc("id","6", "term_s","YYYY", "group_s", "group2", "test_i", "10", "test_l", "100", "test_f", "200", "type_s", "child")); + docs.add( makeTestDoc("id","7", "term_s", "YYYY", "group_s", "group1", "test_i", "1", "test_l", "100000", "test_f", "2000", "type_s", "child")); + docs.add( makeTestDoc("id","8", "term_s","YYYY", "group_s", "group2", "test_i", "2", "test_l", "100000", "test_f", "200", "type_s", "child")); server.add(docs); server.commit(); @@ -1953,7 +1953,7 @@ abstract public class SolrExampleTests extends SolrExampleTestsBase msParams.add("q", "*:*"); msParams.add("fq", "{!collapse field=group_s}"); msParams.add("defType", "edismax"); - msParams.add("bf", "field(test_ti)"); + msParams.add("bf", "field(test_i)"); msParams.add("expand", "true"); QueryResponse resp = server.query(msParams); From 9e8027d39a98524b473dc465d7e9a0e7a905dd73 Mon Sep 17 00:00:00 2001 From: Steve Rowe Date: Wed, 26 Jul 2017 18:42:17 -0400 Subject: [PATCH 15/95] SOLR-10760: fix another Solrj test --- .../test/org/apache/solr/client/solrj/SolrExampleTests.java | 3 +++ 1 file changed, 3 insertions(+) diff --git 
a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java index 3a57e5a220c..39158cb366d 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java @@ -421,6 +421,9 @@ abstract public class SolrExampleTests extends SolrExampleTestsBase catch(SolrException ex) { assertEquals(500, ex.code()); assertThat(ex.getMessage(), containsString(" Can't generate internal string in PointField. use PointField.toInternalByteRef")); + } + catch (SolrServerException ex) { + assertThat(ex.getMessage(), containsString(" Can't generate internal string in PointField. use PointField.toInternalByteRef")); } catch(Throwable t) { t.printStackTrace(); From 1582509d045fdf43ec4103f9c512e91e85f3e273 Mon Sep 17 00:00:00 2001 From: Tomas Fernandez Lobbe Date: Wed, 26 Jul 2017 17:06:38 -0700 Subject: [PATCH 16/95] SOLR-11070: Fix DocValuesTest.testFloatAndDoubleRangeQueryRandom Updated test to consider Infinity a match when using * in range query --- solr/core/src/test/org/apache/solr/schema/DocValuesTest.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/solr/core/src/test/org/apache/solr/schema/DocValuesTest.java b/solr/core/src/test/org/apache/solr/schema/DocValuesTest.java index 43c0006c782..dcda834dac8 100644 --- a/solr/core/src/test/org/apache/solr/schema/DocValuesTest.java +++ b/solr/core/src/test/org/apache/solr/schema/DocValuesTest.java @@ -614,8 +614,8 @@ public class DocValuesTest extends SolrTestCaseJ4 { long minSortable = toSortableLong.get(i).apply(minVal); long maxSortable = toSortableLong.get(i).apply(maxVal); - if((minInclusive && minSortable<=valSortable || !minInclusive && minSortable=valSortable || !maxInclusive && maxSortable>valSortable)) { + if((minInclusive && minSortable<=valSortable || !minInclusive && minSortable=valSortable || !maxInclusive && 
maxSortable>valSortable || (max.equals("*") && val == positiveInfinity[i]))) { counter++; tests.add("//result/doc["+counter+"]/str[@name='id'][.="+(k+1)+"]"); tests.add("//result/doc["+counter+"]/float[@name='score'][.=1.0]"); From edff113e13f2a108d2f4177da3e88cf6f3402b23 Mon Sep 17 00:00:00 2001 From: Noble Paul Date: Thu, 27 Jul 2017 15:27:58 +0930 Subject: [PATCH 17/95] SOLR-10734: AtomicUpdateProcessorFactoryTest was not truly multithreaded --- .../processor/AtomicUpdateProcessorFactory.java | 4 ++-- .../processor/AtomicUpdateProcessorFactoryTest.java | 13 ++++++------- 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/solr/core/src/java/org/apache/solr/update/processor/AtomicUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/AtomicUpdateProcessorFactory.java index 2135fb76c5b..9badb02dfd2 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/AtomicUpdateProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/AtomicUpdateProcessorFactory.java @@ -63,7 +63,7 @@ public class AtomicUpdateProcessorFactory extends UpdateRequestProcessorFactory private final static String VERSION = "_version_"; public static final String NAME = "atomic"; public final static String ATOMIC_FIELD_PREFIX = "atomic."; - private final static int MAX_ATTEMPTS = 5; + private final static int MAX_ATTEMPTS = 25; private VersionInfo vinfo; @@ -165,7 +165,7 @@ public class AtomicUpdateProcessorFactory extends UpdateRequestProcessorFactory try { super.processAdd(cmd); } catch (SolrException e) { - if (attempts++ >= MAX_ATTEMPTS) {//maximum number of attempts allowed: 5 + if (attempts++ >= MAX_ATTEMPTS) {//maximum number of attempts allowed: 25 throw new SolrException(SERVER_ERROR, "Atomic update failed after multiple attempts due to " + e.getMessage()); } diff --git a/solr/core/src/test/org/apache/solr/update/processor/AtomicUpdateProcessorFactoryTest.java 
b/solr/core/src/test/org/apache/solr/update/processor/AtomicUpdateProcessorFactoryTest.java index 45349373f87..999d70b48c7 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/AtomicUpdateProcessorFactoryTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/AtomicUpdateProcessorFactoryTest.java @@ -209,7 +209,10 @@ public class AtomicUpdateProcessorFactoryTest extends SolrTestCaseJ4 { List threads = new ArrayList<>(100); int finalCount = 0; //int_i - for (int i = 0; i < 100; i++) { + AtomicUpdateProcessorFactory factory = new AtomicUpdateProcessorFactory(); + factory.inform(h.getCore()); + + for (int i = 0; i < 10; i++) { int index = random().nextInt(5); Thread t = new Thread() { @Override @@ -229,8 +232,6 @@ public class AtomicUpdateProcessorFactoryTest extends SolrTestCaseJ4 { cmd.solrDoc.addField("int_i", index); try { - AtomicUpdateProcessorFactory factory = new AtomicUpdateProcessorFactory(); - factory.inform(h.getCore()); factory.getInstance(cmd.getReq(), new SolrQueryResponse(), new DistributedUpdateProcessor(cmd.getReq(), new SolrQueryResponse(), new RunUpdateProcessor(cmd.getReq(), null))).processAdd(cmd); @@ -238,14 +239,12 @@ public class AtomicUpdateProcessorFactoryTest extends SolrTestCaseJ4 { } } }; - t.run(); threads.add(t); + t.start(); finalCount += index; //int_i } - for (Thread thread: threads) { - thread.join(); - } + for (Thread thread: threads) thread.join(); assertU(commit()); From 741b49e839845691d2b84292280969e0a506522e Mon Sep 17 00:00:00 2001 From: Noble Paul Date: Thu, 27 Jul 2017 17:07:07 +0930 Subject: [PATCH 18/95] SOLR-10858: Make UUIDUpdateProcessorFactory as Runtime URP --- solr/CHANGES.txt | 2 + .../processor/UUIDUpdateProcessorFactory.java | 35 +++++-- .../UpdateRequestProcessorChain.java | 1 + .../UUIDUpdateProcessorFallbackTest.java | 96 +++++++++++++++---- .../src/update-request-processors.adoc | 10 ++ 5 files changed, 117 insertions(+), 27 deletions(-) diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt 
index 8446b46d23c..b493a1c9699 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -61,6 +61,8 @@ New Features * SOLR-11046: Add residuals Stream Evaluator (Joel Bernstein) +* SOLR-10858: Make UUIDUpdateProcessorFactory as Runtime URP (Amit Sarkar, noble) + Bug Fixes ---------------------- diff --git a/solr/core/src/java/org/apache/solr/update/processor/UUIDUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/UUIDUpdateProcessorFactory.java index ff8ff838239..12b96948f38 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/UUIDUpdateProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/UUIDUpdateProcessorFactory.java @@ -47,36 +47,55 @@ import org.apache.solr.schema.SchemaField; * </processor> * * + * *

- * If field name is omitted in processor configuration, + * You can also incoke the processor with request handler param(s) + * as uuid.fieldname with processor=uuid + * + * curl -X POST -H Content-Type: application/json + * http://localhost:8983/solr/test/update/json/docs?processor=uuid;ampersand;uuid.fieldName=id;ampersand;commit=true + * --data-binary {"id":"1","title": "titleA"} + * + * NOTE: The param(s) provided in request handler will override / supersede processor's config. + * + * If field name is omitted in processor configuration and not provided in request handler param(s), * then @{link org.apache.solr.schema.IndexSchema#getUniqueKeyField()} * is used as field and a new UUID will be generated * and added as the value of that field. The field type of the uniqueKeyField * must be anything which accepts a string or UUID value. + * + * + * * @see UUID */ public class UUIDUpdateProcessorFactory extends UpdateRequestProcessorFactory { + private static final String PREFIX_PARAM = "uuid."; + public static final String NAME = "uuid"; + private static final String FIELD_PARAM = "fieldName"; + + protected String fieldName = null; @SuppressWarnings("unchecked") public void init(NamedList args) { - Object obj = args.remove("fieldName"); + Object obj = args.remove(FIELD_PARAM); if (null != obj) { fieldName = obj.toString(); } - - if (0 < args.size()) { - throw new SolrException(SERVER_ERROR, - "Unexpected init param(s): '" + - args.getName(0) + "'"); - } } public UpdateRequestProcessor getInstance(SolrQueryRequest req, SolrQueryResponse rsp, UpdateRequestProcessor next ) { + String fieldName = this.fieldName; + + String fname = req.getParams().get(PREFIX_PARAM+FIELD_PARAM); + if (!StringUtils.isEmpty(fname)) { + fieldName = fname; + } + if (StringUtils.isEmpty(fieldName)) { SchemaField schemaField = req.getSchema().getUniqueKeyField(); fieldName = schemaField.getName(); diff --git 
a/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java b/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java index 6bb212cbc46..bb0c129e5b5 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java +++ b/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java @@ -321,6 +321,7 @@ public final class UpdateRequestProcessorChain implements PluginInfoInitialized public static final Map implicits = new ImmutableMap.Builder() .put(TemplateUpdateProcessorFactory.NAME, TemplateUpdateProcessorFactory.class) .put(AtomicUpdateProcessorFactory.NAME, AtomicUpdateProcessorFactory.class) + .put(UUIDUpdateProcessorFactory.NAME, UUIDUpdateProcessorFactory.class) .build(); } diff --git a/solr/core/src/test/org/apache/solr/update/processor/UUIDUpdateProcessorFallbackTest.java b/solr/core/src/test/org/apache/solr/update/processor/UUIDUpdateProcessorFallbackTest.java index 2b10953fc05..9561d5ae245 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/UUIDUpdateProcessorFallbackTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/UUIDUpdateProcessorFallbackTest.java @@ -20,6 +20,7 @@ import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.SolrInputField; import org.apache.solr.common.params.ModifiableSolrParams; +import org.apache.solr.common.params.SolrParams; import org.apache.solr.core.SolrCore; import org.apache.solr.request.LocalSolrQueryRequest; import org.apache.solr.request.SolrQueryRequest; @@ -27,24 +28,29 @@ import org.apache.solr.request.SolrRequestInfo; import org.apache.solr.response.SolrQueryResponse; import org.apache.solr.update.AddUpdateCommand; import org.junit.BeforeClass; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; +import java.lang.invoke.MethodHandles; import java.util.Date; +import java.util.List; import 
java.util.UUID; public class UUIDUpdateProcessorFallbackTest extends SolrTestCaseJ4 { + Date now = new Date(); + @BeforeClass public static void beforeClass() throws Exception { initCore("solrconfig-update-processor-chains.xml", "schema.xml"); } public void testFallbackToUnique() throws Exception { - Date now = new Date(); // get all defaults SolrInputDocument d = processAdd("default-values-fallback-to-unique", - doc(f("name", "Existing", "Values"))); + doc(f("name", "Existing", "Values"))); assertNotNull(d); @@ -62,18 +68,18 @@ public class UUIDUpdateProcessorFallbackTest extends SolrTestCaseJ4 { // defaults already specified d = processAdd("default-values-fallback-to-unique", - doc(f("timestamp", now), - f("id", "550e8400-e29b-41d4-a716-446655440000"), - f("processor_default_s", "I HAVE A VALUE"), - f("processor_default_i", 12345), - f("name", "Existing", "Values"))); + doc(f("timestamp", now), + f("id", "550e8400-e29b-41d4-a716-446655440000"), + f("processor_default_s", "I HAVE A VALUE"), + f("processor_default_i", 12345), + f("name", "Existing", "Values"))); assertNotNull(d); assertEquals("550e8400-e29b-41d4-a716-446655440000", - d.getFieldValue("id")); + d.getFieldValue("id")); - // defaults already specified + // defaults already specified //both config and request param not passed. 
d = processAdd("default-values-fallback-to-unique-automatically", doc(f("timestamp", now), f("id", "550e8400-e29b-41d4-a716-446655440000"), @@ -88,8 +94,52 @@ public class UUIDUpdateProcessorFallbackTest extends SolrTestCaseJ4 { assertEquals(121, d.getFieldValue("processor_default_i")); } + public void testRequesTParams() throws Exception { + SolrInputDocument d = processAdd(null, + doc(f("name", "Existing", "Values"), f( "id","75765")), params("processor", "uuid", "uuid.fieldName", "id_s")); - /** + assertNotNull(d); + + assertNotNull(d.getFieldValue("id_s")); + assertNotNull(UUID.fromString(d.getFieldValue("id_s").toString())); + + + + // defaults already specified + d = processAdd(null, + doc(f("timestamp", now), + f("id", "454435"), + f("id_s", "550e8400-e29b-41d4-a716-446655440000"), + f("processor_default_s", "I HAVE A VALUE"), + f("processor_default_i", 121), + f("name", "Existing", "Values")) + , params("processor", "uuid", "uuid.fieldName", "id_s")); + + assertNotNull(d); + + assertEquals("550e8400-e29b-41d4-a716-446655440000", + d.getFieldValue("id_s")); + assertEquals(121, d.getFieldValue("processor_default_i")); + } + + public void testProcessorPrefixReqParam() throws Exception { + List processors = UpdateRequestProcessorChain.getReqProcessors("uuid", h.getCore()); + UpdateRequestProcessorFactory processorFactory = processors.get(0); + assertTrue(processorFactory instanceof UUIDUpdateProcessorFactory); + + SolrQueryResponse rsp = new SolrQueryResponse(); + SolrQueryRequest req = new LocalSolrQueryRequest(h.getCore(), new ModifiableSolrParams()); + AddUpdateCommand cmd = new AddUpdateCommand(req); + cmd.solrDoc = new SolrInputDocument(); + cmd.solrDoc.addField("random_s", "random_val"); + + processorFactory.getInstance(req, rsp, null).processAdd(cmd); + assertNotNull(cmd.solrDoc); + assertNotNull(cmd.solrDoc.get("id")); + assertNotNull(cmd.solrDoc.get("id").getValue()); + } + + /** * Convenience method for building up SolrInputDocuments */ 
SolrInputDocument doc(SolrInputField... fields) { @@ -100,7 +150,7 @@ public class UUIDUpdateProcessorFallbackTest extends SolrTestCaseJ4 { return d; } - /** + /** * Convenience method for building up SolrInputFields */ SolrInputField field(String name, float boost, Object... values) { @@ -111,7 +161,7 @@ public class UUIDUpdateProcessorFallbackTest extends SolrTestCaseJ4 { return f; } - /** + /** * Convenience method for building up SolrInputFields with default boost */ SolrInputField f(String name, Object... values) { @@ -120,22 +170,30 @@ public class UUIDUpdateProcessorFallbackTest extends SolrTestCaseJ4 { /** - * Runs a document through the specified chain, and returns the final - * document used when the chain is completed (NOTE: some chains may + * Runs a document through the specified chain, and returns the final + * document used when the chain is completed (NOTE: some chains may * modify the document in place */ - SolrInputDocument processAdd(final String chain, - final SolrInputDocument docIn) - throws IOException { + + SolrInputDocument processAdd(final String chain, + final SolrInputDocument docIn) throws IOException { + return processAdd(chain, docIn, params()); + } + + SolrInputDocument processAdd(final String chain, + final SolrInputDocument docIn, SolrParams params) + throws IOException { SolrCore core = h.getCore(); - UpdateRequestProcessorChain pc = core.getUpdateProcessingChain(chain); + UpdateRequestProcessorChain pc = chain == null ? 
+ core.getUpdateProcessorChain(params) : + core.getUpdateProcessingChain(chain); assertNotNull("No Chain named: " + chain, pc); SolrQueryResponse rsp = new SolrQueryResponse(); SolrQueryRequest req = new LocalSolrQueryRequest - (core, new ModifiableSolrParams()); + (core, params); try { SolrRequestInfo.setRequestInfo(new SolrRequestInfo(req,rsp)); AddUpdateCommand cmd = new AddUpdateCommand(req); diff --git a/solr/solr-ref-guide/src/update-request-processors.adoc b/solr/solr-ref-guide/src/update-request-processors.adoc index a11d74ad34a..981c2e1dedf 100644 --- a/solr/solr-ref-guide/src/update-request-processors.adoc +++ b/solr/solr-ref-guide/src/update-request-processors.adoc @@ -398,3 +398,13 @@ The above parameters convert a normal `update` operation on * `field2` to an atomic `set` operation * `field3` to an atomic `inc` operation * `field4` to an atomic `remove` operation + +==== UUIDUpdateProcessorFactory + +Name of the processor is `uuid` . Use it to add a UUID to a field +example: + +[source,bash] +---- +processor=uuid&uuid.fieldName=somefield_name +---- From d90efebe75ed7fa3e376ae46529d4781aa940980 Mon Sep 17 00:00:00 2001 From: Noble Paul Date: Thu, 27 Jul 2017 17:07:51 +0930 Subject: [PATCH 19/95] SOLR-10858: ununsed imports --- .../processor/UUIDUpdateProcessorFallbackTest.java | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/solr/core/src/test/org/apache/solr/update/processor/UUIDUpdateProcessorFallbackTest.java b/solr/core/src/test/org/apache/solr/update/processor/UUIDUpdateProcessorFallbackTest.java index 9561d5ae245..e57e0ef2a13 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/UUIDUpdateProcessorFallbackTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/UUIDUpdateProcessorFallbackTest.java @@ -16,6 +16,11 @@ */ package org.apache.solr.update.processor; +import java.io.IOException; +import java.util.Date; +import java.util.List; +import java.util.UUID; + import 
org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.SolrInputField; @@ -28,14 +33,6 @@ import org.apache.solr.request.SolrRequestInfo; import org.apache.solr.response.SolrQueryResponse; import org.apache.solr.update.AddUpdateCommand; import org.junit.BeforeClass; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.lang.invoke.MethodHandles; -import java.util.Date; -import java.util.List; -import java.util.UUID; public class UUIDUpdateProcessorFallbackTest extends SolrTestCaseJ4 { From 59db1a86622c11fbf2facbe10b26e1f18a4e773d Mon Sep 17 00:00:00 2001 From: Steve Rowe Date: Thu, 27 Jul 2017 11:28:26 -0400 Subject: [PATCH 20/95] SOLR-10926: Increase the odds of randomly choosing point fields in our SolrTestCaseJ4 numeric type randomization --- solr/CHANGES.txt | 3 +++ .../src/java/org/apache/solr/SolrTestCaseJ4.java | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index b493a1c9699..0f437a9ea25 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -567,6 +567,9 @@ Other Changes * SOLR-11056: Add random range query test that compares results across Trie*, *Point and DocValue-only fields (Tomás Fernández Löbbe) +* SOLR-10926: Increase the odds of randomly choosing point fields in our SolrTestCaseJ4 numeric type randomization. + (hossman, Steve Rowe) + ================== 6.7.0 ================== Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release. 
diff --git a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java index 6e23d45d303..0446093324b 100644 --- a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java +++ b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java @@ -2705,7 +2705,7 @@ public abstract class SolrTestCaseJ4 extends LuceneTestCase { System.setProperty(NUMERIC_DOCVALUES_SYSPROP, ""+useDV); // consume a consistent amount of random data even if sysprop/annotation is set - final boolean randUsePoints = random().nextBoolean(); + final boolean randUsePoints = 0 != random().nextInt(5); // 80% likelihood final String usePointsStr = System.getProperty(USE_NUMERIC_POINTS_SYSPROP); final boolean usePoints = (null == usePointsStr) ? randUsePoints : Boolean.parseBoolean(usePointsStr); From ec0c11c703c24a565e9745ee9ebfeddc3e576118 Mon Sep 17 00:00:00 2001 From: David Smiley Date: Thu, 27 Jul 2017 18:08:08 -0400 Subject: [PATCH 21/95] SOLR-11093: small improvements to LongSet's iterator --- .../java/org/apache/solr/util/LongSet.java | 40 ++++++++++--------- 1 file changed, 21 insertions(+), 19 deletions(-) diff --git a/solr/core/src/java/org/apache/solr/util/LongSet.java b/solr/core/src/java/org/apache/solr/util/LongSet.java index e649e04177e..c204992d6da 100644 --- a/solr/core/src/java/org/apache/solr/util/LongSet.java +++ b/solr/core/src/java/org/apache/solr/util/LongSet.java @@ -18,6 +18,8 @@ package org.apache.solr.util; +import java.util.NoSuchElementException; + /** Collects long values in a hash set (closed hashing on power-of-two sized long[]) * @lucene.internal */ @@ -100,34 +102,34 @@ public class LongSet { } - /** Returns an iterator over the values in the set. - * hasNext() must return true for next() to return a valid value. - */ + /** Returns an iterator over the values in the set. 
*/ public LongIterator iterator() { return new LongIterator() { - private boolean hasNext = zeroCount > 0; - private int i = -1; - private long value = 0; + private int remainingValues = cardinality(); + private int valsIdx = 0; @Override public boolean hasNext() { - if (hasNext) { - // this is only executed the first time for the special case 0 value - return true; - } - while (++i < vals.length) { - value = vals[i]; - if (value != 0) { - return hasNext = true; - } - } - return false; + return remainingValues > 0; } @Override public long next() { - hasNext = false; - return value; + if (!hasNext()) { + throw new NoSuchElementException(); + } + remainingValues--; + + if (remainingValues == 0 && zeroCount > 0) { + return 0; + } + + while (true) { // guaranteed to find another value if we get here + long value = vals[valsIdx++]; + if (value != 0) { + return value; + } + } } }; From ac2384426d09c54f9b8247ca89dc657d33077f5a Mon Sep 17 00:00:00 2001 From: Steve Rowe Date: Thu, 27 Jul 2017 19:54:14 -0400 Subject: [PATCH 22/95] SOLR-10858: remove unused imports --- .../solr/update/processor/UUIDUpdateProcessorFactory.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/solr/core/src/java/org/apache/solr/update/processor/UUIDUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/UUIDUpdateProcessorFactory.java index 12b96948f38..15ce878566f 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/UUIDUpdateProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/UUIDUpdateProcessorFactory.java @@ -20,8 +20,6 @@ import java.util.UUID; import java.util.Locale; import org.apache.commons.lang.StringUtils; -import org.apache.solr.common.SolrException; -import static org.apache.solr.common.SolrException.ErrorCode.*; import org.apache.solr.common.util.NamedList; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.response.SolrQueryResponse; From b79c932a9f924f158f21bb23019685fec602d75e Mon Sep 17 
00:00:00 2001 From: Steve Rowe Date: Thu, 27 Jul 2017 22:17:06 -0400 Subject: [PATCH 23/95] SOLR-10846: ExternalFileField/FloatFieldSource should throw a clear exception on initialization with a Points-based keyField, which is not supported --- solr/CHANGES.txt | 3 ++ .../apache/solr/schema/ExternalFileField.java | 6 +++ .../solr/collection1/conf/bad-schema-eff.xml | 44 +++++++++++++++++++ .../solr/collection1/conf/schema11.xml | 5 ++- .../schema/ExternalFileFieldSortTest.java | 21 +++++---- .../search/function/TestFunctionQuery.java | 5 +-- 6 files changed, 71 insertions(+), 13 deletions(-) create mode 100644 solr/core/src/test-files/solr/collection1/conf/bad-schema-eff.xml diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index 0f437a9ea25..79f679784e4 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -569,6 +569,9 @@ Other Changes * SOLR-10926: Increase the odds of randomly choosing point fields in our SolrTestCaseJ4 numeric type randomization. (hossman, Steve Rowe) + +* SOLR-10846: ExternalFileField/FloatFieldSource should throw a clear exception on initialization with + a Points-based keyField, which is not supported. 
(hossman, Steve Rowe) ================== 6.7.0 ================== diff --git a/solr/core/src/java/org/apache/solr/schema/ExternalFileField.java b/solr/core/src/java/org/apache/solr/schema/ExternalFileField.java index da4b4db1225..db2d11b8433 100644 --- a/solr/core/src/java/org/apache/solr/schema/ExternalFileField.java +++ b/solr/core/src/java/org/apache/solr/schema/ExternalFileField.java @@ -22,6 +22,7 @@ import java.util.Map; import org.apache.lucene.index.IndexableField; import org.apache.lucene.queries.function.ValueSource; import org.apache.lucene.search.SortField; +import org.apache.solr.common.SolrException; import org.apache.solr.response.TextResponseWriter; import org.apache.solr.search.QParser; import org.apache.solr.search.function.FileFloatSource; @@ -122,5 +123,10 @@ public class ExternalFileField extends FieldType implements SchemaAware { @Override public void inform(IndexSchema schema) { this.schema = schema; + + if (keyFieldName != null && schema.getFieldType(keyFieldName).isPointField()) { + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, + "keyField '" + keyFieldName + "' has a Point field type, which is not supported."); + } } } diff --git a/solr/core/src/test-files/solr/collection1/conf/bad-schema-eff.xml b/solr/core/src/test-files/solr/collection1/conf/bad-schema-eff.xml new file mode 100644 index 00000000000..da662819f77 --- /dev/null +++ b/solr/core/src/test-files/solr/collection1/conf/bad-schema-eff.xml @@ -0,0 +1,44 @@ + + + + + + + + + + + + + id + + + + + + + + + + + + + + diff --git a/solr/core/src/test-files/solr/collection1/conf/schema11.xml b/solr/core/src/test-files/solr/collection1/conf/schema11.xml index 674c25f8514..8b317b046f1 100644 --- a/solr/core/src/test-files/solr/collection1/conf/schema11.xml +++ b/solr/core/src/test-files/solr/collection1/conf/schema11.xml @@ -276,7 +276,10 @@ valued. 
--> - + + + diff --git a/solr/core/src/test/org/apache/solr/schema/ExternalFileFieldSortTest.java b/solr/core/src/test/org/apache/solr/schema/ExternalFileFieldSortTest.java index 1438cac1873..632b4137250 100644 --- a/solr/core/src/test/org/apache/solr/schema/ExternalFileFieldSortTest.java +++ b/solr/core/src/test/org/apache/solr/schema/ExternalFileFieldSortTest.java @@ -18,7 +18,7 @@ package org.apache.solr.schema; import org.apache.commons.io.FileUtils; import org.apache.solr.SolrTestCaseJ4; -import org.junit.BeforeClass; +import org.apache.solr.common.SolrException; import org.junit.Test; import java.io.File; @@ -26,12 +26,6 @@ import java.io.IOException; public class ExternalFileFieldSortTest extends SolrTestCaseJ4 { - @BeforeClass - public static void beforeTests() throws Exception { - initCore("solrconfig-basic.xml", "schema-eff.xml"); - updateExternalFile(); - } - static void updateExternalFile() throws IOException { final String testHome = SolrTestCaseJ4.getFile("solr/collection1").getParent(); String filename = "external_eff"; @@ -48,7 +42,10 @@ public class ExternalFileFieldSortTest extends SolrTestCaseJ4 { } @Test - public void testSort() { + public void testSort() throws Exception { + initCore("solrconfig-basic.xml", "schema-eff.xml"); + updateExternalFile(); + addDocuments(); assertQ("query", req("q", "*:*", "sort", "eff asc"), @@ -56,4 +53,12 @@ public class ExternalFileFieldSortTest extends SolrTestCaseJ4 { "//result/doc[position()=2]/str[.='1']", "//result/doc[position()=10]/str[.='8']"); } + + @Test + public void testPointKeyFieldType() throws Exception { + // This one should fail though, no "node" parameter specified + SolrException e = expectThrows(SolrException.class, + () -> initCore("solrconfig-basic.xml", "bad-schema-eff.xml")); + assertTrue(e.getMessage().contains("has a Point field type, which is not supported.")); + } } diff --git a/solr/core/src/test/org/apache/solr/search/function/TestFunctionQuery.java 
b/solr/core/src/test/org/apache/solr/search/function/TestFunctionQuery.java index 4cee94b86bb..afc8a0dc5a0 100644 --- a/solr/core/src/test/org/apache/solr/search/function/TestFunctionQuery.java +++ b/solr/core/src/test/org/apache/solr/search/function/TestFunctionQuery.java @@ -293,11 +293,8 @@ public class TestFunctionQuery extends SolrTestCaseJ4 { @Test public void testExternalFileFieldNumericKey() throws Exception { - assumeFalse("SOLR-10846: ExternalFileField/FileFloatSource throws NPE if keyField is Points based", - Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)); - final String extField = "eff_trie"; - final String keyField = "eff_ti"; + final String keyField = "eff_tint"; assertU(adoc("id", "991", keyField, "91")); assertU(adoc("id", "992", keyField, "92")); assertU(adoc("id", "993", keyField, "93")); From 51b68404883f9cff4a130ebc378adb04dbd73a3e Mon Sep 17 00:00:00 2001 From: Steve Rowe Date: Fri, 28 Jul 2017 11:07:44 -0400 Subject: [PATCH 24/95] SOLR-11155: /analysis/field and /analysis/document requests should support points fields. --- solr/CHANGES.txt | 3 ++ .../handler/AnalysisRequestHandlerBase.java | 11 +++++- .../org/apache/solr/schema/FieldType.java | 11 +++++- .../DocumentAnalysisRequestHandlerTest.java | 37 ++++++++++++------- .../FieldAnalysisRequestHandlerTest.java | 28 ++++++++++---- .../solr/client/solrj/SolrExampleTests.java | 9 ++--- 6 files changed, 69 insertions(+), 30 deletions(-) diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index 79f679784e4..9c1a5d12118 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -572,6 +572,9 @@ Other Changes * SOLR-10846: ExternalFileField/FloatFieldSource should throw a clear exception on initialization with a Points-based keyField, which is not supported. (hossman, Steve Rowe) + +* SOLR-11155: /analysis/field and /analysis/document requests should support points fields. 
+ (Jason Gerlowski, Steve Rowe) ================== 6.7.0 ================== diff --git a/solr/core/src/java/org/apache/solr/handler/AnalysisRequestHandlerBase.java b/solr/core/src/java/org/apache/solr/handler/AnalysisRequestHandlerBase.java index 0770bd40508..579e5ef668c 100644 --- a/solr/core/src/java/org/apache/solr/handler/AnalysisRequestHandlerBase.java +++ b/solr/core/src/java/org/apache/solr/handler/AnalysisRequestHandlerBase.java @@ -33,6 +33,7 @@ import org.apache.commons.lang.ArrayUtils; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; +import org.apache.lucene.analysis.tokenattributes.BytesTermAttribute; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; @@ -248,8 +249,14 @@ public abstract class AnalysisRequestHandlerBase extends RequestHandlerBase { for (int i = 0; i < tokens.length; i++) { AttributeSource token = tokens[i]; final NamedList tokenNamedList = new SimpleOrderedMap<>(); - final TermToBytesRefAttribute termAtt = token.getAttribute(TermToBytesRefAttribute.class); - BytesRef rawBytes = termAtt.getBytesRef(); + final BytesRef rawBytes; + if (token.hasAttribute(BytesTermAttribute.class)) { + final BytesTermAttribute bytesAtt = token.getAttribute(BytesTermAttribute.class); + rawBytes = bytesAtt.getBytesRef(); + } else { + final TermToBytesRefAttribute termAtt = token.getAttribute(TermToBytesRefAttribute.class); + rawBytes = termAtt.getBytesRef(); + } final String text = fieldType.indexedToReadable(rawBytes, new CharsRefBuilder()).toString(); tokenNamedList.add("text", text); diff --git a/solr/core/src/java/org/apache/solr/schema/FieldType.java b/solr/core/src/java/org/apache/solr/schema/FieldType.java index 26fc6578064..3d38844d482 100644 --- a/solr/core/src/java/org/apache/solr/schema/FieldType.java 
+++ b/solr/core/src/java/org/apache/solr/schema/FieldType.java @@ -30,6 +30,7 @@ import java.util.Set; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.Tokenizer; +import org.apache.lucene.analysis.tokenattributes.BytesTermAttribute; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; import org.apache.lucene.analysis.util.CharFilterFactory; @@ -479,14 +480,20 @@ public abstract class FieldType extends FieldProperties { Tokenizer ts = new Tokenizer() { final char[] cbuf = new char[maxChars]; final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class); + final BytesTermAttribute bytesAtt = isPointField() ? addAttribute(BytesTermAttribute.class) : null; final OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class); @Override public boolean incrementToken() throws IOException { clearAttributes(); int n = input.read(cbuf,0,maxChars); if (n<=0) return false; - String s = toInternal(new String(cbuf,0,n)); - termAtt.setEmpty().append(s); + if (isPointField()) { + BytesRef b = ((PointField)FieldType.this).toInternalByteRef(new String(cbuf, 0, n)); + bytesAtt.setBytesRef(b); + } else { + String s = toInternal(new String(cbuf, 0, n)); + termAtt.setEmpty().append(s); + } offsetAtt.setOffset(correctOffset(0),correctOffset(n)); return true; } diff --git a/solr/core/src/test/org/apache/solr/handler/DocumentAnalysisRequestHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/DocumentAnalysisRequestHandlerTest.java index d3b0ab0e78f..7f195263054 100644 --- a/solr/core/src/test/org/apache/solr/handler/DocumentAnalysisRequestHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/DocumentAnalysisRequestHandlerTest.java @@ -210,6 +210,7 @@ public class DocumentAnalysisRequestHandlerTest extends AnalysisRequestHandlerTe document.addField("id", 1); document.addField("whitetok", "Jumping Jack"); document.addField("text", "The Fox Jumped 
Over The Dogs"); + document.addField("number_l_p", 88L); DocumentAnalysisRequest request = new DocumentAnalysisRequest() .setQuery("JUMPING") @@ -221,35 +222,45 @@ public class DocumentAnalysisRequestHandlerTest extends AnalysisRequestHandlerTe NamedList>> documentResult = (NamedList>>) result.get("1"); assertNotNull("An analysis for document with key '1' should be returned", documentResult); - // the id field - NamedList> idResult = documentResult.get("id"); - assertNotNull("an analysis for the 'id' field should be returned", idResult); - NamedList queryResult; List tokenList; NamedList indexResult; NamedList> valueResult; + String name; - /*** Much of this test seems invalid for a numeric "id" field - NamedList queryResult = idResult.get("query"); + // the id field + NamedList> idResult = documentResult.get("id"); + assertNotNull("an analysis for the 'id' field should be returned", idResult); + queryResult = idResult.get("query"); assertEquals("Only the default analyzer should be applied", 1, queryResult.size()); - String name = queryResult.getName(0); + name = queryResult.getName(0); assertTrue("Only the default analyzer should be applied", name.matches("org.apache.solr.schema.FieldType\\$DefaultAnalyzer.*")); - List tokenList = (List) queryResult.getVal(0); + tokenList = (List) queryResult.getVal(0); assertEquals("Query has only one token", 1, tokenList.size()); assertToken(tokenList.get(0), new TokenInfo("JUMPING", null, "word", 0, 7, 1, new int[]{1}, null, false)); - NamedList indexResult = idResult.get("index"); - + indexResult = idResult.get("index"); assertEquals("The id field has only a single value", 1, indexResult.size()); - NamedList> valueResult = (NamedList>) indexResult.get("1"); + valueResult = (NamedList>) indexResult.get("1"); assertEquals("Only the default analyzer should be applied", 1, valueResult.size()); name = queryResult.getName(0); assertTrue("Only the default analyzer should be applied", 
name.matches("org.apache.solr.schema.FieldType\\$DefaultAnalyzer.*")); tokenList = valueResult.getVal(0); assertEquals("The 'id' field value has only one token", 1, tokenList.size()); assertToken(tokenList.get(0), new TokenInfo("1", null, "word", 0, 1, 1, new int[]{1}, null, false)); - ***/ - + + // the number_l_p field + NamedList> number_l_p_Result = documentResult.get("number_l_p"); + assertNotNull("an analysis for the 'number_l_p' field should be returned", number_l_p_Result); + indexResult = number_l_p_Result.get("index"); + assertEquals("The number_l_p field has only a single value", 1, indexResult.size()); + valueResult = (NamedList>) indexResult.get("88"); + assertEquals("Only the default analyzer should be applied", 1, valueResult.size()); + name = queryResult.getName(0); + assertTrue("Only the default analyzer should be applied", name.matches("org.apache.solr.schema.FieldType\\$DefaultAnalyzer.*")); + tokenList = valueResult.getVal(0); + assertEquals("The 'number_l_p' field value has only one token", 1, tokenList.size()); + assertToken(tokenList.get(0), new TokenInfo("88", null, "word", 0, 2, 1, new int[]{1}, null, false)); + // the name field NamedList> whitetokResult = documentResult.get("whitetok"); assertNotNull("an analysis for the 'whitetok' field should be returned", whitetokResult); diff --git a/solr/core/src/test/org/apache/solr/handler/FieldAnalysisRequestHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/FieldAnalysisRequestHandlerTest.java index 2b2fba9a13e..260dc4a86a5 100644 --- a/solr/core/src/test/org/apache/solr/handler/FieldAnalysisRequestHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/handler/FieldAnalysisRequestHandlerTest.java @@ -16,6 +16,11 @@ */ package org.apache.solr.handler; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.MockTokenizer; import 
org.apache.lucene.analysis.TokenFilter; @@ -29,12 +34,12 @@ import org.apache.lucene.analysis.util.TokenFilterFactory; import org.apache.lucene.analysis.util.TokenizerFactory; import org.apache.lucene.util.AttributeFactory; import org.apache.solr.analysis.TokenizerChain; +import org.apache.solr.client.solrj.request.FieldAnalysisRequest; import org.apache.solr.common.SolrException; import org.apache.solr.common.params.AnalysisParams; import org.apache.solr.common.params.CommonParams; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.util.NamedList; -import org.apache.solr.client.solrj.request.FieldAnalysisRequest; import org.apache.solr.request.LocalSolrQueryRequest; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.schema.FieldType; @@ -43,11 +48,6 @@ import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; - /** * A test for {@link FieldAnalysisRequestHandler}. 
* @@ -55,7 +55,7 @@ import java.util.List; * @since solr 1.4 */ public class FieldAnalysisRequestHandlerTest extends AnalysisRequestHandlerTestBase { - + private FieldAnalysisRequestHandler handler; @Override @@ -69,6 +69,20 @@ public class FieldAnalysisRequestHandlerTest extends AnalysisRequestHandlerTestB public static void beforeClass() throws Exception { initCore("solrconfig.xml", "schema.xml"); } + + @Test + public void testPointField() throws Exception { + FieldAnalysisRequest request = new FieldAnalysisRequest(); + request.addFieldType("pint"); + request.setFieldValue("5"); + + NamedList nl = handler.handleAnalysisRequest(request, h.getCore().getLatestSchema()); + NamedList pintNL = (NamedList)nl.get("field_types").get("pint"); + NamedList indexNL = (NamedList)pintNL.get("index"); + ArrayList analyzerNL = (ArrayList)indexNL.get("org.apache.solr.schema.FieldType$DefaultAnalyzer$1"); + String text = (String)((NamedList)analyzerNL.get(0)).get("text"); + assertEquals("5", text); + } /** * Tests the {@link FieldAnalysisRequestHandler#resolveAnalysisRequest(org.apache.solr.request.SolrQueryRequest)} diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java index 39158cb366d..63bd72c6fa3 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java @@ -416,14 +416,11 @@ abstract public class SolrExampleTests extends SolrExampleTestsBase query.set(AnalysisParams.FIELD_VALUE, "ignore_exception"); try { client.query( query ); - Assert.fail("should have a server exception"); + Assert.fail("should have a number format exception"); } catch(SolrException ex) { - assertEquals(500, ex.code()); - assertThat(ex.getMessage(), containsString(" Can't generate internal string in PointField. 
use PointField.toInternalByteRef")); - } - catch (SolrServerException ex) { - assertThat(ex.getMessage(), containsString(" Can't generate internal string in PointField. use PointField.toInternalByteRef")); + assertEquals(400, ex.code()); + assertThat(ex.getMessage(), containsString("Invalid Number: ignore_exception")); } catch(Throwable t) { t.printStackTrace(); From 936bb4abe9847cd68b32561bfcb4e2d2991ad6ae Mon Sep 17 00:00:00 2001 From: Christine Poerschke Date: Tue, 25 Jul 2017 18:04:18 +0100 Subject: [PATCH 25/95] Remove unused local variables in two tests. --- .../src/test/org/apache/solr/metrics/SolrMetricManagerTest.java | 1 - .../solr/metrics/reporters/solr/SolrCloudReportersTest.java | 1 - 2 files changed, 2 deletions(-) diff --git a/solr/core/src/test/org/apache/solr/metrics/SolrMetricManagerTest.java b/solr/core/src/test/org/apache/solr/metrics/SolrMetricManagerTest.java index 5739e53454e..87fc883a55d 100644 --- a/solr/core/src/test/org/apache/solr/metrics/SolrMetricManagerTest.java +++ b/solr/core/src/test/org/apache/solr/metrics/SolrMetricManagerTest.java @@ -192,7 +192,6 @@ public class SolrMetricManagerTest extends SolrTestCaseJ4 { @Test public void testReporters() throws Exception { - Random r = random(); SolrResourceLoader loader = new SolrResourceLoader(); SolrMetricManager metricManager = new SolrMetricManager(); diff --git a/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrCloudReportersTest.java b/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrCloudReportersTest.java index 41848178ab2..5b6dc099905 100644 --- a/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrCloudReportersTest.java +++ b/solr/core/src/test/org/apache/solr/metrics/reporters/solr/SolrCloudReportersTest.java @@ -77,7 +77,6 @@ public class SolrCloudReportersTest extends SolrCloudTestCase { String leaderRegistryName = core.getCoreMetricManager().getLeaderRegistryName(); String coreName = core.getName(); String collectionName = 
core.getCoreDescriptor().getCollectionName(); - String coreNodeName = core.getCoreDescriptor().getCloudDescriptor().getCoreNodeName(); String replicaName = coreName.substring(coreName.indexOf("_replica_") + 1); String shardId = core.getCoreDescriptor().getCloudDescriptor().getShardId(); From 68623e975c7b98d62f5dc1a2cdf04be6512658be Mon Sep 17 00:00:00 2001 From: Steve Rowe Date: Fri, 28 Jul 2017 13:52:13 -0400 Subject: [PATCH 26/95] SOLR-10756: Undeprecate ZkStateReader.updateClusterState(), mark as @lucene.internal, and rename to forciblyRefreshAllClusterStateSlow(). --- solr/CHANGES.txt | 3 +++ solr/core/src/java/org/apache/solr/cloud/Overseer.java | 2 +- .../solr/cloud/TestTolerantUpdateProcessorCloud.java | 2 +- .../java/org/apache/solr/common/cloud/ZkStateReader.java | 7 ++++--- 4 files changed, 9 insertions(+), 5 deletions(-) diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index 9c1a5d12118..d572cdd50b9 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -575,6 +575,9 @@ Other Changes * SOLR-11155: /analysis/field and /analysis/document requests should support points fields. (Jason Gerlowski, Steve Rowe) + +* SOLR-10756: Undeprecate ZkStateReader.updateClusterState(), mark as @lucene.internal, and rename to + forciblyRefreshAllClusterStateSlow(). 
================== 6.7.0 ================== diff --git a/solr/core/src/java/org/apache/solr/cloud/Overseer.java b/solr/core/src/java/org/apache/solr/cloud/Overseer.java index 521434af546..7dd85bc8446 100644 --- a/solr/core/src/java/org/apache/solr/cloud/Overseer.java +++ b/solr/core/src/java/org/apache/solr/cloud/Overseer.java @@ -142,7 +142,7 @@ public class Overseer implements Closeable { //TODO consider removing 'refreshClusterState' and simply check if clusterState is null if (refreshClusterState) { try { - reader.updateClusterState(); + reader.forciblyRefreshAllClusterStateSlow(); clusterState = reader.getClusterState(); zkStateWriter = new ZkStateWriter(reader, stats); refreshClusterState = false; diff --git a/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java b/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java index f66f8925737..864172082df 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java @@ -132,7 +132,7 @@ public class TestTolerantUpdateProcessorCloud extends SolrCloudTestCase { String nodeKey = jettyURL.getHost() + ":" + jettyURL.getPort() + jettyURL.getPath().replace("/","_"); urlMap.put(nodeKey, jettyURL.toString()); } - zkStateReader.updateClusterState(); + zkStateReader.forceUpdateCollection(COLLECTION_NAME); ClusterState clusterState = zkStateReader.getClusterState(); for (Slice slice : clusterState.getSlices(COLLECTION_NAME)) { String shardName = slice.getName(); diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java b/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java index 1b035347993..c01f15b26bd 100644 --- a/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java +++ b/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java @@ -261,10 +261,11 @@ public class ZkStateReader implements Closeable { /** 
* Forcibly refresh cluster state from ZK. Do this only to avoid race conditions because it's expensive. * - * @deprecated Don't call this, call {@link #forceUpdateCollection(String)} on a single collection if you must. + * It is cheaper to call {@link #forceUpdateCollection(String)} on a single collection if you must. + * + * @lucene.internal */ - @Deprecated - public void updateClusterState() throws KeeperException, InterruptedException { + public void forciblyRefreshAllClusterStateSlow() throws KeeperException, InterruptedException { synchronized (getUpdateLock()) { if (clusterState == null) { // Never initialized, just run normal initialization. From 232fb33fd1ffcfd30fb8e5652f5ebf4153e0e190 Mon Sep 17 00:00:00 2001 From: Steve Rowe Date: Fri, 28 Jul 2017 13:53:16 -0400 Subject: [PATCH 27/95] SOLR-10756: add attribution to CHANGES entry --- solr/CHANGES.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index d572cdd50b9..cb436d42c3e 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -577,7 +577,7 @@ Other Changes (Jason Gerlowski, Steve Rowe) * SOLR-10756: Undeprecate ZkStateReader.updateClusterState(), mark as @lucene.internal, and rename to - forciblyRefreshAllClusterStateSlow(). + forciblyRefreshAllClusterStateSlow(). 
(hossman, shalin, Steve Rowe) ================== 6.7.0 ================== From 924b3fd47290a8c0fb75ab9d6c7b23fa06463068 Mon Sep 17 00:00:00 2001 From: Ishan Chattopadhyaya Date: Sun, 30 Jul 2017 02:43:36 +0530 Subject: [PATCH 28/95] SOLR-11154: Child documents' return fields now include useDocValuesAsStored fields --- solr/CHANGES.txt | 3 ++ .../transform/ChildDocTransformerFactory.java | 11 +++++++ .../transform/TestChildDocTransformer.java | 33 +++++++++++++++++++ 3 files changed, 47 insertions(+) diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index cb436d42c3e..e5abb3bc3b7 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -397,6 +397,9 @@ Bug Fixes * SOLR-11151: SolrInfoMBeanHandler.getDiff() ADD case non-functional: NPE when a bean value goes from null -> non-null. (Steve Rowe) +* SOLR-11154: Child documents' return fields now include useDocValuesAsStored fields (Mohammed Sheeri Shaketi Nauage via + Ishan Chattopadhyaya) + Optimizations ---------------------- diff --git a/solr/core/src/java/org/apache/solr/response/transform/ChildDocTransformerFactory.java b/solr/core/src/java/org/apache/solr/response/transform/ChildDocTransformerFactory.java index 45b0efc6632..ff7c0614f3d 100644 --- a/solr/core/src/java/org/apache/solr/response/transform/ChildDocTransformerFactory.java +++ b/solr/core/src/java/org/apache/solr/response/transform/ChildDocTransformerFactory.java @@ -17,6 +17,7 @@ package org.apache.solr.response.transform; import java.io.IOException; +import java.util.Set; import org.apache.lucene.document.Document; import org.apache.lucene.index.IndexableField; @@ -39,6 +40,7 @@ import org.apache.solr.search.DocList; import org.apache.solr.search.QParser; import org.apache.solr.search.QueryWrapperFilter; import org.apache.solr.search.SyntaxError; +import org.apache.solr.search.SolrDocumentFetcher; /** * @@ -135,12 +137,21 @@ class ChildDocTransformer extends DocTransformer { Query query = new ToChildBlockJoinQuery(parentQuery, parentsFilter); DocList 
children = context.getSearcher().getDocList(query, childFilterQuery, new Sort(), 0, limit); if(children.matches() > 0) { + SolrDocumentFetcher docFetcher = context.getSearcher().getDocFetcher(); + + Set dvFieldsToReturn = docFetcher.getNonStoredDVs(true); + boolean shouldDecorateWithDVs = dvFieldsToReturn.size() > 0; DocIterator i = children.iterator(); + while(i.hasNext()) { Integer childDocNum = i.next(); Document childDoc = context.getSearcher().doc(childDocNum); SolrDocument solrChildDoc = DocsStreamer.convertLuceneDocToSolrDoc(childDoc, schema); + if (shouldDecorateWithDVs) { + docFetcher.decorateDocValueFields(solrChildDoc, childDocNum, dvFieldsToReturn); + } + // TODO: future enhancement... // support an fl local param in the transformer, which is used to build // a private ReturnFields instance that we use to prune unwanted field diff --git a/solr/core/src/test/org/apache/solr/response/transform/TestChildDocTransformer.java b/solr/core/src/test/org/apache/solr/response/transform/TestChildDocTransformer.java index 2e68d78653d..71b77f44b2a 100644 --- a/solr/core/src/test/org/apache/solr/response/transform/TestChildDocTransformer.java +++ b/solr/core/src/test/org/apache/solr/response/transform/TestChildDocTransformer.java @@ -59,6 +59,8 @@ public class TestChildDocTransformer extends SolrTestCaseJ4 { testSubQueryXML(); testSubQueryJSON(); + + testChildDocNonStoredDVFields(); } private void testChildDoctransformerXML() { @@ -205,6 +207,36 @@ public class TestChildDocTransformer extends SolrTestCaseJ4 { "/response/docs/[0]/_childDocuments_/[1]/id=='5'" }; + assertJQ(req("q", "*:*", "fq", "subject:\"parentDocument\" ", + "fl", "*,[child parentFilter=\"subject:parentDocument\"]"), test1); + + assertJQ(req("q", "*:*", "fq", "subject:\"parentDocument\" ", + "fl", "subject,[child parentFilter=\"subject:parentDocument\" childFilter=\"title:foo\"]"), test2); + + assertJQ(req("q", "*:*", "fq", "subject:\"parentDocument\" ", + "fl", "subject,[child 
parentFilter=\"subject:parentDocument\" childFilter=\"title:bar\" limit=2]"), test3); + } + + private void testChildDocNonStoredDVFields() throws Exception { + String[] test1 = new String[] { + "/response/docs/[0]/_childDocuments_/[0]/intDvoDefault==42", + "/response/docs/[0]/_childDocuments_/[1]/intDvoDefault==42", + "/response/docs/[0]/_childDocuments_/[2]/intDvoDefault==42", + "/response/docs/[0]/_childDocuments_/[3]/intDvoDefault==42", + "/response/docs/[0]/_childDocuments_/[4]/intDvoDefault==42", + "/response/docs/[0]/_childDocuments_/[5]/intDvoDefault==42" + }; + + String[] test2 = new String[] { + "/response/docs/[0]/_childDocuments_/[0]/intDvoDefault==42", + "/response/docs/[0]/_childDocuments_/[1]/intDvoDefault==42", + "/response/docs/[0]/_childDocuments_/[2]/intDvoDefault==42" + }; + + String[] test3 = new String[] { + "/response/docs/[0]/_childDocuments_/[0]/intDvoDefault==42", + "/response/docs/[0]/_childDocuments_/[1]/intDvoDefault==42" + }; assertJQ(req("q", "*:*", "fq", "subject:\"parentDocument\" ", "fl", "*,[child parentFilter=\"subject:parentDocument\"]"), test1); @@ -214,6 +246,7 @@ public class TestChildDocTransformer extends SolrTestCaseJ4 { assertJQ(req("q", "*:*", "fq", "subject:\"parentDocument\" ", "fl", "subject,[child parentFilter=\"subject:parentDocument\" childFilter=\"title:bar\" limit=2]"), test3); + } private void createSimpleIndex() { From 9601ea9d4be7a8e974b0c8e842856cd6c5e2a99c Mon Sep 17 00:00:00 2001 From: Ishan Chattopadhyaya Date: Sun, 30 Jul 2017 03:20:14 +0530 Subject: [PATCH 29/95] SOLR-10920: _default configset warning should not appear when using -n with create_collection --- solr/bin/solr | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/solr/bin/solr b/solr/bin/solr index 821bbf3cbb1..eeb6da4c049 100755 --- a/solr/bin/solr +++ b/solr/bin/solr @@ -978,7 +978,7 @@ if [[ "$SCRIPT_CMD" == "create" || "$SCRIPT_CMD" == "create_core" || "$SCRIPT_CM exit 1 fi - if [ "$CREATE_CONFDIR" == "_default" ]; then + if 
[[ "$CREATE_CONFDIR" == "_default" ]] && ([[ "$CREATE_CONFNAME" == "" ]] || [[ "$CREATE_CONFNAME" == "_default" ]]); then echo "WARNING: Using _default configset. Data driven schema functionality is enabled by default, which is" echo " NOT RECOMMENDED for production use." echo From 8b32d6beb5a2a3addb00901a0317167228fce520 Mon Sep 17 00:00:00 2001 From: Noble Paul Date: Sun, 30 Jul 2017 11:57:54 +0930 Subject: [PATCH 30/95] SOLR-10734: Awaitsfix --- .../solr/update/processor/AtomicUpdateProcessorFactoryTest.java | 1 + 1 file changed, 1 insertion(+) diff --git a/solr/core/src/test/org/apache/solr/update/processor/AtomicUpdateProcessorFactoryTest.java b/solr/core/src/test/org/apache/solr/update/processor/AtomicUpdateProcessorFactoryTest.java index 999d70b48c7..d9bad16ec42 100644 --- a/solr/core/src/test/org/apache/solr/update/processor/AtomicUpdateProcessorFactoryTest.java +++ b/solr/core/src/test/org/apache/solr/update/processor/AtomicUpdateProcessorFactoryTest.java @@ -199,6 +199,7 @@ public class AtomicUpdateProcessorFactoryTest extends SolrTestCaseJ4 { } + @AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/SOLR-10734") public void testMultipleThreads() throws Exception { clearIndex(); String[] strings = new String[5]; From 90da5ce81cea82424dad6ba9ab1bf12d34d196e2 Mon Sep 17 00:00:00 2001 From: Shalin Shekhar Mangar Date: Sun, 30 Jul 2017 20:20:20 +0530 Subject: [PATCH 31/95] SOLR-6086: Replica is active during autowarming resulting in queries being sent to a replica that may not have a registered searcher. 
This causes spikes in response times when adding a replica in busy clusters --- solr/CHANGES.txt | 4 + .../org/apache/solr/cloud/ZkController.java | 59 ++- .../component/RealTimeGetComponent.java | 8 +- .../org/apache/solr/util/TestInjection.java | 36 ++ .../solr/cloud/TestCloudSearcherWarming.java | 360 ++++++++++++++++++ 5 files changed, 464 insertions(+), 3 deletions(-) create mode 100644 solr/core/src/test/org/apache/solr/cloud/TestCloudSearcherWarming.java diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index e5abb3bc3b7..cb6b887b4a9 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -77,6 +77,10 @@ Bug Fixes * SOLR-10944: Get expression fails to return EOF tuple (Susheel Kumar, Joel Bernstein) +* SOLR-6086: Replica is active during autowarming resulting in queries being sent to a replica that + may not have a registered searcher. This causes spikes in response times when adding a replica + in busy clusters. (Ludovic Boutros, Timothy Potter, shalin) + Optimizations ---------------------- diff --git a/solr/core/src/java/org/apache/solr/cloud/ZkController.java b/solr/core/src/java/org/apache/solr/cloud/ZkController.java index dee833fe3c7..a529e94454d 100644 --- a/solr/core/src/java/org/apache/solr/cloud/ZkController.java +++ b/solr/core/src/java/org/apache/solr/cloud/ZkController.java @@ -42,6 +42,7 @@ import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; @@ -93,8 +94,11 @@ import org.apache.solr.core.SolrCore; import org.apache.solr.core.SolrCoreInitializationException; import org.apache.solr.handler.admin.ConfigSetsHandlerApi; import org.apache.solr.logging.MDCLoggingContext; +import org.apache.solr.search.SolrIndexSearcher; import org.apache.solr.servlet.SolrDispatchFilter; import 
org.apache.solr.update.UpdateLog; +import org.apache.solr.util.RTimer; +import org.apache.solr.util.RefCounted; import org.apache.zookeeper.CreateMode; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.KeeperException.ConnectionLossException; @@ -1299,8 +1303,11 @@ public class ZkController { props.put(ZkStateReader.CORE_NODE_NAME_PROP, coreNodeName); } try (SolrCore core = cc.getCore(cd.getName())) { + if (core != null && state == Replica.State.ACTIVE) { + ensureRegisteredSearcher(core); + } if (core != null && core.getDirectoryFactory().isSharedStorage()) { - if (core != null && core.getDirectoryFactory().isSharedStorage()) { + if (core.getDirectoryFactory().isSharedStorage()) { props.put("dataDir", core.getDataDir()); UpdateLog ulog = core.getUpdateHandler().getUpdateLog(); if (ulog != null) { @@ -1312,7 +1319,7 @@ public class ZkController { // The core had failed to initialize (in a previous request, not this one), hence nothing to do here. log.info("The core '{}' had failed to initialize before.", cd.getName()); } - + ZkNodeProps m = new ZkNodeProps(props); if (updateLastState) { @@ -2511,4 +2518,52 @@ public class ZkController { log.warn("Could not publish node as down: " + e.getMessage()); } } + + /** + * Ensures that a searcher is registered for the given core and if not, waits until one is registered + */ + private static void ensureRegisteredSearcher(SolrCore core) throws InterruptedException { + if (!core.getSolrConfig().useColdSearcher) { + RefCounted registeredSearcher = core.getRegisteredSearcher(); + if (registeredSearcher != null) { + log.debug("Found a registered searcher: {} for core: {}", registeredSearcher.get(), core); + registeredSearcher.decref(); + } else { + Future[] waitSearcher = new Future[1]; + log.info("No registered searcher found for core: {}, waiting until a searcher is registered before publishing as active", core.getName()); + final RTimer timer = new RTimer(); + RefCounted searcher = null; + try { + 
searcher = core.getSearcher(false, true, waitSearcher, true); + boolean success = true; + if (waitSearcher[0] != null) { + log.debug("Waiting for first searcher of core {}, id: {} to be registered", core.getName(), core); + try { + waitSearcher[0].get(); + } catch (ExecutionException e) { + log.warn("Wait for a searcher to be registered for core " + core.getName() + ",id: " + core + " failed due to: " + e, e); + success = false; + } + } + if (success) { + if (searcher == null) { + // should never happen + log.debug("Did not find a searcher even after the future callback for core: {}, id: {}!!!", core.getName(), core); + } else { + log.info("Found a registered searcher: {}, took: {} ms for core: {}, id: {}", searcher.get(), timer.getTime(), core.getName(), core); + } + } + } finally { + if (searcher != null) { + searcher.decref(); + } + } + } + RefCounted newestSearcher = core.getNewestSearcher(false); + if (newestSearcher != null) { + log.debug("Found newest searcher: {} for core: {}, id: {}", newestSearcher.get(), core.getName(), core); + newestSearcher.decref(); + } + } + } } diff --git a/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java b/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java index 6d70435612f..85cb6f60dec 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java +++ b/solr/core/src/java/org/apache/solr/handler/component/RealTimeGetComponent.java @@ -84,6 +84,7 @@ import org.apache.solr.update.IndexFingerprint; import org.apache.solr.update.PeerSync; import org.apache.solr.update.UpdateLog; import org.apache.solr.util.RefCounted; +import org.apache.solr.util.TestInjection; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -955,10 +956,15 @@ public class RealTimeGetComponent extends SearchComponent } public void processGetFingeprint(ResponseBuilder rb) throws IOException { + TestInjection.injectFailIndexFingerprintRequests(); + SolrQueryRequest req = 
rb.req; SolrParams params = req.getParams(); - + long maxVersion = params.getLong("getFingerprint", Long.MAX_VALUE); + if (TestInjection.injectWrongIndexFingerprint()) { + maxVersion = -1; + } IndexFingerprint fingerprint = IndexFingerprint.getFingerprint(req.getCore(), Math.abs(maxVersion)); rb.rsp.add("fingerprint", fingerprint); } diff --git a/solr/core/src/java/org/apache/solr/util/TestInjection.java b/solr/core/src/java/org/apache/solr/util/TestInjection.java index d7584da4e9e..422de73bb19 100644 --- a/solr/core/src/java/org/apache/solr/util/TestInjection.java +++ b/solr/core/src/java/org/apache/solr/util/TestInjection.java @@ -135,6 +135,10 @@ public class TestInjection { public static String splitFailureBeforeReplicaCreation = null; public static String waitForReplicasInSync = "true:60"; + + public static String failIndexFingerprintRequests = null; + + public static String wrongIndexFingerprint = null; private static Set timers = Collections.synchronizedSet(new HashSet()); @@ -152,11 +156,43 @@ public class TestInjection { prepRecoveryOpPauseForever = null; countPrepRecoveryOpPauseForever = new AtomicInteger(0); waitForReplicasInSync = "true:60"; + failIndexFingerprintRequests = null; + wrongIndexFingerprint = null; for (Timer timer : timers) { timer.cancel(); } } + + public static boolean injectWrongIndexFingerprint() { + if (wrongIndexFingerprint != null) { + Random rand = random(); + if (null == rand) return true; + + Pair pair = parseValue(wrongIndexFingerprint); + boolean enabled = pair.first(); + int chanceIn100 = pair.second(); + if (enabled && rand.nextInt(100) >= (100 - chanceIn100)) { + return true; + } + } + return false; + } + + public static boolean injectFailIndexFingerprintRequests() { + if (failIndexFingerprintRequests != null) { + Random rand = random(); + if (null == rand) return true; + + Pair pair = parseValue(failIndexFingerprintRequests); + boolean enabled = pair.first(); + int chanceIn100 = pair.second(); + if (enabled && 
rand.nextInt(100) >= (100 - chanceIn100)) { + throw new SolrException(ErrorCode.SERVER_ERROR, "Random test index fingerprint fail"); + } + } + return true; + } public static boolean injectRandomDelayInCoreCreation() { if (randomDelayInCoreCreation != null) { diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCloudSearcherWarming.java b/solr/core/src/test/org/apache/solr/cloud/TestCloudSearcherWarming.java new file mode 100644 index 00000000000..c0cd5b88c22 --- /dev/null +++ b/solr/core/src/test/org/apache/solr/cloud/TestCloudSearcherWarming.java @@ -0,0 +1,360 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.solr.cloud; + +import java.io.IOException; +import java.lang.invoke.MethodHandles; +import java.util.Collection; +import java.util.Collections; +import java.util.Set; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; + +import org.apache.solr.client.solrj.SolrClient; +import org.apache.solr.client.solrj.SolrQuery; +import org.apache.solr.client.solrj.SolrRequest; +import org.apache.solr.client.solrj.SolrResponse; +import org.apache.solr.client.solrj.SolrServerException; +import org.apache.solr.client.solrj.embedded.JettySolrRunner; +import org.apache.solr.client.solrj.impl.CloudSolrClient; +import org.apache.solr.client.solrj.request.CollectionAdminRequest; +import org.apache.solr.client.solrj.response.QueryResponse; +import org.apache.solr.client.solrj.response.SolrResponseBase; +import org.apache.solr.common.SolrInputDocument; +import org.apache.solr.common.cloud.CollectionStatePredicate; +import org.apache.solr.common.cloud.CollectionStateWatcher; +import org.apache.solr.common.cloud.DocCollection; +import org.apache.solr.common.cloud.Replica; +import org.apache.solr.common.params.SolrParams; +import org.apache.solr.common.util.ContentStream; +import org.apache.solr.common.util.ContentStreamBase; +import org.apache.solr.common.util.NamedList; +import org.apache.solr.core.SolrCore; +import org.apache.solr.core.SolrEventListener; +import org.apache.solr.search.SolrIndexSearcher; +import org.apache.solr.servlet.SolrDispatchFilter; +import org.apache.solr.util.LogLevel; +import org.apache.solr.util.RefCounted; +import org.apache.solr.util.TestInjection; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Tests related to SOLR-6086 + */ +@LogLevel("org.apache.solr.cloud.overseer.*=DEBUG,org.apache.solr.cloud.Overseer=DEBUG,org.apache.solr.cloud.ZkController=DEBUG") +public class 
TestCloudSearcherWarming extends SolrCloudTestCase { + public static final AtomicReference coreNodeNameRef = new AtomicReference<>(null), + coreNameRef = new AtomicReference<>(null); + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private static final AtomicInteger sleepTime = new AtomicInteger(-1); + + @BeforeClass + public static void setupCluster() throws Exception { + useFactory("solr.StandardDirectoryFactory"); // necessary to find the index+tlog intact after restart + configureCluster(1) + .addConfig("conf", configset("cloud-minimal")) + .configure(); + } + + @Before + public void before() { + coreNameRef.set(null); + coreNodeNameRef.set(null); + sleepTime.set(-1); + + try { + CollectionAdminRequest.deleteCollection("testRepFactor1LeaderStartup").process(cluster.getSolrClient()); + } catch (Exception e) { + // ignore + } + try { + CollectionAdminRequest.deleteCollection("testPeersyncFailureReplicationSuccess").process(cluster.getSolrClient()); + } catch (Exception e) { + // ignore + } + } + + @Test + public void testRepFactor1LeaderStartup() throws Exception { + CloudSolrClient solrClient = cluster.getSolrClient(); + + String collectionName = "testRepFactor1LeaderStartup"; + CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(collectionName, 1, 1) + .setCreateNodeSet(cluster.getJettySolrRunner(0).getNodeName()); + create.process(solrClient); + + waitForState("The collection should have 1 shard and 1 replica", collectionName, clusterShape(1, 1)); + + solrClient.setDefaultCollection(collectionName); + solrClient.getZkStateReader().registerCore(collectionName); + + String addListenerCommand = "{" + + "'add-listener' : {'name':'newSearcherListener','event':'newSearcher', 'class':'" + SleepingSolrEventListener.class.getName() + "'}" + + "'add-listener' : {'name':'firstSearcherListener','event':'firstSearcher', 'class':'" + SleepingSolrEventListener.class.getName() + "'}" + + "}"; + + 
ConfigRequest request = new ConfigRequest(SolrRequest.METHOD.POST, "/config", addListenerCommand); + solrClient.request(request); + + solrClient.add(new SolrInputDocument("id", "1")); + solrClient.commit(); + + AtomicInteger expectedDocs = new AtomicInteger(1); + AtomicReference failingCoreNodeName = new AtomicReference<>(); + CollectionStateWatcher stateWatcher = createActiveReplicaSearcherWatcher(expectedDocs, failingCoreNodeName); + + JettySolrRunner runner = cluster.getJettySolrRunner(0); + cluster.stopJettySolrRunner(0); + waitForState("", collectionName, clusterShape(1, 0)); + // restart + sleepTime.set(10000); + cluster.startJettySolrRunner(runner); + cluster.getSolrClient().getZkStateReader().registerCollectionStateWatcher(collectionName, stateWatcher); + waitForState("", collectionName, clusterShape(1, 1)); + assertNull("No replica should have been active without registering a searcher, found: " + failingCoreNodeName.get(), failingCoreNodeName.get()); + cluster.getSolrClient().getZkStateReader().removeCollectionStateWatcher(collectionName, stateWatcher); + } + + public void testPeersyncFailureReplicationSuccess() throws Exception { + CloudSolrClient solrClient = cluster.getSolrClient(); + + String collectionName = "testPeersyncFailureReplicationSuccess"; + CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(collectionName, 1, 1) + .setCreateNodeSet(cluster.getJettySolrRunner(0).getNodeName()); + create.process(solrClient); + + waitForState("The collection should have 1 shard and 1 replica", collectionName, clusterShape(1, 1)); + + solrClient.setDefaultCollection(collectionName); + solrClient.getZkStateReader().registerCore(collectionName); + + String addListenerCommand = "{" + + "'add-listener' : {'name':'newSearcherListener','event':'newSearcher', 'class':'" + SleepingSolrEventListener.class.getName() + "'}" + + "'add-listener' : {'name':'firstSearcherListener','event':'firstSearcher', 'class':'" + 
SleepingSolrEventListener.class.getName() + "'}" + + "}"; + + ConfigRequest request = new ConfigRequest(SolrRequest.METHOD.POST, "/config", addListenerCommand); + solrClient.request(request); + + solrClient.add(new SolrInputDocument("id", "1")); + solrClient.commit(); + + AtomicInteger expectedDocs = new AtomicInteger(1); + AtomicReference failingCoreNodeName = new AtomicReference<>(); + + QueryResponse response = solrClient.query(new SolrQuery("*:*")); + assertEquals(1, response.getResults().getNumFound()); + + // reset + coreNameRef.set(null); + coreNodeNameRef.set(null); + failingCoreNodeName.set(null); + sleepTime.set(5000); + + CollectionStateWatcher stateWatcher = createActiveReplicaSearcherWatcher(expectedDocs, failingCoreNodeName); + cluster.getSolrClient().getZkStateReader().registerCollectionStateWatcher(collectionName, stateWatcher); + + JettySolrRunner newNode = cluster.startJettySolrRunner(); + CollectionAdminRequest.addReplicaToShard(collectionName, "shard1") + .setNode(newNode.getNodeName()) + .process(solrClient); + + waitForState("The collection should have 1 shard and 2 replica", collectionName, clusterShape(1, 2)); + assertNull("No replica should have been active without registering a searcher, found: " + failingCoreNodeName.get(), failingCoreNodeName.get()); + + // stop the old node + log.info("Stopping old node 1"); + AtomicReference oldNodeName = new AtomicReference<>(cluster.getJettySolrRunner(0).getNodeName()); + JettySolrRunner oldNode = cluster.stopJettySolrRunner(0); + // the newly created replica should become leader + waitForState("The collection should have 1 shard and 1 replica", collectionName, clusterShape(1, 1)); + // the above call is not enough because we want to assert that the down'ed replica is not active + // but clusterShape will also return true if replica is not live -- which we don't want + CollectionStatePredicate collectionStatePredicate = (liveNodes, collectionState) -> { + for (Replica r : 
collectionState.getReplicas()) { + if (r.getNodeName().equals(oldNodeName.get())) { + return r.getState() == Replica.State.DOWN; + } + } + return false; + }; + waitForState("", collectionName, collectionStatePredicate); + assertNotNull(solrClient.getZkStateReader().getLeaderRetry(collectionName, "shard1")); + + // reset + coreNameRef.set(null); + coreNodeNameRef.set(null); + failingCoreNodeName.set(null); + sleepTime.set(5000); + + // inject wrong signature output + TestInjection.wrongIndexFingerprint = "true:100"; + // now lets restart the old node + log.info("Starting old node 1"); + cluster.startJettySolrRunner(oldNode); + waitForState("", collectionName, clusterShape(1, 2)); + // invoke statewatcher explicitly to avoid race condition where the assert happens before the state watcher is invoked by ZkStateReader + cluster.getSolrClient().getZkStateReader().registerCollectionStateWatcher(collectionName, stateWatcher); + assertNull("No replica should have been active without registering a searcher, found: " + failingCoreNodeName.get(), failingCoreNodeName.get()); + + oldNodeName.set(cluster.getJettySolrRunner(1).getNodeName()); + assertSame(oldNode, cluster.stopJettySolrRunner(1)); // old node is now at 1 + log.info("Stopping old node 2"); + waitForState("", collectionName, clusterShape(1, 1)); + waitForState("", collectionName, collectionStatePredicate); + + // reset + coreNameRef.set(null); + coreNodeNameRef.set(null); + failingCoreNodeName.set(null); + sleepTime.set(14000); // has to be higher than the twice the recovery wait pause between attempts plus some margin + + // inject failure + TestInjection.failIndexFingerprintRequests = "true:100"; + // now lets restart the old node again + log.info("Starting old node 2"); + cluster.startJettySolrRunner(oldNode); + waitForState("", collectionName, clusterShape(1, 2)); + // invoke statewatcher explicitly to avoid race condition where the assert happens before the state watcher is invoked by ZkStateReader + 
cluster.getSolrClient().getZkStateReader().registerCollectionStateWatcher(collectionName, stateWatcher); + assertNull("No replica should have been active without registering a searcher, found: " + failingCoreNodeName.get(), failingCoreNodeName.get()); + cluster.getSolrClient().getZkStateReader().removeCollectionStateWatcher(collectionName, stateWatcher); + } + + private CollectionStateWatcher createActiveReplicaSearcherWatcher(AtomicInteger expectedDocs, AtomicReference failingCoreNodeName) { + return new CollectionStateWatcher() { + @Override + public boolean onStateChanged(Set liveNodes, DocCollection collectionState) { + try { + String coreNodeName = coreNodeNameRef.get(); + String coreName = coreNameRef.get(); + if (coreNodeName == null || coreName == null) return false; + Replica replica = collectionState.getReplica(coreNodeName); + if (replica == null) return false; + log.info("Collection state: {}", collectionState); + if (replica.isActive(liveNodes)) { + log.info("Active replica: {}", coreNodeName); + for (int i = 0; i < cluster.getJettySolrRunners().size(); i++) { + JettySolrRunner jettySolrRunner = cluster.getJettySolrRunner(i); + log.info("Checking node: {}", jettySolrRunner.getNodeName()); + if (jettySolrRunner.getNodeName().equals(replica.getNodeName())) { + SolrDispatchFilter solrDispatchFilter = jettySolrRunner.getSolrDispatchFilter(); + try (SolrCore core = solrDispatchFilter.getCores().getCore(coreName)) { + if (core.getSolrConfig().useColdSearcher) { + log.error("useColdSearcher is enabled! 
It should not be enabled for this test!"); + assert false; + return false; + } + log.info("Found SolrCore: {}, id: {}", core.getName(), core); + RefCounted registeredSearcher = core.getRegisteredSearcher(); + if (registeredSearcher != null) { + log.error("registered searcher not null, maxdocs = {}", registeredSearcher.get().maxDoc()); + if (registeredSearcher.get().maxDoc() != expectedDocs.get()) { + failingCoreNodeName.set(coreNodeName); + registeredSearcher.decref(); + return false; + } else { + registeredSearcher.decref(); + return false; + } + } else { + log.error("registered searcher was null!"); + RefCounted newestSearcher = core.getNewestSearcher(false); + if (newestSearcher != null) { + SolrIndexSearcher searcher = newestSearcher.get(); + log.warn("newest searcher was: {}", searcher); + newestSearcher.decref(); + } else { + log.error("newest searcher was also null!"); + } + // no registered searcher but replica is active! + failingCoreNodeName.set(coreNodeName); + } + } + } + } + } + } catch (Exception e) { + log.error("Unexpected exception in state watcher", e); + } + return false; + } + }; + } + + public static class SleepingSolrEventListener implements SolrEventListener { + @Override + public void init(NamedList args) { + new RuntimeException().printStackTrace(); + System.out.println(args); + } + + @Override + public void postCommit() { + + } + + @Override + public void postSoftCommit() { + + } + + @Override + public void newSearcher(SolrIndexSearcher newSearcher, SolrIndexSearcher currentSearcher) { + if (sleepTime.get() > 0) { + TestCloudSearcherWarming.coreNodeNameRef.set(newSearcher.getCore().getCoreDescriptor().getCloudDescriptor().getCoreNodeName()); + TestCloudSearcherWarming.coreNameRef.set(newSearcher.getCore().getName()); + log.info("Sleeping for {} on newSearcher: {}, currentSearcher: {} belonging to (newest) core: {}, id: {}", sleepTime.get(), newSearcher, currentSearcher, newSearcher.getCore().getName(), newSearcher.getCore()); + try { + 
Thread.sleep(sleepTime.get()); + } catch (InterruptedException e) { + e.printStackTrace(); + } + log.info("Finished sleeping for {} on newSearcher: {}, currentSearcher: {} belonging to (newest) core: {}, id: {}", sleepTime.get(), newSearcher, currentSearcher, newSearcher.getCore().getName(), newSearcher.getCore()); + } + } + } + + public static class ConfigRequest extends SolrRequest { + protected final String message; + + public ConfigRequest(METHOD m, String path, String message) { + super(m, path); + this.message = message; + } + + @Override + public SolrParams getParams() { + return null; + } + + @Override + public Collection getContentStreams() throws IOException { + return message != null ? Collections.singletonList(new ContentStreamBase.StringStream(message)) : null; + } + + @Override + protected SolrResponse createResponse(SolrClient client) { + return new SolrResponseBase(); + } + } +} From b1a65c8f5572004cabc2b8d5548bf07f22fd2b3e Mon Sep 17 00:00:00 2001 From: Shalin Shekhar Mangar Date: Mon, 31 Jul 2017 08:12:29 +0530 Subject: [PATCH 32/95] SOLR-6086: Remove unused import --- .../src/test/org/apache/solr/cloud/TestCloudSearcherWarming.java | 1 - 1 file changed, 1 deletion(-) diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCloudSearcherWarming.java b/solr/core/src/test/org/apache/solr/cloud/TestCloudSearcherWarming.java index c0cd5b88c22..761785d2b8a 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestCloudSearcherWarming.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestCloudSearcherWarming.java @@ -29,7 +29,6 @@ import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrRequest; import org.apache.solr.client.solrj.SolrResponse; -import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.embedded.JettySolrRunner; import org.apache.solr.client.solrj.impl.CloudSolrClient; import 
org.apache.solr.client.solrj.request.CollectionAdminRequest; From 53472c4b93c1c7a2e3672712ae7387f5ef34f9d7 Mon Sep 17 00:00:00 2001 From: Andrzej Bialecki Date: Mon, 31 Jul 2017 12:08:51 +0200 Subject: [PATCH 33/95] SOLR-11036: Separately report disk space metrics for solr.data.home and core root directory. --- solr/CHANGES.txt | 2 ++ .../org/apache/solr/core/CoreContainer.java | 13 ++++++++-- .../metrics/SolrMetricsIntegrationTest.java | 24 +++++++++++++++++++ 3 files changed, 37 insertions(+), 2 deletions(-) diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index cb6b887b4a9..2c4e436c896 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -586,6 +586,8 @@ Other Changes * SOLR-10756: Undeprecate ZkStateReader.updateClusterState(), mark as @lucene.internal, and rename to forciblyRefreshAllClusterStateSlow(). (hossman, shalin, Steve Rowe) +* SOLR-11036: Separately report disk space metrics for solr.data.home and core root directory. (ab) + ================== 6.7.0 ================== Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release. diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java b/solr/core/src/java/org/apache/solr/core/CoreContainer.java index a43af964a85..53a3bb31653 100644 --- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java +++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java @@ -557,10 +557,19 @@ public class CoreContainer { true, "lazy", SolrInfoBean.Category.CONTAINER.toString(), "cores"); metricManager.registerGauge(null, registryName, () -> solrCores.getAllCoreNames().size() - solrCores.getLoadedCoreNames().size(), true, "unloaded", SolrInfoBean.Category.CONTAINER.toString(), "cores"); - metricManager.registerGauge(null, registryName, () -> cfg.getCoreRootDirectory().toFile().getTotalSpace(), + Path dataHome = cfg.getSolrDataHome() != null ? 
cfg.getSolrDataHome() : cfg.getCoreRootDirectory(); + metricManager.registerGauge(null, registryName, () -> dataHome.toFile().getTotalSpace(), true, "totalSpace", SolrInfoBean.Category.CONTAINER.toString(), "fs"); - metricManager.registerGauge(null, registryName, () -> cfg.getCoreRootDirectory().toFile().getUsableSpace(), + metricManager.registerGauge(null, registryName, () -> dataHome.toFile().getUsableSpace(), true, "usableSpace", SolrInfoBean.Category.CONTAINER.toString(), "fs"); + metricManager.registerGauge(null, registryName, () -> dataHome.toAbsolutePath().toString(), + true, "path", SolrInfoBean.Category.CONTAINER.toString(), "fs"); + metricManager.registerGauge(null, registryName, () -> cfg.getCoreRootDirectory().toFile().getTotalSpace(), + true, "totalSpace", SolrInfoBean.Category.CONTAINER.toString(), "fs", "coreRoot"); + metricManager.registerGauge(null, registryName, () -> cfg.getCoreRootDirectory().toFile().getUsableSpace(), + true, "usableSpace", SolrInfoBean.Category.CONTAINER.toString(), "fs", "coreRoot"); + metricManager.registerGauge(null, registryName, () -> cfg.getCoreRootDirectory().toAbsolutePath().toString(), + true, "path", SolrInfoBean.Category.CONTAINER.toString(), "fs", "coreRoot"); // add version information metricManager.registerGauge(null, registryName, () -> this.getClass().getPackage().getSpecificationVersion(), true, "specification", SolrInfoBean.Category.CONTAINER.toString(), "version"); diff --git a/solr/core/src/test/org/apache/solr/metrics/SolrMetricsIntegrationTest.java b/solr/core/src/test/org/apache/solr/metrics/SolrMetricsIntegrationTest.java index b2cb5f38b72..1a8eda80045 100644 --- a/solr/core/src/test/org/apache/solr/metrics/SolrMetricsIntegrationTest.java +++ b/solr/core/src/test/org/apache/solr/metrics/SolrMetricsIntegrationTest.java @@ -22,6 +22,9 @@ import java.nio.file.Paths; import java.util.Map; import java.util.Random; +import com.codahale.metrics.Gauge; +import com.codahale.metrics.Metric; +import 
com.codahale.metrics.MetricRegistry; import com.codahale.metrics.Timer; import org.apache.commons.io.FileUtils; import org.apache.lucene.util.TestUtil; @@ -150,4 +153,25 @@ public class SolrMetricsIntegrationTest extends SolrTestCaseJ4 { assertFalse("Reporter " + reporterName + " was incorrectly closed: " + mockReporter, mockReporter.didClose); } } + + @Test + public void testCoreContainerMetrics() throws Exception { + String registryName = SolrMetricManager.getRegistryName(SolrInfoBean.Group.node); + assertTrue(cc.getMetricManager().registryNames().toString(), cc.getMetricManager().registryNames().contains(registryName)); + MetricRegistry registry = cc.getMetricManager().registry(registryName); + Map metrics = registry.getMetrics(); + assertTrue(metrics.containsKey("CONTAINER.cores.loaded")); + assertTrue(metrics.containsKey("CONTAINER.cores.lazy")); + assertTrue(metrics.containsKey("CONTAINER.cores.unloaded")); + assertTrue(metrics.containsKey("CONTAINER.fs.totalSpace")); + assertTrue(metrics.containsKey("CONTAINER.fs.usableSpace")); + assertTrue(metrics.containsKey("CONTAINER.fs.path")); + assertTrue(metrics.containsKey("CONTAINER.fs.coreRoot.totalSpace")); + assertTrue(metrics.containsKey("CONTAINER.fs.coreRoot.usableSpace")); + assertTrue(metrics.containsKey("CONTAINER.fs.coreRoot.path")); + assertTrue(metrics.containsKey("CONTAINER.version.specification")); + assertTrue(metrics.containsKey("CONTAINER.version.implementation")); + Gauge g = (Gauge)metrics.get("CONTAINER.fs.path"); + assertEquals(g.getValue(), cc.getResourceLoader().getInstancePath().toAbsolutePath().toString()); + } } From 5adceeb6526f43eaf7b574c487915aad3891e5f3 Mon Sep 17 00:00:00 2001 From: Cassandra Targett Date: Mon, 31 Jul 2017 11:10:34 -0500 Subject: [PATCH 34/95] Ref Guide: add disclaimer about not providing support via comments --- solr/solr-ref-guide/src/_layouts/page.html | 2 ++ 1 file changed, 2 insertions(+) diff --git a/solr/solr-ref-guide/src/_layouts/page.html 
b/solr/solr-ref-guide/src/_layouts/page.html index 20758daff83..85bd84dbfd2 100755 --- a/solr/solr-ref-guide/src/_layouts/page.html +++ b/solr/solr-ref-guide/src/_layouts/page.html @@ -70,6 +70,8 @@ layout: default
+

Comments on this Page

+

We welcome feedback on Solr documentation. However, we cannot provide application support via comments. If you need help, please send a message to the Solr User mailing list.

From 88614dd15f80e832c90528e8b9b6d9abbaddbb93 Mon Sep 17 00:00:00 2001 From: Steve Rowe Date: Mon, 31 Jul 2017 14:24:11 -0400 Subject: [PATCH 35/95] SOLR-10919: ord & rord functions give confusing errors with PointFields --- solr/CHANGES.txt | 2 ++ .../solr/search/function/OrdFieldSource.java | 5 +++++ .../function/ReverseOrdFieldSource.java | 5 +++++ .../search/function/TestFunctionQuery.java | 19 +++++++++++++++++++ 4 files changed, 31 insertions(+) diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index 2c4e436c896..ed4d1e467fd 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -588,6 +588,8 @@ Other Changes * SOLR-11036: Separately report disk space metrics for solr.data.home and core root directory. (ab) +* SOLR-10919: ord & rord functions give confusing errors with PointFields. (hossman, Steve Rowe) + ================== 6.7.0 ================== Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release. diff --git a/solr/core/src/java/org/apache/solr/search/function/OrdFieldSource.java b/solr/core/src/java/org/apache/solr/search/function/OrdFieldSource.java index 4637df64517..7cd81426c11 100644 --- a/solr/core/src/java/org/apache/solr/search/function/OrdFieldSource.java +++ b/solr/core/src/java/org/apache/solr/search/function/OrdFieldSource.java @@ -33,6 +33,7 @@ import org.apache.lucene.queries.function.docvalues.IntDocValues; import org.apache.lucene.search.SortedSetSelector; import org.apache.lucene.util.mutable.MutableValue; import org.apache.lucene.util.mutable.MutableValueInt; +import org.apache.solr.common.SolrException; import org.apache.solr.index.SlowCompositeReaderWrapper; import org.apache.solr.schema.SchemaField; import org.apache.solr.search.Insanity; @@ -77,6 +78,10 @@ public class OrdFieldSource extends ValueSource { if (o instanceof SolrIndexSearcher) { SolrIndexSearcher is = (SolrIndexSearcher) o; SchemaField sf = is.getSchema().getFieldOrNull(field); + if (sf != null && sf.getType().isPointField()) { + throw 
new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "ord() is not supported over Points based field " + field); + } if (sf != null && sf.hasDocValues() == false && sf.multiValued() == false && sf.getType().getNumberType() != null) { // it's a single-valued numeric field: we must currently create insanity :( List leaves = is.getIndexReader().leaves(); diff --git a/solr/core/src/java/org/apache/solr/search/function/ReverseOrdFieldSource.java b/solr/core/src/java/org/apache/solr/search/function/ReverseOrdFieldSource.java index f379913184b..0ada4d5aba1 100644 --- a/solr/core/src/java/org/apache/solr/search/function/ReverseOrdFieldSource.java +++ b/solr/core/src/java/org/apache/solr/search/function/ReverseOrdFieldSource.java @@ -31,6 +31,7 @@ import org.apache.lucene.queries.function.FunctionValues; import org.apache.lucene.queries.function.ValueSource; import org.apache.lucene.queries.function.docvalues.IntDocValues; import org.apache.lucene.search.SortedSetSelector; +import org.apache.solr.common.SolrException; import org.apache.solr.index.SlowCompositeReaderWrapper; import org.apache.solr.schema.SchemaField; import org.apache.solr.search.Insanity; @@ -77,6 +78,10 @@ public class ReverseOrdFieldSource extends ValueSource { if (o instanceof SolrIndexSearcher) { SolrIndexSearcher is = (SolrIndexSearcher) o; SchemaField sf = is.getSchema().getFieldOrNull(field); + if (sf != null && sf.getType().isPointField()) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "rord() is not supported over Points based field " + field); + } if (sf != null && sf.hasDocValues() == false && sf.multiValued() == false && sf.getType().getNumberType() != null) { // it's a single-valued numeric field: we must currently create insanity :( List leaves = is.getIndexReader().leaves(); diff --git a/solr/core/src/test/org/apache/solr/search/function/TestFunctionQuery.java b/solr/core/src/test/org/apache/solr/search/function/TestFunctionQuery.java index afc8a0dc5a0..8383a673b8c 
100644 --- a/solr/core/src/test/org/apache/solr/search/function/TestFunctionQuery.java +++ b/solr/core/src/test/org/apache/solr/search/function/TestFunctionQuery.java @@ -28,6 +28,7 @@ import java.util.Random; import org.apache.lucene.search.similarities.Similarity; import org.apache.lucene.search.similarities.TFIDFSimilarity; import org.apache.solr.SolrTestCaseJ4; +import org.apache.solr.common.SolrException; import org.junit.BeforeClass; import org.junit.Test; @@ -302,6 +303,24 @@ public class TestFunctionQuery extends SolrTestCaseJ4 { makeExternalFile(extField, "91=543210\n92=-8\n93=250\n=67"); singleTest(extField,"\0",991,543210,992,-8,993,250); } + + @Test + public void testOrdAndRordOverPointsField() throws Exception { + assumeTrue("Skipping test when points=false", Boolean.getBoolean(NUMERIC_POINTS_SYSPROP)); + clearIndex(); + + String field = "a_" + new String[] {"i","l","d","f"}[random().nextInt(4)]; + assertU(adoc("id", "1", field, "1")); + assertU(commit()); + + Exception e = expectThrows(SolrException.class, () -> h.query(req("q", "{!func}ord(" + field + ")", "fq", "id:1"))); + assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ((SolrException)e).code()); + assertTrue(e.getMessage().contains("ord() is not supported over Points based field " + field)); + + e = expectThrows(SolrException.class, () -> h.query(req("q", "{!func}rord(" + field + ")", "fq", "id:1"))); + assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ((SolrException)e).code()); + assertTrue(e.getMessage().contains("rord() is not supported over Points based field " + field)); + } @Test public void testGeneral() throws Exception { From 71517bc29258e0f1640f30f37f0a0aad1ed98f7a Mon Sep 17 00:00:00 2001 From: Joel Bernstein Date: Mon, 31 Jul 2017 15:39:55 -0400 Subject: [PATCH 36/95] SOLR-11160: Add normalDistribution, uniformDistribution, sample and kolmogorovSmirnov Stream Evaluators --- .../apache/solr/handler/StreamHandler.java | 7 +- .../client/solrj/io/eval/AscEvaluator.java | 80 
++++++++++++ .../eval/EmpiricalDistributionEvaluator.java | 53 +------- .../solrj/io/eval/HistogramEvaluator.java | 6 +- .../io/eval/KolmogorovSmirnovEvaluator.java | 102 +++++++++++++++ ....java => NormalDistributionEvaluator.java} | 24 ++-- .../client/solrj/io/eval/SampleEvaluator.java | 75 +++++++++++ .../io/eval/UniformDistributionEvaluator.java | 69 +++++++++++ .../solrj/io/stream/StreamExpressionTest.java | 116 ++++++++++-------- 9 files changed, 418 insertions(+), 114 deletions(-) create mode 100644 solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/AscEvaluator.java create mode 100644 solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/KolmogorovSmirnovEvaluator.java rename solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/{CumulativeProbabilityEvaluator.java => NormalDistributionEvaluator.java} (75%) create mode 100644 solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/SampleEvaluator.java create mode 100644 solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/UniformDistributionEvaluator.java diff --git a/solr/core/src/java/org/apache/solr/handler/StreamHandler.java b/solr/core/src/java/org/apache/solr/handler/StreamHandler.java index d52972d493d..687eb41b6aa 100644 --- a/solr/core/src/java/org/apache/solr/handler/StreamHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/StreamHandler.java @@ -200,7 +200,6 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware, .withFunctionName("copyOf", CopyOfEvaluator.class) .withFunctionName("copyOfRange", CopyOfRangeEvaluator.class) .withFunctionName("cov", CovarianceEvaluator.class) - .withFunctionName("cumulativeProbability", CumulativeProbabilityEvaluator.class) .withFunctionName("describe", DescribeEvaluator.class) .withFunctionName("distance", DistanceEvaluator.class) .withFunctionName("empiricalDistribution", EmpiricalDistributionEvaluator.class) @@ -219,6 +218,12 @@ public class StreamHandler extends RequestHandlerBase implements SolrCoreAware, 
.withFunctionName("addAll", AddAllEvaluator.class) .withFunctionName("residuals", ResidualsEvaluator.class) .withFunctionName("plot", PlotStream.class) + .withFunctionName("normalDistribution", NormalDistributionEvaluator.class) + .withFunctionName("uniformDistribution", UniformDistributionEvaluator.class) + .withFunctionName("sample", SampleEvaluator.class) + .withFunctionName("kolmogorovSmirnov", KolmogorovSmirnovEvaluator.class) + .withFunctionName("ks", KolmogorovSmirnovEvaluator.class) + .withFunctionName("asc", AscEvaluator.class) // Boolean Stream Evaluators .withFunctionName("and", AndEvaluator.class) diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/AscEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/AscEvaluator.java new file mode 100644 index 00000000000..8e5e4a10ceb --- /dev/null +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/AscEvaluator.java @@ -0,0 +1,80 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.client.solrj.io.eval; + +import java.io.IOException; +import java.math.BigDecimal; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; +import java.util.Locale; + +import org.apache.solr.client.solrj.io.Tuple; +import org.apache.solr.client.solrj.io.stream.expr.Explanation; +import org.apache.solr.client.solrj.io.stream.expr.Explanation.ExpressionType; +import org.apache.solr.client.solrj.io.stream.expr.Expressible; +import org.apache.solr.client.solrj.io.stream.expr.StreamExpression; +import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParameter; +import org.apache.solr.client.solrj.io.stream.expr.StreamFactory; + +public class AscEvaluator extends ComplexEvaluator implements Expressible { + + private static final long serialVersionUID = 1; + + public AscEvaluator(StreamExpression expression, StreamFactory factory) throws IOException { + super(expression, factory); + + if(1 != subEvaluators.size()){ + throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - expecting one value but found %d",expression,subEvaluators.size())); + } + + } + + public List evaluate(Tuple tuple) throws IOException { + + StreamEvaluator colEval1 = subEvaluators.get(0); + + List numbers1 = (List)colEval1.evaluate(tuple); + List asc = new ArrayList(); + asc.addAll(numbers1); + Collections.sort(asc, new Comparator() { + @Override + public int compare(Number a, Number b) { + return new BigDecimal(a.toString()).compareTo(new BigDecimal(b.toString())); + + } + }); + + return asc; + } + + @Override + public StreamExpressionParameter toExpression(StreamFactory factory) throws IOException { + StreamExpression expression = new StreamExpression(factory.getFunctionName(getClass())); + return expression; + } + + @Override + public Explanation toExplanation(StreamFactory factory) throws IOException { + return new Explanation(nodeId.toString()) + 
.withExpressionType(ExpressionType.EVALUATOR) + .withFunctionName(factory.getFunctionName(getClass())) + .withImplementingClass(getClass().getName()) + .withExpression(toExpression(factory).toString()); + } +} \ No newline at end of file diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EmpiricalDistributionEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EmpiricalDistributionEvaluator.java index 8456b4dc1c5..5a507022e67 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EmpiricalDistributionEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EmpiricalDistributionEvaluator.java @@ -17,14 +17,12 @@ package org.apache.solr.client.solrj.io.eval; import java.io.IOException; -import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Arrays; import org.apache.commons.math3.random.EmpiricalDistribution; -import org.apache.commons.math3.stat.descriptive.StatisticalSummary; import org.apache.solr.client.solrj.io.Tuple; import org.apache.solr.client.solrj.io.stream.expr.Explanation; import org.apache.solr.client.solrj.io.stream.expr.Explanation.ExpressionType; @@ -45,7 +43,7 @@ public class EmpiricalDistributionEvaluator extends ComplexEvaluator implements } } - public Tuple evaluate(Tuple tuple) throws IOException { + public Object evaluate(Tuple tuple) throws IOException { StreamEvaluator colEval1 = subEvaluators.get(0); @@ -60,56 +58,9 @@ public class EmpiricalDistributionEvaluator extends ComplexEvaluator implements EmpiricalDistribution empiricalDistribution = new EmpiricalDistribution(); empiricalDistribution.load(column1); - Map map = new HashMap(); - StatisticalSummary statisticalSummary = empiricalDistribution.getSampleStats(); - - map.put("max", statisticalSummary.getMax()); - map.put("mean", statisticalSummary.getMean()); - map.put("min", statisticalSummary.getMin()); - map.put("stdev", 
statisticalSummary.getStandardDeviation()); - map.put("sum", statisticalSummary.getSum()); - map.put("N", statisticalSummary.getN()); - map.put("var", statisticalSummary.getVariance()); - - return new EmpiricalDistributionTuple(empiricalDistribution, column1, map); + return empiricalDistribution; } - public static class EmpiricalDistributionTuple extends Tuple { - - private EmpiricalDistribution empiricalDistribution; - private double[] backingArray; - - public EmpiricalDistributionTuple(EmpiricalDistribution empiricalDistribution, double[] backingArray, Map map) { - super(map); - this.empiricalDistribution = empiricalDistribution; - this.backingArray = backingArray; - } - - public double percentile(double d) { - int slot = Arrays.binarySearch(backingArray, d); - - if(slot == 0) { - return 0.0; - } - - if(slot < 0) { - if(slot == -1) { - return 0.0D; - } else { - //Not a direct hit - slot = Math.abs(slot); - --slot; - if(slot == backingArray.length) { - return 1.0D; - } else { - return (this.empiricalDistribution.cumulativeProbability(backingArray[slot])); - } - } - } else { - return this.empiricalDistribution.cumulativeProbability(backingArray[slot]); - } - } - } @Override public StreamExpressionParameter toExpression(StreamFactory factory) throws IOException { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/HistogramEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/HistogramEvaluator.java index 0217bae8de6..beabe3a8701 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/HistogramEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/HistogramEvaluator.java @@ -45,7 +45,7 @@ public class HistogramEvaluator extends ComplexEvaluator implements Expressible } } - public List evaluate(Tuple tuple) throws IOException { + public List evaluate(Tuple tuple) throws IOException { StreamEvaluator colEval1 = subEvaluators.get(0); @@ -66,7 +66,7 @@ public class HistogramEvaluator extends 
ComplexEvaluator implements Expressible EmpiricalDistribution empiricalDistribution = new EmpiricalDistribution(bins); empiricalDistribution.load(column1); - List binList = new ArrayList(); + List binList = new ArrayList(); List summaries = empiricalDistribution.getBinStats(); for(SummaryStatistics statisticalSummary : summaries) { @@ -78,7 +78,7 @@ public class HistogramEvaluator extends ComplexEvaluator implements Expressible map.put("sum", statisticalSummary.getSum()); map.put("N", statisticalSummary.getN()); map.put("var", statisticalSummary.getVariance()); - binList.add(map); + binList.add(new Tuple(map)); } return binList; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/KolmogorovSmirnovEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/KolmogorovSmirnovEvaluator.java new file mode 100644 index 00000000000..aa7c537e298 --- /dev/null +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/KolmogorovSmirnovEvaluator.java @@ -0,0 +1,102 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.client.solrj.io.eval; + +import java.io.IOException; +import java.util.List; +import java.util.Locale; +import java.util.HashMap; +import java.util.Map; + +import org.apache.commons.math3.distribution.RealDistribution; +import org.apache.commons.math3.stat.inference.KolmogorovSmirnovTest; +import org.apache.solr.client.solrj.io.Tuple; +import org.apache.solr.client.solrj.io.stream.expr.Explanation; +import org.apache.solr.client.solrj.io.stream.expr.Explanation.ExpressionType; +import org.apache.solr.client.solrj.io.stream.expr.Expressible; +import org.apache.solr.client.solrj.io.stream.expr.StreamExpression; +import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParameter; +import org.apache.solr.client.solrj.io.stream.expr.StreamFactory; + +public class KolmogorovSmirnovEvaluator extends ComplexEvaluator implements Expressible { + + private static final long serialVersionUID = 1; + + public KolmogorovSmirnovEvaluator(StreamExpression expression, StreamFactory factory) throws IOException { + super(expression, factory); + + if(subEvaluators.size() != 2){ + throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - expecting at least two values but found %d",expression,subEvaluators.size())); + } + } + + public Tuple evaluate(Tuple tuple) throws IOException { + + StreamEvaluator se1 = subEvaluators.get(0); + StreamEvaluator se2 = subEvaluators.get(1); + + KolmogorovSmirnovTest ks = new KolmogorovSmirnovTest(); + List sample = (List)se2.evaluate(tuple); + double[] data = new double[sample.size()]; + + for(int i=0; i sample2 = (List)o; + double[] data2 = new double[sample2.size()]; + for(int i=0; i evaluate(Tuple tuple) throws IOException { + StreamEvaluator r = subEvaluators.get(0); + StreamEvaluator d = subEvaluators.get(1); + Number number = (Number)d.evaluate(tuple); + RealDistribution rd= (RealDistribution)r.evaluate(tuple); + double[] sample = rd.sample(number.intValue()); + List list = new ArrayList(); + 
for(double n : sample) { + list.add(n); + } + return list; + } + + @Override + public StreamExpressionParameter toExpression(StreamFactory factory) throws IOException { + StreamExpression expression = new StreamExpression(factory.getFunctionName(getClass())); + return expression; + } + + @Override + public Explanation toExplanation(StreamFactory factory) throws IOException { + return new Explanation(nodeId.toString()) + .withExpressionType(ExpressionType.EVALUATOR) + .withFunctionName(factory.getFunctionName(getClass())) + .withImplementingClass(getClass().getName()) + .withExpression(toExpression(factory).toString()); + } +} \ No newline at end of file diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/UniformDistributionEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/UniformDistributionEvaluator.java new file mode 100644 index 00000000000..dec6aa40000 --- /dev/null +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/UniformDistributionEvaluator.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.solr.client.solrj.io.eval; + +import java.io.IOException; +import java.util.Locale; + + +import org.apache.commons.math3.distribution.UniformRealDistribution; +import org.apache.solr.client.solrj.io.Tuple; +import org.apache.solr.client.solrj.io.stream.expr.Explanation; +import org.apache.solr.client.solrj.io.stream.expr.Explanation.ExpressionType; +import org.apache.solr.client.solrj.io.stream.expr.Expressible; +import org.apache.solr.client.solrj.io.stream.expr.StreamExpression; +import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParameter; +import org.apache.solr.client.solrj.io.stream.expr.StreamFactory; + +public class UniformDistributionEvaluator extends ComplexEvaluator implements Expressible { + + private static final long serialVersionUID = 1; + + public UniformDistributionEvaluator(StreamExpression expression, StreamFactory factory) throws IOException { + super(expression, factory); + + if(2 != subEvaluators.size()){ + throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - expecting one column but found %d",expression,subEvaluators.size())); + } + } + + public Object evaluate(Tuple tuple) throws IOException { + + StreamEvaluator numEval1 = subEvaluators.get(0); + StreamEvaluator numEval2 = subEvaluators.get(1); + + Number lower = (Number)numEval1.evaluate(tuple); + Number upper = (Number)numEval2.evaluate(tuple); + + return new UniformRealDistribution(lower.doubleValue(), upper.doubleValue()); + } + + @Override + public StreamExpressionParameter toExpression(StreamFactory factory) throws IOException { + StreamExpression expression = new StreamExpression(factory.getFunctionName(getClass())); + return expression; + } + + @Override + public Explanation toExplanation(StreamFactory factory) throws IOException { + return new Explanation(nodeId.toString()) + .withExpressionType(ExpressionType.EVALUATOR) + .withFunctionName(factory.getFunctionName(getClass())) + 
.withImplementingClass(getClass().getName()) + .withExpression(toExpression(factory).toString()); + } +} \ No newline at end of file diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java index 58a53856d86..94f4106439a 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java @@ -5825,10 +5825,10 @@ public class StreamExpressionTest extends SolrCloudTestCase { assertEquals(p, 2.4, 0.001); } - /* + @Test - public void testArraySort() throws Exception { - String cexpr = "arraySort(array(11.5, 12.3, 4, 3, 1, 0))"; + public void testAscend() throws Exception { + String cexpr = "asc(array(11.5, 12.3, 4, 3, 1, 0))"; ModifiableSolrParams paramsLoc = new ModifiableSolrParams(); paramsLoc.set("expr", cexpr); paramsLoc.set("qt", "/stream"); @@ -5851,52 +5851,7 @@ public class StreamExpressionTest extends SolrCloudTestCase { assertEquals(asort.get(5).doubleValue(), 12.3, 0.0); } -*/ - @Test - public void testCumulativeProbability() throws Exception { - UpdateRequest updateRequest = new UpdateRequest(); - int i=0; - while(i<100) { - i=i+2; - updateRequest.add(id, "id_"+(i), "price_f", Integer.toString(i)); - } - - updateRequest.commit(cluster.getSolrClient(), COLLECTIONORALIAS); - - String expr = "search("+COLLECTIONORALIAS+", q=\"*:*\", fl=\"price_f\", sort=\"price_f asc\", rows=\"200\")"; - String cexpr = "let(a="+expr+", c=col(a, price_f), e=empiricalDistribution(c), " + - "tuple(p1=cumulativeProbability(e, 88), " + - "p2=cumulativeProbability(e, 2), " + - "p3=cumulativeProbability(e, 99), " + - "p4=cumulativeProbability(e, 77), " + - "p5=cumulativeProbability(e, 98)))"; - - ModifiableSolrParams paramsLoc = new ModifiableSolrParams(); - paramsLoc.set("expr", cexpr); - paramsLoc.set("qt", "/stream"); - - String url = 
cluster.getJettySolrRunners().get(0).getBaseUrl().toString()+"/"+COLLECTIONORALIAS; - TupleStream solrStream = new SolrStream(url, paramsLoc); - - StreamContext context = new StreamContext(); - solrStream.setStreamContext(context); - List tuples = getTuples(solrStream); - assertTrue(tuples.size() == 1); - double percentile1 = tuples.get(0).getDouble("p1"); - double percentile2 = tuples.get(0).getDouble("p2"); - double percentile3 = tuples.get(0).getDouble("p3"); - double percentile4 = tuples.get(0).getDouble("p4"); - double percentile5 = tuples.get(0).getDouble("p5"); - - - assertEquals(.88D, percentile1, 0.001); - assertEquals(.0D, percentile2, 0.001); - assertEquals(1.0D, percentile3, 0.001); - assertEquals(.78D, percentile4, 0.001); - assertEquals(.98D, percentile5, 0.001); - - } @Test public void testRankTransform() throws Exception { @@ -6022,6 +5977,71 @@ public class StreamExpressionTest extends SolrCloudTestCase { } + @Test + public void testDistributions() throws Exception { + String cexpr = "let(a=normalDistribution(10, 2), " + + "b=sample(a, 250), " + + "c=normalDistribution(100, 6), " + + "d=sample(c, 250), " + + "u=uniformDistribution(1, 6),"+ + "t=sample(u, 250),"+ + "e=empiricalDistribution(d),"+ + "f=sample(e, 250),"+ + "tuple(sample=b, ks=ks(a,b), ks2=ks(a, d), ks3=ks(u, t)))"; + ModifiableSolrParams paramsLoc = new ModifiableSolrParams(); + paramsLoc.set("expr", cexpr); + paramsLoc.set("qt", "/stream"); + String url = cluster.getJettySolrRunners().get(0).getBaseUrl().toString()+"/"+COLLECTIONORALIAS; + try { + TupleStream solrStream = new SolrStream(url, paramsLoc); + StreamContext context = new StreamContext(); + solrStream.setStreamContext(context); + List tuples = getTuples(solrStream); + assertTrue(tuples.size() == 1); + List out = (List) tuples.get(0).get("sample"); + + Map ks = (Map) tuples.get(0).get("ks"); + Map ks2 = (Map) tuples.get(0).get("ks2"); + Map ks3 = (Map) tuples.get(0).get("ks3"); + + assertTrue(out.size() == 250); + Number 
pvalue = (Number) ks.get("p-value"); + Number pvalue2 = (Number) ks2.get("p-value"); + Number pvalue3 = (Number) ks3.get("p-value"); + + assertTrue(pvalue.doubleValue() > .05D); + assertTrue(pvalue2.doubleValue() == 0); + assertTrue(pvalue3.doubleValue() > .05D); + + } catch(AssertionError e) { + + //This test will have random failures due to the random sampling. So if it fails try it again. + //If it fails twice in a row, we probably broke some code. + + TupleStream solrStream = new SolrStream(url, paramsLoc); + StreamContext context = new StreamContext(); + solrStream.setStreamContext(context); + List tuples = getTuples(solrStream); + assertTrue(tuples.size() == 1); + List out = (List) tuples.get(0).get("sample"); + + Map ks = (Map) tuples.get(0).get("ks"); + Map ks2 = (Map) tuples.get(0).get("ks2"); + Map ks3 = (Map) tuples.get(0).get("ks3"); + + assertTrue(out.size() == 250); + Number pvalue = (Number) ks.get("p-value"); + Number pvalue2 = (Number) ks2.get("p-value"); + Number pvalue3 = (Number) ks3.get("p-value"); + + assertTrue(pvalue.doubleValue() > .05D); + assertTrue(pvalue2.doubleValue() == 0); + assertTrue(pvalue3.doubleValue() > .05D); + } + } + + + @Test + public void testResiduals() throws Exception { + String cexpr = "let(a=array(1,2,3,4,5,6), b=array(2,4,6,8,10,12), c=regress(a,b), tuple(res=residuals(c,a,a)))"; From b058818a3edf5f7af94107dba2d208a68db0758b Mon Sep 17 00:00:00 2001 From: Joel Bernstein Date: Mon, 31 Jul 2017 15:57:30 -0400 Subject: [PATCH 37/95] SOLR-11160: Fix precommit --- .../client/solrj/io/eval/EmpiricalDistributionEvaluator.java | 1 - .../org/apache/solr/client/solrj/io/eval/SampleEvaluator.java | 1 - 2 files changed, 2 deletions(-) diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EmpiricalDistributionEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EmpiricalDistributionEvaluator.java index 5a507022e67..e57c225b38e 100644 ---
a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EmpiricalDistributionEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/EmpiricalDistributionEvaluator.java @@ -19,7 +19,6 @@ package org.apache.solr.client.solrj.io.eval; import java.io.IOException; import java.util.List; import java.util.Locale; -import java.util.Map; import java.util.Arrays; import org.apache.commons.math3.random.EmpiricalDistribution; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/SampleEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/SampleEvaluator.java index 7c59ed430f8..8b725cf0225 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/SampleEvaluator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/SampleEvaluator.java @@ -20,7 +20,6 @@ package org.apache.solr.client.solrj.io.eval; import java.io.IOException; import java.util.Locale; -import org.apache.commons.math3.distribution.NormalDistribution; import org.apache.commons.math3.distribution.RealDistribution; import org.apache.solr.client.solrj.io.Tuple; import org.apache.solr.client.solrj.io.stream.expr.Explanation; From 6404abd2009ed19941dfc0471f7abbf2a47a2b34 Mon Sep 17 00:00:00 2001 From: Steve Rowe Date: Mon, 31 Jul 2017 17:03:08 -0400 Subject: [PATCH 38/95] SOLR-10847: Provide a clear exception when attempting to use the terms component with points fields --- solr/CHANGES.txt | 3 ++ .../handler/component/TermsComponent.java | 10 +++++++ .../DistributedTermsComponentTest.java | 2 +- .../handler/component/TermsComponentTest.java | 30 ++++++++++++++++++- 4 files changed, 43 insertions(+), 2 deletions(-) diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index ed4d1e467fd..3f8f2e3b1f3 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -590,6 +590,9 @@ Other Changes * SOLR-10919: ord & rord functions give confusing errors with PointFields. 
(hossman, Steve Rowe) +* SOLR-10847: Provide a clear exception when attempting to use the terms component with points fields. + (hossman, Steve Rowe) + ================== 6.7.0 ================== Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release. diff --git a/solr/core/src/java/org/apache/solr/handler/component/TermsComponent.java b/solr/core/src/java/org/apache/solr/handler/component/TermsComponent.java index 80dfa40932f..3f2786b7231 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/TermsComponent.java +++ b/solr/core/src/java/org/apache/solr/handler/component/TermsComponent.java @@ -108,6 +108,16 @@ public class TermsComponent extends SearchComponent { rb.rsp.add("terms", termsResult); if (fields == null || fields.length==0) return; + + for (String field : fields) { + FieldType fieldType = rb.req.getSchema().getFieldTypeNoEx(field); + if (null != fieldType) { + if (fieldType.isPointField()) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "The terms component does not support Points-based field " + field); + } + } + } boolean termStats = params.getBool(TermsParams.TERMS_STATS, false); diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedTermsComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedTermsComponentTest.java index 6f719d2bd29..53ee9061e9e 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/DistributedTermsComponentTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedTermsComponentTest.java @@ -26,7 +26,7 @@ import org.junit.Test; * * @since solr 1.5 */ -@SuppressPointFields(bugUrl="https://issues.apache.org/jira/browse/SOLR-10847") +@SuppressPointFields(bugUrl="https://issues.apache.org/jira/browse/SOLR-11173") public class DistributedTermsComponentTest extends BaseDistributedSearchTestCase { @Test diff --git a/solr/core/src/test/org/apache/solr/handler/component/TermsComponentTest.java 
b/solr/core/src/test/org/apache/solr/handler/component/TermsComponentTest.java index 2a8b653aeac..ac7cf2ac057 100644 --- a/solr/core/src/test/org/apache/solr/handler/component/TermsComponentTest.java +++ b/solr/core/src/test/org/apache/solr/handler/component/TermsComponentTest.java @@ -16,6 +16,7 @@ */ package org.apache.solr.handler.component; import org.apache.solr.SolrTestCaseJ4; +import org.apache.solr.common.SolrException; import org.apache.solr.common.params.ModifiableSolrParams; import org.apache.solr.common.params.TermsParams; import org.apache.solr.request.SolrQueryRequest; @@ -29,7 +30,7 @@ import java.util.regex.Pattern; * **/ // TermsComponent not currently supported for PointFields -@SolrTestCaseJ4.SuppressPointFields(bugUrl="https://issues.apache.org/jira/browse/SOLR-10847") +@SolrTestCaseJ4.SuppressPointFields(bugUrl="https://issues.apache.org/jira/browse/SOLR-11173") public class TermsComponentTest extends SolrTestCaseJ4 { @BeforeClass @@ -378,4 +379,31 @@ public class TermsComponentTest extends SolrTestCaseJ4 { "//lst[@name='standardfilt']/lst[@name='aaa']/long[@name='ttf'][.='1']"); } + @Test + public void testPointField() throws Exception { + assertU(adoc("id", "10000", "foo_pi", "1")); + assertU(commit()); + + try { + final SolrQueryRequest req = req( + "qt", "/terms", + "terms", "true", + "terms.fl", "foo_pi"); + Exception e = expectThrows(SolrException.class, () -> h.query(req)); + assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ((SolrException) e).code()); + assertTrue(e.getMessage().contains("The terms component does not support Points-based field foo_pi")); + + final SolrQueryRequest req2 = req( + "qt", "/terms", + "terms", "true", + "terms.fl", "foo_pi", + "terms.list", "1"); + e = expectThrows(SolrException.class, () -> h.query(req2)); + assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ((SolrException) e).code()); + assertTrue(e.getMessage().contains("The terms component does not support Points-based field foo_pi")); + } 
finally { + assertU(delI("10000")); + assertU(commit()); + } + } } From 3a405971b9e06e2004e0d66ae1b82f530de969f2 Mon Sep 17 00:00:00 2001 From: Steve Rowe Date: Mon, 31 Jul 2017 18:21:49 -0400 Subject: [PATCH 39/95] SOLR-10033: Provide a clear exception when attempting to facet with facet.mincount=0 over points fields --- solr/CHANGES.txt | 3 ++ .../apache/solr/request/NumericFacets.java | 15 +++++++++- .../org/apache/solr/request/TestFaceting.java | 28 +++++++++++++++++-- 3 files changed, 43 insertions(+), 3 deletions(-) diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index 3f8f2e3b1f3..01b04aa37e7 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -592,6 +592,9 @@ Other Changes * SOLR-10847: Provide a clear exception when attempting to use the terms component with points fields. (hossman, Steve Rowe) + +* SOLR-10033: Provide a clear exception when attempting to facet with facet.mincount=0 over points fields. + (Steve Rowe) ================== 6.7.0 ================== diff --git a/solr/core/src/java/org/apache/solr/request/NumericFacets.java b/solr/core/src/java/org/apache/solr/request/NumericFacets.java index fd17f1f7397..f9f38b3dbaf 100644 --- a/solr/core/src/java/org/apache/solr/request/NumericFacets.java +++ b/solr/core/src/java/org/apache/solr/request/NumericFacets.java @@ -43,6 +43,7 @@ import org.apache.lucene.util.CharsRefBuilder; import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.PriorityQueue; import org.apache.lucene.util.StringHelper; +import org.apache.solr.common.SolrException; import org.apache.solr.common.params.FacetParams; import org.apache.solr.common.util.NamedList; import org.apache.solr.schema.FieldType; @@ -178,6 +179,11 @@ final class NumericFacets { if (numericType == null) { throw new IllegalStateException(); } + if (zeros && ft.isPointField()) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "Cannot use " + FacetParams.FACET_MINCOUNT + "=0 on field " + sf.getName() + " which is 
Points-based"); + } + zeros = zeros && !ft.isPointField() && sf.indexed(); // We don't return zeros when using PointFields or when index=false final List leaves = searcher.getIndexReader().leaves(); @@ -407,11 +413,18 @@ final class NumericFacets { private static NamedList getCountsMultiValued(SolrIndexSearcher searcher, DocSet docs, String fieldName, int offset, int limit, int mincount, boolean missing, String sort) throws IOException { // If facet.mincount=0 with PointFields the only option is to get the values from DocValues - // not currently supported. See SOLR-10033 + // not currently supported. See SOLR-11174 + boolean zeros = mincount <= 0; mincount = Math.max(mincount, 1); final SchemaField sf = searcher.getSchema().getField(fieldName); final FieldType ft = sf.getType(); assert sf.multiValued(); + + if (zeros && ft.isPointField()) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "Cannot use " + FacetParams.FACET_MINCOUNT + "=0 on field " + sf.getName() + " which is Points-based"); + } + final List leaves = searcher.getIndexReader().leaves(); // 1. 
accumulate diff --git a/solr/core/src/test/org/apache/solr/request/TestFaceting.java b/solr/core/src/test/org/apache/solr/request/TestFaceting.java index 9559b4ca405..1d99127d37e 100644 --- a/solr/core/src/test/org/apache/solr/request/TestFaceting.java +++ b/solr/core/src/test/org/apache/solr/request/TestFaceting.java @@ -27,6 +27,7 @@ import org.apache.lucene.index.Term; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.util.BytesRef; import org.apache.solr.SolrTestCaseJ4; +import org.apache.solr.common.SolrException; import org.apache.solr.common.params.FacetParams; import org.apache.solr.search.SolrIndexSearcher; import org.apache.solr.uninverting.DocTermOrds; @@ -335,7 +336,7 @@ public class TestFaceting extends SolrTestCaseJ4 { @Test public void testFacetSortWithMinCount0() { - assumeFalse("facet.mincount=0 doesn't work with point fields (SOLR-10033) or single valued DV", + assumeFalse("facet.mincount=0 doesn't work with point fields (SOLR-11174) or single valued DV", Boolean.getBoolean(NUMERIC_POINTS_SYSPROP) || Boolean.getBoolean(NUMERIC_DOCVALUES_SYSPROP)); assertU(adoc("id", "1", "f_td", "-420.126")); @@ -356,8 +357,31 @@ public class TestFaceting extends SolrTestCaseJ4 { "//lst[@name='facet_fields']/lst[@name='f_td']/int[3][@name='-1.218']"); } + @Test + public void testFacetOverPointFieldWithMinCount0() { + String field = "f_" + new String[]{"i","l","f","d"}[random().nextInt(4)] + "_p"; + final SolrQueryRequest req = req("q", "id:1.0", + FacetParams.FACET, "true", + FacetParams.FACET_FIELD, field, + FacetParams.FACET_MINCOUNT, "0", + FacetParams.FACET_METHOD, FacetParams.FACET_METHOD_fc); + Exception e = expectThrows(SolrException.class, () -> h.query(req)); + assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ((SolrException)e).code()); + assertTrue(e.getMessage().contains("Cannot use facet.mincount=0 on field " + field + " which is Points-based")); - public void testSimpleFacetCountsWithMultipleConfigurationsForSameField() { + 
String mvField = "f_" + new String[]{"is","ls","fs","ds"}[random().nextInt(4)] + "_p"; + final SolrQueryRequest req2 = req("q", "id:1.0", + FacetParams.FACET, "true", + FacetParams.FACET_FIELD, mvField, + FacetParams.FACET_MINCOUNT, "0", + FacetParams.FACET_METHOD, FacetParams.FACET_METHOD_fc); + e = expectThrows(SolrException.class, () -> h.query(req2)); + assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ((SolrException)e).code()); + assertTrue(e.getMessage().contains("Cannot use facet.mincount=0 on field " + mvField + " which is Points-based")); + } + + + public void testSimpleFacetCountsWithMultipleConfigurationsForSameField() { clearIndex(); String fname = "trait_ss"; assertU(adoc("id", "42", From 93ed4770ac82eb732c7409f82d02009e0fabe390 Mon Sep 17 00:00:00 2001 From: Cao Manh Dat Date: Tue, 1 Aug 2017 14:49:57 +0700 Subject: [PATCH 40/95] SOLR-9321: Remove deprecated methods of ClusterState --- solr/CHANGES.txt | 5 + .../java/org/apache/solr/cloud/CloudUtil.java | 7 +- .../org/apache/solr/cloud/DeleteShardCmd.java | 12 +- .../apache/solr/cloud/ElectionContext.java | 22 +- .../OverseerCollectionMessageHandler.java | 7 +- .../apache/solr/cloud/RecoveryStrategy.java | 4 +- .../org/apache/solr/cloud/ZkController.java | 16 +- .../solr/handler/CdcrRequestHandler.java | 4 +- .../solr/handler/SolrConfigHandler.java | 6 +- .../handler/admin/CollectionsHandler.java | 6 +- .../handler/component/HttpShardHandler.java | 2 +- .../solr/schema/ManagedIndexSchema.java | 6 +- .../search/join/ScoreJoinQParserPlugin.java | 2 +- .../org/apache/solr/servlet/HttpSolrCall.java | 3 +- .../processor/DistributedUpdateProcessor.java | 10 +- .../DocExpirationUpdateProcessorFactory.java | 2 +- .../java/org/apache/solr/util/SolrCLI.java | 6 +- .../solr/cloud/BasicDistributedZkTest.java | 7 +- .../solr/cloud/ChaosMonkeyShardSplitTest.java | 2 +- .../apache/solr/cloud/ClusterStateTest.java | 4 +- .../solr/cloud/ClusterStateUpdateTest.java | 4 +- 
.../CollectionsAPIAsyncDistributedZkTest.java | 4 +- .../apache/solr/cloud/ForceLeaderTest.java | 20 +- .../apache/solr/cloud/HttpPartitionTest.java | 10 +- .../org/apache/solr/cloud/OverseerTest.java | 40 +- .../solr/cloud/ReplicaPropertiesBase.java | 6 +- .../org/apache/solr/cloud/ShardSplitTest.java | 14 +- .../SharedFSAutoReplicaFailoverTest.java | 4 +- .../org/apache/solr/cloud/SliceStateTest.java | 2 +- .../solr/cloud/TestCloudDeleteByQuery.java | 2 +- .../apache/solr/cloud/TestCollectionAPI.java | 6 +- .../TestLeaderElectionWithEmptyReplica.java | 2 +- .../TestLeaderInitiatedRecoveryThread.java | 6 +- .../solr/cloud/TestMiniSolrCloudCluster.java | 389 ++++++++++ .../cloud/TestRandomRequestDistribution.java | 2 +- .../solr/cloud/TestReplicaProperties.java | 2 +- .../cloud/TestShortCircuitedRequests.java | 2 +- .../TestTolerantUpdateProcessorCloud.java | 2 +- .../solr/cloud/UnloadDistributedZkTest.java | 4 +- .../solr/cloud/hdfs/StressHdfsTest.java | 7 +- .../TestCloudManagedSchemaConcurrent.java | 717 ++++++++++++++++++ .../solr/schema/TestCloudSchemaless.java | 2 +- .../solr/common/cloud/ClusterState.java | 107 +-- .../solr/common/cloud/ClusterStateUtil.java | 2 +- .../solr/common/cloud/ZkStateReader.java | 7 +- .../solr/cloud/AbstractDistribZkTestBase.java | 15 +- .../cloud/AbstractFullDistribZkTestBase.java | 26 +- .../org/apache/solr/cloud/ChaosMonkey.java | 2 +- 48 files changed, 1296 insertions(+), 243 deletions(-) create mode 100644 solr/core/src/test/org/apache/solr/cloud/TestMiniSolrCloudCluster.java create mode 100644 solr/core/src/test/org/apache/solr/schema/TestCloudManagedSchemaConcurrent.java diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index 01b04aa37e7..21d0789da06 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -253,6 +253,9 @@ Upgrading from Solr 6.x replaced with 'sourceNode' and 'targetNode' instead. The old names will continue to work for back-compatibility but they will be removed in 8.0. See SOLR-11068 for more details. 
+* All deprecated methods of ClusterState (except getZkClusterStateVersion()) + have been removed. Use DocCollection methods instead. + New Features ---------------------- * SOLR-9857, SOLR-9858: Collect aggregated metrics from nodes and shard leaders in overseer. (ab) @@ -596,6 +599,8 @@ Other Changes * SOLR-10033: Provide a clear exception when attempting to facet with facet.mincount=0 over points fields. (Steve Rowe) +* SOLR-9321: Remove deprecated methods of ClusterState. (Jason Gerlowski, ishan, Cao Manh Dat) + ================== 6.7.0 ================== Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release. diff --git a/solr/core/src/java/org/apache/solr/cloud/CloudUtil.java b/solr/core/src/java/org/apache/solr/cloud/CloudUtil.java index c05072d5bf0..81de5cdae6d 100644 --- a/solr/core/src/java/org/apache/solr/cloud/CloudUtil.java +++ b/solr/core/src/java/org/apache/solr/cloud/CloudUtil.java @@ -27,6 +27,7 @@ import java.util.Map; import org.apache.commons.io.FileUtils; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrException.ErrorCode; +import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.Slice; import org.apache.solr.common.cloud.SolrZkClient; @@ -57,9 +58,9 @@ public class CloudUtil { log.debug("checkSharedFSFailoverReplaced running for coreNodeName={} baseUrl={}", thisCnn, thisBaseUrl); // if we see our core node name on a different base url, unload - Map slicesMap = zkController.getClusterState().getSlicesMap(desc.getCloudDescriptor().getCollectionName()); - - if (slicesMap != null) { + final DocCollection docCollection = zkController.getClusterState().getCollectionOrNull(desc.getCloudDescriptor().getCollectionName()); + if (docCollection != null && docCollection.getSlicesMap() != null) { + Map slicesMap = docCollection.getSlicesMap(); for (Slice slice : slicesMap.values()) { for (Replica replica : 
slice.getReplicas()) { diff --git a/solr/core/src/java/org/apache/solr/cloud/DeleteShardCmd.java b/solr/core/src/java/org/apache/solr/cloud/DeleteShardCmd.java index 71d9c46dc04..43bd6bd3f66 100644 --- a/solr/core/src/java/org/apache/solr/cloud/DeleteShardCmd.java +++ b/solr/core/src/java/org/apache/solr/cloud/DeleteShardCmd.java @@ -65,16 +65,10 @@ public class DeleteShardCmd implements Cmd { String sliceId = message.getStr(ZkStateReader.SHARD_ID_PROP); log.info("Delete shard invoked"); - Slice slice = clusterState.getSlice(collectionName, sliceId); + Slice slice = clusterState.getCollection(collectionName).getSlice(sliceId); + if (slice == null) throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + "No shard with name " + sliceId + " exists for collection " + collectionName); - if (slice == null) { - if (clusterState.hasCollection(collectionName)) { - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, - "No shard with name " + sliceId + " exists for collection " + collectionName); - } else { - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "No collection with the specified name exists: " + collectionName); - } - } // For now, only allow for deletions of Inactive slices or custom hashes (range==null). 
// TODO: Add check for range gaps on Slice deletion final Slice.State state = slice.getState(); diff --git a/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java b/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java index 588262d02fe..491ae00def5 100644 --- a/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java +++ b/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java @@ -31,6 +31,7 @@ import org.apache.solr.cloud.overseer.OverseerAction; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrException.ErrorCode; import org.apache.solr.common.cloud.ClusterState; +import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.Slice; import org.apache.solr.common.cloud.SolrZkClient; @@ -498,8 +499,7 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase { ZkStateReader zkStateReader = zkController.getZkStateReader(); zkStateReader.forceUpdateCollection(collection); ClusterState clusterState = zkStateReader.getClusterState(); - Replica rep = (clusterState == null) ? 
null - : clusterState.getReplica(collection, leaderProps.getStr(ZkStateReader.CORE_NODE_NAME_PROP)); + Replica rep = getReplica(clusterState, collection, leaderProps.getStr(ZkStateReader.CORE_NODE_NAME_PROP)); if (rep != null && rep.getState() != Replica.State.ACTIVE && rep.getState() != Replica.State.RECOVERING) { log.debug("We have become the leader after core registration but are not in an ACTIVE state - publishing ACTIVE"); @@ -507,6 +507,13 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase { } } } + + private Replica getReplica(ClusterState clusterState, String collectionName, String replicaName) { + if (clusterState == null) return null; + final DocCollection docCollection = clusterState.getCollectionOrNull(collectionName); + if (docCollection == null) return null; + return docCollection.getReplica(replicaName); + } public void checkLIR(String coreName, boolean allReplicasInLine) throws InterruptedException, KeeperException, IOException { @@ -604,7 +611,8 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase { long timeoutAt = System.nanoTime() + TimeUnit.NANOSECONDS.convert(timeoutms, TimeUnit.MILLISECONDS); final String shardsElectZkPath = electionPath + LeaderElector.ELECTION_NODE; - Slice slices = zkController.getClusterState().getSlice(collection, shardId); + DocCollection docCollection = zkController.getClusterState().getCollectionOrNull(collection); + Slice slices = (docCollection == null) ? null : docCollection.getSlice(shardId); int cnt = 0; while (!isClosed && !cc.isShutDown()) { // wait for everyone to be up @@ -649,7 +657,8 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase { } Thread.sleep(500); - slices = zkController.getClusterState().getSlice(collection, shardId); + docCollection = zkController.getClusterState().getCollectionOrNull(collection); + slices = (docCollection == null) ? 
null : docCollection.getSlice(shardId); cnt++; } return false; @@ -658,9 +667,10 @@ final class ShardLeaderElectionContext extends ShardLeaderElectionContextBase { // returns true if all replicas are found to be up, false if not private boolean areAllReplicasParticipating() throws InterruptedException { final String shardsElectZkPath = electionPath + LeaderElector.ELECTION_NODE; - Slice slices = zkController.getClusterState().getSlice(collection, shardId); + final DocCollection docCollection = zkController.getClusterState().getCollectionOrNull(collection); - if (slices != null) { + if (docCollection != null && docCollection.getSlice(shardId) != null) { + final Slice slices = docCollection.getSlice(shardId); int found = 0; try { found = zkClient.getChildren(shardsElectZkPath, null, true).size(); diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java b/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java index 2c55f3cdec4..095578f35b9 100644 --- a/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java +++ b/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionMessageHandler.java @@ -522,10 +522,9 @@ public class OverseerCollectionMessageHandler implements OverseerMessageHandler String waitForCoreNodeName(String collectionName, String msgNodeName, String msgCore) { int retryCount = 320; while (retryCount-- > 0) { - Map slicesMap = zkStateReader.getClusterState() - .getSlicesMap(collectionName); - if (slicesMap != null) { - + final DocCollection docCollection = zkStateReader.getClusterState().getCollectionOrNull(collectionName); + if (docCollection != null && docCollection.getSlicesMap() != null) { + Map slicesMap = docCollection.getSlicesMap(); for (Slice slice : slicesMap.values()) { for (Replica replica : slice.getReplicas()) { // TODO: for really large clusters, we could 'index' on this diff --git a/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java 
b/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java index 563cccf546a..8a6b99b2c95 100644 --- a/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java +++ b/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java @@ -545,8 +545,8 @@ public class RecoveryStrategy implements Runnable, Closeable { zkController.publish(core.getCoreDescriptor(), Replica.State.RECOVERING); - final Slice slice = zkStateReader.getClusterState().getSlice(cloudDesc.getCollectionName(), - cloudDesc.getShardId()); + final Slice slice = zkStateReader.getClusterState().getCollection(cloudDesc.getCollectionName()) + .getSlice(cloudDesc.getShardId()); try { prevSendPreRecoveryHttpUriRequest.abort(); diff --git a/solr/core/src/java/org/apache/solr/cloud/ZkController.java b/solr/core/src/java/org/apache/solr/cloud/ZkController.java index a529e94454d..491808e9c40 100644 --- a/solr/core/src/java/org/apache/solr/cloud/ZkController.java +++ b/solr/core/src/java/org/apache/solr/cloud/ZkController.java @@ -942,7 +942,8 @@ public class ZkController { try { // If we're a preferred leader, insert ourselves at the head of the queue boolean joinAtHead = false; - Replica replica = zkStateReader.getClusterState().getReplica(collection, coreZkNodeName); + final DocCollection docCollection = zkStateReader.getClusterState().getCollectionOrNull(collection); + Replica replica = (docCollection == null) ? 
null : docCollection.getReplica(coreZkNodeName); if (replica != null) { joinAtHead = replica.getBool(SliceMutator.PREFERRED_LEADER_PROP, false); } @@ -994,7 +995,7 @@ public class ZkController { // we will call register again after zk expiration and on reload if (!afterExpiration && !core.isReloaded() && ulog != null && !isTlogReplicaAndNotLeader) { // disable recovery in case shard is in construction state (for shard splits) - Slice slice = getClusterState().getSlice(collection, shardId); + Slice slice = getClusterState().getCollection(collection).getSlice(shardId); if (slice.getState() != Slice.State.CONSTRUCTION || !isLeader) { Future recoveryFuture = core.getUpdateHandler().getUpdateLog().recoverFromLog(); if (recoveryFuture != null) { @@ -1354,7 +1355,8 @@ public class ZkController { assert false : "No collection was specified [" + collection + "]"; return; } - Replica replica = zkStateReader.getClusterState().getReplica(collection, coreNodeName); + final DocCollection docCollection = zkStateReader.getClusterState().getCollectionOrNull(collection); + Replica replica = (docCollection == null) ? 
null : docCollection.getReplica(coreNodeName); if (replica == null || replica.getType() != Type.PULL) { ElectionContext context = electionContexts.remove(new ContextKey(collection, coreNodeName)); @@ -1408,10 +1410,10 @@ public class ZkController { int retryCount = 320; log.debug("look for our core node name"); while (retryCount-- > 0) { - Map slicesMap = zkStateReader.getClusterState() - .getSlicesMap(descriptor.getCloudDescriptor().getCollectionName()); - if (slicesMap != null) { - + final DocCollection docCollection = zkStateReader.getClusterState() + .getCollectionOrNull(descriptor.getCloudDescriptor().getCollectionName()); + if (docCollection != null && docCollection.getSlicesMap() != null) { + final Map slicesMap = docCollection.getSlicesMap(); for (Slice slice : slicesMap.values()) { for (Replica replica : slice.getReplicas()) { // TODO: for really large clusters, we could 'index' on this diff --git a/solr/core/src/java/org/apache/solr/handler/CdcrRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/CdcrRequestHandler.java index de861645c5c..38d386670fb 100644 --- a/solr/core/src/java/org/apache/solr/handler/CdcrRequestHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/CdcrRequestHandler.java @@ -42,6 +42,7 @@ import org.apache.solr.client.solrj.request.UpdateRequest; import org.apache.solr.cloud.ZkController; import org.apache.solr.common.SolrException; import org.apache.solr.common.cloud.ClusterState; +import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.common.cloud.Slice; import org.apache.solr.common.cloud.ZkCoreNodeProps; import org.apache.solr.common.cloud.ZkNodeProps; @@ -397,7 +398,8 @@ public class CdcrRequestHandler extends RequestHandlerBase implements SolrCoreAw log.warn("Error when updating cluster state", e); } ClusterState cstate = zkController.getClusterState(); - Collection shards = cstate.getActiveSlices(collection); + DocCollection docCollection = cstate.getCollectionOrNull(collection); + 
Collection shards = docCollection == null? null : docCollection.getActiveSlices(); ExecutorService parallelExecutor = ExecutorUtil.newMDCAwareCachedThreadPool(new DefaultSolrThreadFactory("parallelCdcrExecutor")); diff --git a/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java b/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java index 92a773aac09..8345b3c52f2 100644 --- a/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java @@ -47,6 +47,7 @@ import org.apache.solr.cloud.ZkController; import org.apache.solr.cloud.ZkSolrResourceLoader; import org.apache.solr.common.SolrException; import org.apache.solr.common.cloud.ClusterState; +import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.Slice; import org.apache.solr.common.params.CommonParams; @@ -789,8 +790,9 @@ public class SolrConfigHandler extends RequestHandlerBase implements SolrCoreAwa List activeReplicaCoreUrls = new ArrayList<>(); ClusterState clusterState = zkController.getZkStateReader().getClusterState(); Set liveNodes = clusterState.getLiveNodes(); - Collection activeSlices = clusterState.getActiveSlices(collection); - if (activeSlices != null && activeSlices.size() > 0) { + final DocCollection docCollection = clusterState.getCollectionOrNull(collection); + if (docCollection != null && docCollection.getActiveSlices() != null && docCollection.getActiveSlices().size() > 0) { + final Collection activeSlices = docCollection.getActiveSlices(); for (Slice next : activeSlices) { Map replicasMap = next.getReplicasMap(); if (replicasMap != null) { diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java index 3afad2fb7b7..e2880a848f1 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java +++ 
b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java @@ -1036,8 +1036,10 @@ public class CollectionsHandler extends RequestHandlerBase implements Permission for (int i = 0; i < numRetries; i++) { ClusterState clusterState = zkStateReader.getClusterState(); - Collection shards = clusterState.getSlices(collectionName); - if (shards != null) { + final DocCollection docCollection = clusterState.getCollectionOrNull(collectionName); + + if (docCollection != null && docCollection.getSlices() != null) { + Collection shards = docCollection.getSlices(); replicaNotAlive = null; for (Slice shard : shards) { Collection replicas; diff --git a/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandler.java b/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandler.java index 1c98f5814c1..33b1642b21d 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/component/HttpShardHandler.java @@ -387,7 +387,7 @@ public class HttpShardHandler extends ShardHandler { } else { if (clusterState == null) { clusterState = zkController.getClusterState(); - slices = clusterState.getSlicesMap(cloudDescriptor.getCollectionName()); + slices = clusterState.getCollection(cloudDescriptor.getCollectionName()).getSlicesMap(); } String sliceName = rb.slices[i]; diff --git a/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java b/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java index 078edfda356..c141e2680df 100644 --- a/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java +++ b/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java @@ -53,6 +53,7 @@ import org.apache.solr.cloud.ZkSolrResourceLoader; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrException.ErrorCode; import org.apache.solr.common.cloud.ClusterState; +import org.apache.solr.common.cloud.DocCollection; import 
org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.Slice; import org.apache.solr.common.cloud.SolrZkClient; @@ -287,8 +288,9 @@ public final class ManagedIndexSchema extends IndexSchema { ZkStateReader zkStateReader = zkController.getZkStateReader(); ClusterState clusterState = zkStateReader.getClusterState(); Set liveNodes = clusterState.getLiveNodes(); - Collection activeSlices = clusterState.getActiveSlices(collection); - if (activeSlices != null && activeSlices.size() > 0) { + final DocCollection docCollection = clusterState.getCollectionOrNull(collection); + if (docCollection != null && docCollection.getActiveSlices() != null && docCollection.getActiveSlices().size() > 0) { + final Collection activeSlices = docCollection.getActiveSlices(); for (Slice next : activeSlices) { Map replicasMap = next.getReplicasMap(); if (replicasMap != null) { diff --git a/solr/core/src/java/org/apache/solr/search/join/ScoreJoinQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/join/ScoreJoinQParserPlugin.java index a49195cdea4..6715fd88180 100644 --- a/solr/core/src/java/org/apache/solr/search/join/ScoreJoinQParserPlugin.java +++ b/solr/core/src/java/org/apache/solr/search/join/ScoreJoinQParserPlugin.java @@ -310,7 +310,7 @@ public class ScoreJoinQParserPlugin extends QParserPlugin { String fromReplica = null; String nodeName = zkController.getNodeName(); - for (Slice slice : zkController.getClusterState().getActiveSlices(fromIndex)) { + for (Slice slice : zkController.getClusterState().getCollection(fromIndex).getActiveSlices()) { if (fromReplica != null) throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "SolrCloud join: multiple shards not yet supported " + fromIndex); diff --git a/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java b/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java index a548c05389c..0a6e62cfbcd 100644 --- a/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java +++ 
b/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java @@ -898,7 +898,8 @@ public class HttpSolrCall { private String getRemotCoreUrl(String collectionName, String origCorename) { ClusterState clusterState = cores.getZkController().getClusterState(); - Collection slices = clusterState.getActiveSlices(collectionName); + final DocCollection docCollection = clusterState.getCollectionOrNull(collectionName); + Collection slices = (docCollection != null) ? docCollection.getActiveSlices() : null; boolean byCoreName = false; if (slices == null) { diff --git a/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java b/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java index 5269ecb7144..45f6ea2ebca 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java +++ b/solr/core/src/java/org/apache/solr/update/processor/DistributedUpdateProcessor.java @@ -551,8 +551,9 @@ public class DistributedUpdateProcessor extends UpdateRequestProcessor { if (id == null) { for (Entry entry : routingRules.entrySet()) { String targetCollectionName = entry.getValue().getTargetCollectionName(); - Collection activeSlices = cstate.getActiveSlices(targetCollectionName); - if (activeSlices != null && !activeSlices.isEmpty()) { + final DocCollection docCollection = cstate.getCollectionOrNull(targetCollectionName); + if (docCollection != null && docCollection.getActiveSlices() != null && !docCollection.getActiveSlices().isEmpty()) { + final Collection activeSlices = docCollection.getActiveSlices(); Slice any = activeSlices.iterator().next(); if (nodes == null) nodes = new ArrayList<>(); nodes.add(new StdNode(new ZkCoreNodeProps(any.getLeader()))); @@ -1973,11 +1974,12 @@ public class DistributedUpdateProcessor extends UpdateRequestProcessor { private List getCollectionUrls(SolrQueryRequest req, String collection, EnumSet types) { ClusterState clusterState = req.getCore() 
.getCoreContainer().getZkController().getClusterState(); - Map slices = clusterState.getSlicesMap(collection); - if (slices == null) { + final DocCollection docCollection = clusterState.getCollectionOrNull(collection); + if (collection == null || docCollection.getSlicesMap() == null) { throw new ZooKeeperException(ErrorCode.BAD_REQUEST, "Could not find collection in zk: " + clusterState); } + Map slices = docCollection.getSlicesMap(); final List urls = new ArrayList<>(slices.size()); for (Map.Entry sliceEntry : slices.entrySet()) { Slice replicas = slices.get(sliceEntry.getKey()); diff --git a/solr/core/src/java/org/apache/solr/update/processor/DocExpirationUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/DocExpirationUpdateProcessorFactory.java index 9c2d08d9aae..4cbea31f524 100644 --- a/solr/core/src/java/org/apache/solr/update/processor/DocExpirationUpdateProcessorFactory.java +++ b/solr/core/src/java/org/apache/solr/update/processor/DocExpirationUpdateProcessorFactory.java @@ -469,7 +469,7 @@ public final class DocExpirationUpdateProcessorFactory CloudDescriptor desc = core.getCoreDescriptor().getCloudDescriptor(); String col = desc.getCollectionName(); - List slices = new ArrayList(zk.getClusterState().getActiveSlices(col)); + List slices = new ArrayList(zk.getClusterState().getCollection(col).getActiveSlices()); Collections.sort(slices, COMPARE_SLICES_BY_NAME); if (slices.isEmpty()) { log.error("Collection {} has no active Slices?", col); diff --git a/solr/core/src/java/org/apache/solr/util/SolrCLI.java b/solr/core/src/java/org/apache/solr/util/SolrCLI.java index 657b402bed9..da53afff09c 100644 --- a/solr/core/src/java/org/apache/solr/util/SolrCLI.java +++ b/solr/core/src/java/org/apache/solr/util/SolrCLI.java @@ -106,6 +106,7 @@ import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest; import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.common.SolrException; import 
org.apache.solr.common.cloud.ClusterState; +import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.Slice; import org.apache.solr.common.cloud.SolrZkClient; @@ -1170,10 +1171,11 @@ public class SolrCLI { ClusterState clusterState = zkStateReader.getClusterState(); Set liveNodes = clusterState.getLiveNodes(); - Collection slices = clusterState.getSlices(collection); - if (slices == null) + final DocCollection docCollection = clusterState.getCollectionOrNull(collection); + if (docCollection == null || docCollection.getSlices() == null) throw new IllegalArgumentException("Collection "+collection+" not found!"); + Collection slices = docCollection.getSlices(); // Test http code using a HEAD request first, fail fast if authentication failure String urlForColl = zkStateReader.getLeaderUrl(collection, slices.stream().findFirst().get().getName(), 1000); attemptHttpHead(urlForColl, cloudSolrClient.getHttpClient()); diff --git a/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java b/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java index c095c25ff9b..66b786648d7 100644 --- a/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/BasicDistributedZkTest.java @@ -649,7 +649,7 @@ public class BasicDistributedZkTest extends AbstractFullDistribZkTestBase { protected ZkCoreNodeProps getLeaderUrlFromZk(String collection, String slice) { ClusterState clusterState = getCommonCloudSolrClient().getZkStateReader().getClusterState(); - ZkNodeProps leader = clusterState.getLeader(collection, slice); + ZkNodeProps leader = clusterState.getCollection(collection).getLeader(slice); if (leader == null) { throw new RuntimeException("Could not find leader:" + collection + " " + slice); } @@ -850,10 +850,11 @@ public class BasicDistributedZkTest extends AbstractFullDistribZkTestBase { // we added a role of none on these creates 
- check for it ZkStateReader zkStateReader = getCommonCloudSolrClient().getZkStateReader(); zkStateReader.forceUpdateCollection(oneInstanceCollection2); - Map slices = zkStateReader.getClusterState().getSlicesMap(oneInstanceCollection2); + Map slices = zkStateReader.getClusterState().getCollection(oneInstanceCollection2).getSlicesMap(); assertNotNull(slices); - ZkCoreNodeProps props = new ZkCoreNodeProps(getCommonCloudSolrClient().getZkStateReader().getClusterState().getLeader(oneInstanceCollection2, "shard1")); + ZkCoreNodeProps props = new ZkCoreNodeProps(getCommonCloudSolrClient().getZkStateReader().getClusterState() + .getCollection(oneInstanceCollection2).getLeader("shard1")); // now test that unloading a core gets us a new leader try (HttpSolrClient unloadClient = getHttpSolrClient(jettys.get(0).getBaseUrl().toString(), 15000, 60000)) { diff --git a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyShardSplitTest.java b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyShardSplitTest.java index 7e840daf2eb..1a0138639d6 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyShardSplitTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeyShardSplitTest.java @@ -60,7 +60,7 @@ public class ChaosMonkeyShardSplitTest extends ShardSplitTest { ClusterState clusterState = cloudClient.getZkStateReader().getClusterState(); final DocRouter router = clusterState.getCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION).getRouter(); - Slice shard1 = clusterState.getSlice(AbstractDistribZkTestBase.DEFAULT_COLLECTION, SHARD1); + Slice shard1 = clusterState.getCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION).getSlice(SHARD1); DocRouter.Range shard1Range = shard1.getRange() != null ? 
shard1.getRange() : router.fullRange(); final List ranges = router.partitionRange(2, shard1Range); final int[] docCounts = new int[ranges.size()]; diff --git a/solr/core/src/test/org/apache/solr/cloud/ClusterStateTest.java b/solr/core/src/test/org/apache/solr/cloud/ClusterStateTest.java index b38d459b6cc..58e0a60aa25 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ClusterStateTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ClusterStateTest.java @@ -62,8 +62,8 @@ public class ClusterStateTest extends SolrTestCaseJ4 { assertEquals("Provided liveNodes not used properly", 2, loadedClusterState .getLiveNodes().size()); assertEquals("No collections found", 2, loadedClusterState.getCollectionsMap().size()); - assertEquals("Properties not copied properly", replica.getStr("prop1"), loadedClusterState.getSlice("collection1", "shard1").getReplicasMap().get("node1").getStr("prop1")); - assertEquals("Properties not copied properly", replica.getStr("prop2"), loadedClusterState.getSlice("collection1", "shard1").getReplicasMap().get("node1").getStr("prop2")); + assertEquals("Properties not copied properly", replica.getStr("prop1"), loadedClusterState.getCollection("collection1").getSlice("shard1").getReplicasMap().get("node1").getStr("prop1")); + assertEquals("Properties not copied properly", replica.getStr("prop2"), loadedClusterState.getCollection("collection1").getSlice("shard1").getReplicasMap().get("node1").getStr("prop2")); loadedClusterState = ClusterState.load(-1, new byte[0], liveNodes); diff --git a/solr/core/src/test/org/apache/solr/cloud/ClusterStateUpdateTest.java b/solr/core/src/test/org/apache/solr/cloud/ClusterStateUpdateTest.java index df1cd8eca05..3658430b069 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ClusterStateUpdateTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ClusterStateUpdateTest.java @@ -24,6 +24,7 @@ import java.util.Set; import org.apache.lucene.util.LuceneTestCase.Slow; import 
org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.common.cloud.ClusterState; +import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.Slice; import org.apache.solr.common.cloud.ZkStateReader; @@ -75,7 +76,8 @@ public class ClusterStateUpdateTest extends SolrCloudTestCase { Map slices = null; for (int i = 75; i > 0; i--) { clusterState2 = zkController2.getClusterState(); - slices = clusterState2.getSlicesMap("testcore"); + DocCollection docCollection = clusterState2.getCollectionOrNull("testcore"); + slices = docCollection == null ? null : docCollection.getSlicesMap(); if (slices != null && slices.containsKey("shard1") && slices.get("shard1").getReplicasMap().size() > 0) { diff --git a/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIAsyncDistributedZkTest.java b/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIAsyncDistributedZkTest.java index 1474b5c52cd..c3dc44b0f7c 100644 --- a/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIAsyncDistributedZkTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/CollectionsAPIAsyncDistributedZkTest.java @@ -125,7 +125,7 @@ public class CollectionsAPIAsyncDistributedZkTest extends SolrCloudTestCase { assertSame("AddReplica did not complete", RequestStatusState.COMPLETED, state); //cloudClient watch might take a couple of seconds to reflect it - Slice shard1 = client.getZkStateReader().getClusterState().getSlice(collection, "shard1"); + Slice shard1 = client.getZkStateReader().getClusterState().getCollection(collection).getSlice("shard1"); int count = 0; while (shard1.getReplicas().size() != 2) { if (count++ > 1000) { @@ -163,7 +163,7 @@ public class CollectionsAPIAsyncDistributedZkTest extends SolrCloudTestCase { } } - shard1 = client.getZkStateReader().getClusterState().getSlice(collection, "shard1"); + shard1 = 
client.getZkStateReader().getClusterState().getCollection(collection).getSlice("shard1"); String replicaName = shard1.getReplicas().iterator().next().getName(); state = CollectionAdminRequest.deleteReplica(collection, "shard1", replicaName) .processAndWait(client, MAX_TIMEOUT_SECONDS); diff --git a/solr/core/src/test/org/apache/solr/cloud/ForceLeaderTest.java b/solr/core/src/test/org/apache/solr/cloud/ForceLeaderTest.java index b60609fd36d..749abdf804d 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ForceLeaderTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ForceLeaderTest.java @@ -102,7 +102,7 @@ public class ForceLeaderTest extends HttpPartitionTest { "; clusterState: " + printClusterStateInfo(), 0, numActiveReplicas); int numReplicasOnLiveNodes = 0; - for (Replica rep : clusterState.getSlice(testCollectionName, SHARD1).getReplicas()) { + for (Replica rep : clusterState.getCollection(testCollectionName).getSlice(SHARD1).getReplicas()) { if (clusterState.getLiveNodes().contains(rep.getNodeName())) { numReplicasOnLiveNodes++; } @@ -110,8 +110,8 @@ public class ForceLeaderTest extends HttpPartitionTest { assertEquals(2, numReplicasOnLiveNodes); log.info("Before forcing leader: " + printClusterStateInfo()); // Assert there is no leader yet - assertNull("Expected no leader right now. State: " + clusterState.getSlice(testCollectionName, SHARD1), - clusterState.getSlice(testCollectionName, SHARD1).getLeader()); + assertNull("Expected no leader right now. 
State: " + clusterState.getCollection(testCollectionName).getSlice(SHARD1), + clusterState.getCollection(testCollectionName).getSlice(SHARD1).getLeader()); assertSendDocFails(3); @@ -122,9 +122,9 @@ public class ForceLeaderTest extends HttpPartitionTest { cloudClient.getZkStateReader().forceUpdateCollection(testCollectionName); clusterState = cloudClient.getZkStateReader().getClusterState(); - log.info("After forcing leader: " + clusterState.getSlice(testCollectionName, SHARD1)); + log.info("After forcing leader: " + clusterState.getCollection(testCollectionName).getSlice(SHARD1)); // we have a leader - Replica newLeader = clusterState.getSlice(testCollectionName, SHARD1).getLeader(); + Replica newLeader = clusterState.getCollectionOrNull(testCollectionName).getSlice(SHARD1).getLeader(); assertNotNull(newLeader); // leader is active assertEquals(State.ACTIVE, newLeader.getState()); @@ -216,7 +216,7 @@ public class ForceLeaderTest extends HttpPartitionTest { boolean transition = false; for (int counter = 10; counter > 0; counter--) { clusterState = zkStateReader.getClusterState(); - Replica newLeader = clusterState.getSlice(collection, slice).getLeader(); + Replica newLeader = clusterState.getCollection(collection).getSlice(slice).getLeader(); if (newLeader == null) { transition = true; break; @@ -250,7 +250,7 @@ public class ForceLeaderTest extends HttpPartitionTest { Replica.State replicaState = null; for (int counter = 10; counter > 0; counter--) { ClusterState clusterState = zkStateReader.getClusterState(); - replicaState = clusterState.getSlice(collection, slice).getReplica(replica.getName()).getState(); + replicaState = clusterState.getCollection(collection).getSlice(slice).getReplica(replica.getName()).getState(); if (replicaState == state) { transition = true; break; @@ -349,7 +349,7 @@ public class ForceLeaderTest extends HttpPartitionTest { for (State lirState : lirStates) if (Replica.State.DOWN.equals(lirState) == false) allDown = false; - if (allDown && 
clusterState.getSlice(collectionName, shard).getLeader() == null) { + if (allDown && clusterState.getCollection(collectionName).getSlice(shard).getLeader() == null) { break; } log.warn("Attempt " + i + ", waiting on for 1 sec to settle down in the steady state. State: " + @@ -381,7 +381,7 @@ public class ForceLeaderTest extends HttpPartitionTest { waitForRecoveriesToFinish(collection, cloudClient.getZkStateReader(), true); cloudClient.getZkStateReader().forceUpdateCollection(collection); ClusterState clusterState = cloudClient.getZkStateReader().getClusterState(); - log.info("After bringing back leader: " + clusterState.getSlice(collection, SHARD1)); + log.info("After bringing back leader: " + clusterState.getCollection(collection).getSlice(SHARD1)); int numActiveReplicas = getNumberOfActiveReplicas(clusterState, collection, SHARD1); assertEquals(1+notLeaders.size(), numActiveReplicas); log.info("Sending doc "+docid+"..."); @@ -423,7 +423,7 @@ public class ForceLeaderTest extends HttpPartitionTest { protected int getNumberOfActiveReplicas(ClusterState clusterState, String collection, String sliceId) { int numActiveReplicas = 0; // Assert all replicas are active - for (Replica rep : clusterState.getSlice(collection, sliceId).getReplicas()) { + for (Replica rep : clusterState.getCollection(collection).getSlice(sliceId).getReplicas()) { if (rep.getState().equals(State.ACTIVE)) { numActiveReplicas++; } diff --git a/solr/core/src/test/org/apache/solr/cloud/HttpPartitionTest.java b/solr/core/src/test/org/apache/solr/cloud/HttpPartitionTest.java index aeaa7e9fb1d..0a56e767951 100644 --- a/solr/core/src/test/org/apache/solr/cloud/HttpPartitionTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/HttpPartitionTest.java @@ -42,6 +42,7 @@ import org.apache.solr.client.solrj.request.UpdateRequest; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.cloud.ClusterState; +import 
org.apache.solr.common.cloud.DocCollection; import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.Slice; import org.apache.solr.common.cloud.SolrZkClient; @@ -233,7 +234,7 @@ public class HttpPartitionTest extends AbstractFullDistribZkTestBase { ZkStateReader zkr = cloudClient.getZkStateReader(); zkr.forceUpdateCollection(testCollectionName);; // force the state to be fresh ClusterState cs = zkr.getClusterState(); - Collection slices = cs.getActiveSlices(testCollectionName); + Collection slices = cs.getCollection(testCollectionName).getActiveSlices(); Slice slice = slices.iterator().next(); Replica partitionedReplica = slice.getReplica(notLeaders.get(0).getName()); assertEquals("The partitioned replica did not get marked down", @@ -522,7 +523,7 @@ public class HttpPartitionTest extends AbstractFullDistribZkTestBase { ZkStateReader zkr = cloudClient.getZkStateReader(); ClusterState cs = zkr.getClusterState(); assertNotNull(cs); - for (Slice shard : cs.getActiveSlices(testCollectionName)) { + for (Slice shard : cs.getCollection(testCollectionName).getActiveSlices()) { if (shard.getName().equals(shardId)) { for (Replica replica : shard.getReplicas()) { final Replica.State state = replica.getState(); @@ -629,14 +630,15 @@ public class HttpPartitionTest extends AbstractFullDistribZkTestBase { ZkStateReader zkr = cloudClient.getZkStateReader(); zkr.forceUpdateCollection(testCollectionName); ClusterState cs = zkr.getClusterState(); - Collection slices = cs.getActiveSlices(testCollectionName); boolean allReplicasUp = false; long waitMs = 0L; long maxWaitMs = maxWaitSecs * 1000L; while (waitMs < maxWaitMs && !allReplicasUp) { cs = cloudClient.getZkStateReader().getClusterState(); assertNotNull(cs); - Slice shard = cs.getSlice(testCollectionName, shardId); + final DocCollection docCollection = cs.getCollectionOrNull(testCollectionName); + assertNotNull(docCollection); + Slice shard = docCollection.getSlice(shardId); assertNotNull("No Slice for 
"+shardId, shard); allReplicasUp = true; // assume true diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java index f6abb54b77f..1fbf98cf233 100644 --- a/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java @@ -205,7 +205,9 @@ public class OverseerTest extends SolrTestCaseJ4 { } private String getShardId(String collection, String coreNodeName) { - Map slices = zkStateReader.getClusterState().getSlicesMap(collection); + DocCollection dc = zkStateReader.getClusterState().getCollectionOrNull(collection); + if (dc == null) return null; + Map slices = dc.getSlicesMap(); if (slices != null) { for (Slice slice : slices.values()) { for (Replica replica : slice.getReplicas()) { @@ -291,10 +293,10 @@ public class OverseerTest extends SolrTestCaseJ4 { for (int i = 0; i < numShards; i++) { assertNotNull("shard got no id?", zkController.publishState(COLLECTION, "core" + (i+1), "node" + (i+1), "shard"+((i%3)+1), Replica.State.ACTIVE, 3)); } - final Map rmap = reader.getClusterState().getSlice(COLLECTION, "shard1").getReplicasMap(); + final Map rmap = reader.getClusterState().getCollection(COLLECTION).getSlice("shard1").getReplicasMap(); assertEquals(rmap.toString(), 2, rmap.size()); - assertEquals(rmap.toString(), 2, reader.getClusterState().getSlice(COLLECTION, "shard2").getReplicasMap().size()); - assertEquals(rmap.toString(), 2, reader.getClusterState().getSlice(COLLECTION, "shard3").getReplicasMap().size()); + assertEquals(rmap.toString(), 2, reader.getClusterState().getCollection(COLLECTION).getSlice("shard2").getReplicasMap().size()); + assertEquals(rmap.toString(), 2, reader.getClusterState().getCollection(COLLECTION).getSlice("shard3").getReplicasMap().size()); //make sure leaders are in cloud state assertNotNull(reader.getLeaderUrl(COLLECTION, "shard1", 15000)); @@ -343,9 +345,9 @@ public class OverseerTest extends 
SolrTestCaseJ4 { "node" + (i+1), "shard"+((i%3)+1) , Replica.State.ACTIVE, 3)); } - assertEquals(1, reader.getClusterState().getSlice(COLLECTION, "shard1").getReplicasMap().size()); - assertEquals(1, reader.getClusterState().getSlice(COLLECTION, "shard2").getReplicasMap().size()); - assertEquals(1, reader.getClusterState().getSlice(COLLECTION, "shard3").getReplicasMap().size()); + assertEquals(1, reader.getClusterState().getCollection(COLLECTION).getSlice("shard1").getReplicasMap().size()); + assertEquals(1, reader.getClusterState().getCollection(COLLECTION).getSlice("shard2").getReplicasMap().size()); + assertEquals(1, reader.getClusterState().getCollection(COLLECTION).getSlice("shard3").getReplicasMap().size()); //make sure leaders are in cloud state assertNotNull(reader.getLeaderUrl(COLLECTION, "shard1", 15000)); @@ -364,9 +366,9 @@ public class OverseerTest extends SolrTestCaseJ4 { "core" + (i + 1), "node" + (i + 1),"shard"+((i%3)+1), Replica.State.ACTIVE, 3)); } - assertEquals(1, reader.getClusterState().getSlice("collection2", "shard1").getReplicasMap().size()); - assertEquals(1, reader.getClusterState().getSlice("collection2", "shard2").getReplicasMap().size()); - assertEquals(1, reader.getClusterState().getSlice("collection2", "shard3").getReplicasMap().size()); + assertEquals(1, reader.getClusterState().getCollection("collection2").getSlice("shard1").getReplicasMap().size()); + assertEquals(1, reader.getClusterState().getCollection("collection2").getSlice("shard2").getReplicasMap().size()); + assertEquals(1, reader.getClusterState().getCollection("collection2").getSlice("shard3").getReplicasMap().size()); //make sure leaders are in cloud state assertNotNull(reader.getLeaderUrl("collection2", "shard1", 15000)); @@ -474,7 +476,7 @@ public class OverseerTest extends SolrTestCaseJ4 { private void verifyShardLeader(ZkStateReader reader, String collection, String shard, String expectedCore) throws InterruptedException, KeeperException { int maxIterations = 200; 
while(maxIterations-->0) { - ZkNodeProps props = reader.getClusterState().getLeader(collection, shard); + ZkNodeProps props = reader.getClusterState().getCollection(collection).getLeader(shard); if(props!=null) { if(expectedCore.equals(props.getStr(ZkStateReader.CORE_NAME_PROP))) { return; @@ -482,8 +484,9 @@ public class OverseerTest extends SolrTestCaseJ4 { } Thread.sleep(200); } - - assertEquals("Unexpected shard leader coll:" + collection + " shard:" + shard, expectedCore, (reader.getClusterState().getLeader(collection, shard)!=null)?reader.getClusterState().getLeader(collection, shard).getStr(ZkStateReader.CORE_NAME_PROP):null); + DocCollection docCollection = reader.getClusterState().getCollection(collection); + assertEquals("Unexpected shard leader coll:" + collection + " shard:" + shard, expectedCore, + (docCollection.getLeader(shard)!=null)?docCollection.getLeader(shard).getStr(ZkStateReader.CORE_NAME_PROP):null); } @Test @@ -553,7 +556,7 @@ public class OverseerTest extends SolrTestCaseJ4 { assertEquals("Live nodes count does not match", 1, reader .getClusterState().getLiveNodes().size()); assertEquals(shard+" replica count does not match", 1, reader.getClusterState() - .getSlice(COLLECTION, shard).getReplicasMap().size()); + .getCollection(COLLECTION).getSlice(shard).getReplicasMap().size()); version = getClusterStateVersion(zkClient); mockController.publishState(COLLECTION, core, core_node, "shard1", null, numShards); while (version == getClusterStateVersion(zkClient)); @@ -1004,12 +1007,13 @@ public class OverseerTest extends SolrTestCaseJ4 { queue.offer(Utils.toJSON(m)); for(int i=0;i<100;i++) { - Slice s = reader.getClusterState().getSlice(COLLECTION, "shard1"); + DocCollection dc = reader.getClusterState().getCollectionOrNull(COLLECTION); + Slice s = dc == null? 
null : dc.getSlice("shard1"); if(s!=null && s.getReplicasMap().size()==3) break; Thread.sleep(100); } - assertNotNull(reader.getClusterState().getSlice(COLLECTION, "shard1")); - assertEquals(3, reader.getClusterState().getSlice(COLLECTION, "shard1").getReplicasMap().size()); + assertNotNull(reader.getClusterState().getCollection(COLLECTION).getSlice("shard1")); + assertEquals(3, reader.getClusterState().getCollection(COLLECTION).getSlice("shard1").getReplicasMap().size()); } finally { close(overseerClient); close(zkClient); @@ -1278,7 +1282,7 @@ public class OverseerTest extends SolrTestCaseJ4 { { int iterationsLeft = 100; while (iterationsLeft-- > 0) { - final Slice slice = zkStateReader.getClusterState().getSlice(COLLECTION, "shard"+ss); + final Slice slice = zkStateReader.getClusterState().getCollection(COLLECTION).getSlice("shard"+ss); if (null == slice || null == slice.getReplicasMap().get("core_node"+N)) { break; } diff --git a/solr/core/src/test/org/apache/solr/cloud/ReplicaPropertiesBase.java b/solr/core/src/test/org/apache/solr/cloud/ReplicaPropertiesBase.java index fe83a8431a3..0cb3f8f87dd 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ReplicaPropertiesBase.java +++ b/solr/core/src/test/org/apache/solr/cloud/ReplicaPropertiesBase.java @@ -57,7 +57,8 @@ public abstract class ReplicaPropertiesBase extends AbstractFullDistribZkTestBas Replica replica = null; for (int idx = 0; idx < 300; ++idx) { clusterState = client.getZkStateReader().getClusterState(); - replica = clusterState.getReplica(collectionName, replicaName); + final DocCollection docCollection = clusterState.getCollectionOrNull(collectionName); + replica = (docCollection == null) ? null : docCollection.getReplica(replicaName); if (replica == null) { fail("Could not find collection/replica pair! 
" + collectionName + "/" + replicaName); } @@ -82,7 +83,8 @@ public abstract class ReplicaPropertiesBase extends AbstractFullDistribZkTestBas for (int idx = 0; idx < 300; ++idx) { // Keep trying while Overseer writes the ZK state for up to 30 seconds. clusterState = client.getZkStateReader().getClusterState(); - replica = clusterState.getReplica(collectionName, replicaName); + final DocCollection docCollection = clusterState.getCollectionOrNull(collectionName); + replica = (docCollection == null) ? null : docCollection.getReplica(replicaName); if (replica == null) { fail("Could not find collection/replica pair! " + collectionName + "/" + replicaName); } diff --git a/solr/core/src/test/org/apache/solr/cloud/ShardSplitTest.java b/solr/core/src/test/org/apache/solr/cloud/ShardSplitTest.java index 21f5b3cbe1b..1593e784f47 100644 --- a/solr/core/src/test/org/apache/solr/cloud/ShardSplitTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/ShardSplitTest.java @@ -537,7 +537,7 @@ public class ShardSplitTest extends BasicDistributedZkTest { private void incompleteOrOverlappingCustomRangeTest() throws Exception { ClusterState clusterState = cloudClient.getZkStateReader().getClusterState(); final DocRouter router = clusterState.getCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION).getRouter(); - Slice shard1 = clusterState.getSlice(AbstractDistribZkTestBase.DEFAULT_COLLECTION, SHARD1); + Slice shard1 = clusterState.getCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION).getSlice(SHARD1); DocRouter.Range shard1Range = shard1.getRange() != null ? 
shard1.getRange() : router.fullRange(); List subRanges = new ArrayList<>(); @@ -581,7 +581,7 @@ public class ShardSplitTest extends BasicDistributedZkTest { private void splitByUniqueKeyTest() throws Exception { ClusterState clusterState = cloudClient.getZkStateReader().getClusterState(); final DocRouter router = clusterState.getCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION).getRouter(); - Slice shard1 = clusterState.getSlice(AbstractDistribZkTestBase.DEFAULT_COLLECTION, SHARD1); + Slice shard1 = clusterState.getCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION).getSlice(SHARD1); DocRouter.Range shard1Range = shard1.getRange() != null ? shard1.getRange() : router.fullRange(); List subRanges = new ArrayList<>(); if (usually()) { @@ -696,7 +696,7 @@ public class ShardSplitTest extends BasicDistributedZkTest { ClusterState clusterState = cloudClient.getZkStateReader().getClusterState(); final DocRouter router = clusterState.getCollection(collectionName).getRouter(); - Slice shard1 = clusterState.getSlice(collectionName, SHARD1); + Slice shard1 = clusterState.getCollection(collectionName).getSlice(SHARD1); DocRouter.Range shard1Range = shard1.getRange() != null ? shard1.getRange() : router.fullRange(); final List ranges = router.partitionRange(2, shard1Range); final int[] docCounts = new int[ranges.size()]; @@ -772,7 +772,7 @@ public class ShardSplitTest extends BasicDistributedZkTest { ClusterState clusterState = cloudClient.getZkStateReader().getClusterState(); final DocRouter router = clusterState.getCollection(collectionName).getRouter(); - Slice shard1 = clusterState.getSlice(collectionName, SHARD1); + Slice shard1 = clusterState.getCollection(collectionName).getSlice(SHARD1); DocRouter.Range shard1Range = shard1.getRange() != null ? 
shard1.getRange() : router.fullRange(); final List ranges = ((CompositeIdRouter) router).partitionRangeByKey(splitKey, shard1Range); final int[] docCounts = new int[ranges.size()]; @@ -835,8 +835,8 @@ public class ShardSplitTest extends BasicDistributedZkTest { for (i = 0; i < 10; i++) { ZkStateReader zkStateReader = cloudClient.getZkStateReader(); clusterState = zkStateReader.getClusterState(); - slice1_0 = clusterState.getSlice(AbstractDistribZkTestBase.DEFAULT_COLLECTION, "shard1_0"); - slice1_1 = clusterState.getSlice(AbstractDistribZkTestBase.DEFAULT_COLLECTION, "shard1_1"); + slice1_0 = clusterState.getCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION).getSlice("shard1_0"); + slice1_1 = clusterState.getCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION).getSlice("shard1_1"); if (slice1_0.getState() == Slice.State.ACTIVE && slice1_1.getState() == Slice.State.ACTIVE) { break; } @@ -887,7 +887,7 @@ public class ShardSplitTest extends BasicDistributedZkTest { query.set("distrib", false); ClusterState clusterState = cloudClient.getZkStateReader().getClusterState(); - Slice slice = clusterState.getSlice(AbstractDistribZkTestBase.DEFAULT_COLLECTION, shard); + Slice slice = clusterState.getCollection(AbstractDistribZkTestBase.DEFAULT_COLLECTION).getSlice(shard); long[] numFound = new long[slice.getReplicasMap().size()]; int c = 0; for (Replica replica : slice.getReplicas()) { diff --git a/solr/core/src/test/org/apache/solr/cloud/SharedFSAutoReplicaFailoverTest.java b/solr/core/src/test/org/apache/solr/cloud/SharedFSAutoReplicaFailoverTest.java index 9c839f6f9f0..e2174dea498 100644 --- a/solr/core/src/test/org/apache/solr/cloud/SharedFSAutoReplicaFailoverTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/SharedFSAutoReplicaFailoverTest.java @@ -388,7 +388,7 @@ public class SharedFSAutoReplicaFailoverTest extends AbstractFullDistribZkTestBa private void assertSingleReplicationAndShardSize(String collection, int numSlices) { Collection slices; - slices 
= cloudClient.getZkStateReader().getClusterState().getActiveSlices(collection); + slices = cloudClient.getZkStateReader().getClusterState().getCollection(collection).getActiveSlices(); assertEquals(numSlices, slices.size()); for (Slice slice : slices) { assertEquals(1, slice.getReplicas().size()); @@ -397,7 +397,7 @@ public class SharedFSAutoReplicaFailoverTest extends AbstractFullDistribZkTestBa private void assertSliceAndReplicaCount(String collection) { Collection slices; - slices = cloudClient.getZkStateReader().getClusterState().getActiveSlices(collection); + slices = cloudClient.getZkStateReader().getClusterState().getCollection(collection).getActiveSlices(); assertEquals(2, slices.size()); for (Slice slice : slices) { assertEquals(2, slice.getReplicas().size()); diff --git a/solr/core/src/test/org/apache/solr/cloud/SliceStateTest.java b/solr/core/src/test/org/apache/solr/cloud/SliceStateTest.java index 6a633fafe0b..f2356431bfe 100644 --- a/solr/core/src/test/org/apache/solr/cloud/SliceStateTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/SliceStateTest.java @@ -53,6 +53,6 @@ public class SliceStateTest extends SolrTestCaseJ4 { byte[] bytes = Utils.toJSON(clusterState); ClusterState loadedClusterState = ClusterState.load(-1, bytes, liveNodes); - assertSame("Default state not set to active", Slice.State.ACTIVE, loadedClusterState.getSlice("collection1", "shard1").getState()); + assertSame("Default state not set to active", Slice.State.ACTIVE, loadedClusterState.getCollection("collection1").getSlice("shard1").getState()); } } diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCloudDeleteByQuery.java b/solr/core/src/test/org/apache/solr/cloud/TestCloudDeleteByQuery.java index ec2dac62c8c..d85b1392899 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestCloudDeleteByQuery.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestCloudDeleteByQuery.java @@ -126,7 +126,7 @@ public class TestCloudDeleteByQuery extends SolrCloudTestCase { 
urlMap.put(nodeKey, jettyURL.toString()); } ClusterState clusterState = zkStateReader.getClusterState(); - for (Slice slice : clusterState.getSlices(COLLECTION_NAME)) { + for (Slice slice : clusterState.getCollection(COLLECTION_NAME).getSlices()) { String shardName = slice.getName(); Replica leader = slice.getLeader(); assertNotNull("slice has null leader: " + slice.toString(), leader); diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCollectionAPI.java b/solr/core/src/test/org/apache/solr/cloud/TestCollectionAPI.java index abe4ed3d037..037a3e6806e 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestCollectionAPI.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestCollectionAPI.java @@ -37,6 +37,7 @@ import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.cloud.ClusterState; +import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.Slice; import org.apache.solr.common.params.CollectionParams; @@ -768,10 +769,11 @@ public class TestCollectionAPI extends ReplicaPropertiesBase { client.getZkStateReader().forceUpdateCollection(collectionName); ClusterState clusterState = client.getZkStateReader().getClusterState(); - Replica replica = clusterState.getReplica(collectionName, replicaName); - if (replica == null) { + final DocCollection docCollection = clusterState.getCollectionOrNull(collectionName); + if (docCollection == null || docCollection.getReplica(replicaName) == null) { fail("Could not find collection/replica pair! 
" + collectionName + "/" + replicaName); } + Replica replica = docCollection.getReplica(replicaName); Map propMap = new HashMap<>(); for (String prop : props) { propMap.put(prop, replica.getStr(prop)); diff --git a/solr/core/src/test/org/apache/solr/cloud/TestLeaderElectionWithEmptyReplica.java b/solr/core/src/test/org/apache/solr/cloud/TestLeaderElectionWithEmptyReplica.java index 84b39014c4d..5221e8185dc 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestLeaderElectionWithEmptyReplica.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestLeaderElectionWithEmptyReplica.java @@ -98,7 +98,7 @@ public class TestLeaderElectionWithEmptyReplica extends SolrCloudTestCase { (n, c) -> DocCollection.isFullyActive(n, c, 1, 2)); // now query each replica and check for consistency - assertConsistentReplicas(solrClient, solrClient.getZkStateReader().getClusterState().getSlice(COLLECTION_NAME, "shard1")); + assertConsistentReplicas(solrClient, solrClient.getZkStateReader().getClusterState().getCollection(COLLECTION_NAME).getSlice("shard1")); // sanity check that documents still exist QueryResponse response = solrClient.query(new SolrQuery("*:*")); diff --git a/solr/core/src/test/org/apache/solr/cloud/TestLeaderInitiatedRecoveryThread.java b/solr/core/src/test/org/apache/solr/cloud/TestLeaderInitiatedRecoveryThread.java index 11858f828b7..b6efa533aef 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestLeaderInitiatedRecoveryThread.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestLeaderInitiatedRecoveryThread.java @@ -57,7 +57,7 @@ public class TestLeaderInitiatedRecoveryThread extends AbstractFullDistribZkTest } } assertNotNull(notLeader); - Replica replica = cloudClient.getZkStateReader().getClusterState().getReplica(DEFAULT_COLLECTION, notLeader.coreNodeName); + Replica replica = cloudClient.getZkStateReader().getClusterState().getCollection(DEFAULT_COLLECTION).getReplica(notLeader.coreNodeName); ZkCoreNodeProps replicaCoreNodeProps = new 
ZkCoreNodeProps(replica); MockCoreDescriptor cd = new MockCoreDescriptor() { @@ -175,7 +175,7 @@ public class TestLeaderInitiatedRecoveryThread extends AbstractFullDistribZkTest timeOut = new TimeOut(30, TimeUnit.SECONDS); while (!timeOut.hasTimedOut()) { - Replica r = cloudClient.getZkStateReader().getClusterState().getReplica(DEFAULT_COLLECTION, replica.getName()); + Replica r = cloudClient.getZkStateReader().getClusterState().getCollection(DEFAULT_COLLECTION).getReplica(replica.getName()); if (r.getState() == Replica.State.DOWN) { break; } @@ -183,7 +183,7 @@ public class TestLeaderInitiatedRecoveryThread extends AbstractFullDistribZkTest } assertNull(zkController.getLeaderInitiatedRecoveryState(DEFAULT_COLLECTION, SHARD1, replica.getName())); - assertEquals(Replica.State.DOWN, cloudClient.getZkStateReader().getClusterState().getReplica(DEFAULT_COLLECTION, replica.getName()).getState()); + assertEquals(Replica.State.DOWN, cloudClient.getZkStateReader().getClusterState().getCollection(DEFAULT_COLLECTION).getReplica(replica.getName()).getState()); /* 6. Test that non-leader cannot set LIR nodes diff --git a/solr/core/src/test/org/apache/solr/cloud/TestMiniSolrCloudCluster.java b/solr/core/src/test/org/apache/solr/cloud/TestMiniSolrCloudCluster.java new file mode 100644 index 00000000000..d4a131bc02c --- /dev/null +++ b/solr/core/src/test/org/apache/solr/cloud/TestMiniSolrCloudCluster.java @@ -0,0 +1,389 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.cloud; + +import java.lang.invoke.MethodHandles; +import java.net.URL; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; + +import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule; +import org.apache.lucene.util.LuceneTestCase; +import org.apache.lucene.util.LuceneTestCase.SuppressSysoutChecks; +import org.apache.solr.SolrTestCaseJ4; +import org.apache.solr.client.solrj.SolrQuery; +import org.apache.solr.client.solrj.embedded.JettyConfig; +import org.apache.solr.client.solrj.embedded.JettyConfig.Builder; +import org.apache.solr.client.solrj.embedded.JettySolrRunner; +import org.apache.solr.client.solrj.impl.CloudSolrClient; +import org.apache.solr.client.solrj.request.CollectionAdminRequest; +import org.apache.solr.client.solrj.response.QueryResponse; +import org.apache.solr.common.SolrInputDocument; +import org.apache.solr.common.cloud.ClusterState; +import org.apache.solr.common.cloud.Replica; +import org.apache.solr.common.cloud.Slice; +import org.apache.solr.common.cloud.SolrZkClient; +import org.apache.solr.common.cloud.ZkStateReader; +import org.apache.solr.core.CoreDescriptor; +import org.apache.solr.index.TieredMergePolicyFactory; +import org.apache.solr.util.RevertDefaultThreadHandlerRule; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Rule; +import org.junit.Test; +import 
org.junit.rules.RuleChain; +import org.junit.rules.TestRule; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Test of the MiniSolrCloudCluster functionality. Keep in mind, + * MiniSolrCloudCluster is designed to be used outside of the Lucene test + * hierarchy. + */ +@SuppressSysoutChecks(bugUrl = "Solr logs to JUL") +public class TestMiniSolrCloudCluster extends LuceneTestCase { + + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + protected int NUM_SERVERS = 5; + protected int NUM_SHARDS = 2; + protected int REPLICATION_FACTOR = 2; + + public TestMiniSolrCloudCluster () { + NUM_SERVERS = 5; + NUM_SHARDS = 2; + REPLICATION_FACTOR = 2; + } + + @BeforeClass + public static void setupHackNumerics() { // SOLR-10916 + SolrTestCaseJ4.randomizeNumericTypesProperties(); + } + @AfterClass + public static void clearHackNumerics() { // SOLR-10916 + SolrTestCaseJ4.clearNumericTypesProperties(); + } + + @Rule + public TestRule solrTestRules = RuleChain + .outerRule(new SystemPropertiesRestoreRule()); + + @ClassRule + public static TestRule solrClassRules = RuleChain.outerRule( + new SystemPropertiesRestoreRule()).around( + new RevertDefaultThreadHandlerRule()); + + private MiniSolrCloudCluster createMiniSolrCloudCluster() throws Exception { + Builder jettyConfig = JettyConfig.builder(); + jettyConfig.waitForLoadingCoresToFinish(null); + return new MiniSolrCloudCluster(NUM_SERVERS, createTempDir(), jettyConfig.build()); + } + + private void createCollection(MiniSolrCloudCluster miniCluster, String collectionName, String createNodeSet, String asyncId, + Boolean indexToPersist, Map collectionProperties) throws Exception { + String configName = "solrCloudCollectionConfig"; + miniCluster.uploadConfigSet(SolrTestCaseJ4.TEST_PATH().resolve("collection1").resolve("conf"), configName); + + final boolean persistIndex = (indexToPersist != null ? 
indexToPersist.booleanValue() : random().nextBoolean()); + if (collectionProperties == null) { + collectionProperties = new HashMap<>(); + } + collectionProperties.putIfAbsent(CoreDescriptor.CORE_CONFIG, "solrconfig-tlog.xml"); + collectionProperties.putIfAbsent("solr.tests.maxBufferedDocs", "100000"); + collectionProperties.putIfAbsent("solr.tests.ramBufferSizeMB", "100"); + // use non-test classes so RandomizedRunner isn't necessary + collectionProperties.putIfAbsent(SolrTestCaseJ4.SYSTEM_PROPERTY_SOLR_TESTS_MERGEPOLICYFACTORY, TieredMergePolicyFactory.class.getName()); + collectionProperties.putIfAbsent("solr.tests.mergeScheduler", "org.apache.lucene.index.ConcurrentMergeScheduler"); + collectionProperties.putIfAbsent("solr.directoryFactory", (persistIndex ? "solr.StandardDirectoryFactory" : "solr.RAMDirectoryFactory")); + + if (asyncId == null) { + CollectionAdminRequest.createCollection(collectionName, configName, NUM_SHARDS, REPLICATION_FACTOR) + .setCreateNodeSet(createNodeSet) + .setProperties(collectionProperties) + .process(miniCluster.getSolrClient()); + } + else { + CollectionAdminRequest.createCollection(collectionName, configName, NUM_SHARDS, REPLICATION_FACTOR) + .setCreateNodeSet(createNodeSet) + .setProperties(collectionProperties) + .processAndWait(miniCluster.getSolrClient(), 30); + } + } + + @Test + public void testCollectionCreateSearchDelete() throws Exception { + + final String collectionName = "testcollection"; + MiniSolrCloudCluster miniCluster = createMiniSolrCloudCluster(); + + final CloudSolrClient cloudSolrClient = miniCluster.getSolrClient(); + + try { + assertNotNull(miniCluster.getZkServer()); + List jettys = miniCluster.getJettySolrRunners(); + assertEquals(NUM_SERVERS, jettys.size()); + for (JettySolrRunner jetty : jettys) { + assertTrue(jetty.isRunning()); + } + + // shut down a server + log.info("#### Stopping a server"); + JettySolrRunner stoppedServer = miniCluster.stopJettySolrRunner(0); + 
assertTrue(stoppedServer.isStopped()); + assertEquals(NUM_SERVERS - 1, miniCluster.getJettySolrRunners().size()); + + // create a server + log.info("#### Starting a server"); + JettySolrRunner startedServer = miniCluster.startJettySolrRunner(); + assertTrue(startedServer.isRunning()); + assertEquals(NUM_SERVERS, miniCluster.getJettySolrRunners().size()); + + // create collection + log.info("#### Creating a collection"); + final String asyncId = (random().nextBoolean() ? null : "asyncId("+collectionName+".create)="+random().nextInt()); + createCollection(miniCluster, collectionName, null, asyncId, null, null); + + ZkStateReader zkStateReader = miniCluster.getSolrClient().getZkStateReader(); + AbstractDistribZkTestBase.waitForRecoveriesToFinish(collectionName, zkStateReader, true, true, 330); + + // modify/query collection + log.info("#### updating a querying collection"); + cloudSolrClient.setDefaultCollection(collectionName); + SolrInputDocument doc = new SolrInputDocument(); + doc.setField("id", "1"); + cloudSolrClient.add(doc); + cloudSolrClient.commit(); + SolrQuery query = new SolrQuery(); + query.setQuery("*:*"); + QueryResponse rsp = cloudSolrClient.query(query); + assertEquals(1, rsp.getResults().getNumFound()); + + // remove a server not hosting any replicas + zkStateReader.forceUpdateCollection(collectionName); + ClusterState clusterState = zkStateReader.getClusterState(); + HashMap jettyMap = new HashMap(); + for (JettySolrRunner jetty : miniCluster.getJettySolrRunners()) { + String key = jetty.getBaseUrl().toString().substring((jetty.getBaseUrl().getProtocol() + "://").length()); + jettyMap.put(key, jetty); + } + Collection slices = clusterState.getCollection(collectionName).getSlices(); + // track the servers not host repliacs + for (Slice slice : slices) { + jettyMap.remove(slice.getLeader().getNodeName().replace("_solr", "/solr")); + for (Replica replica : slice.getReplicas()) { + jettyMap.remove(replica.getNodeName().replace("_solr", "/solr")); + } + 
} + assertTrue("Expected to find a node without a replica", jettyMap.size() > 0); + log.info("#### Stopping a server"); + JettySolrRunner jettyToStop = jettyMap.entrySet().iterator().next().getValue(); + jettys = miniCluster.getJettySolrRunners(); + for (int i = 0; i < jettys.size(); ++i) { + if (jettys.get(i).equals(jettyToStop)) { + miniCluster.stopJettySolrRunner(i); + assertEquals(NUM_SERVERS - 1, miniCluster.getJettySolrRunners().size()); + } + } + + // re-create a server (to restore original NUM_SERVERS count) + log.info("#### Starting a server"); + startedServer = miniCluster.startJettySolrRunner(jettyToStop); + assertTrue(startedServer.isRunning()); + assertEquals(NUM_SERVERS, miniCluster.getJettySolrRunners().size()); + + CollectionAdminRequest.deleteCollection(collectionName).process(miniCluster.getSolrClient()); + + // create it again + String asyncId2 = (random().nextBoolean() ? null : "asyncId("+collectionName+".create)="+random().nextInt()); + createCollection(miniCluster, collectionName, null, asyncId2, null, null); + AbstractDistribZkTestBase.waitForRecoveriesToFinish(collectionName, zkStateReader, true, true, 330); + + // check that there's no left-over state + assertEquals(0, cloudSolrClient.query(new SolrQuery("*:*")).getResults().getNumFound()); + cloudSolrClient.add(doc); + cloudSolrClient.commit(); + assertEquals(1, cloudSolrClient.query(new SolrQuery("*:*")).getResults().getNumFound()); + + } + finally { + miniCluster.shutdown(); + } + } + + @Test + public void testCollectionCreateWithoutCoresThenDelete() throws Exception { + + final String collectionName = "testSolrCloudCollectionWithoutCores"; + final MiniSolrCloudCluster miniCluster = createMiniSolrCloudCluster(); + final CloudSolrClient cloudSolrClient = miniCluster.getSolrClient(); + + try { + assertNotNull(miniCluster.getZkServer()); + assertFalse(miniCluster.getJettySolrRunners().isEmpty()); + + // create collection + final String asyncId = (random().nextBoolean() ? 
null : "asyncId("+collectionName+".create)="+random().nextInt()); + createCollection(miniCluster, collectionName, OverseerCollectionMessageHandler.CREATE_NODE_SET_EMPTY, asyncId, null, null); + + try (SolrZkClient zkClient = new SolrZkClient + (miniCluster.getZkServer().getZkAddress(), AbstractZkTestCase.TIMEOUT, AbstractZkTestCase.TIMEOUT, null); + ZkStateReader zkStateReader = new ZkStateReader(zkClient)) { + zkStateReader.createClusterStateWatchersAndUpdate(); + + // wait for collection to appear + AbstractDistribZkTestBase.waitForRecoveriesToFinish(collectionName, zkStateReader, true, true, 330); + + // check the collection's corelessness + { + int coreCount = 0; + for (Map.Entry entry : zkStateReader.getClusterState() + .getCollection(collectionName).getSlicesMap().entrySet()) { + coreCount += entry.getValue().getReplicasMap().entrySet().size(); + } + assertEquals(0, coreCount); + } + + } + } + finally { + miniCluster.shutdown(); + } + } + + @Test + public void testStopAllStartAll() throws Exception { + + final String collectionName = "testStopAllStartAllCollection"; + + final MiniSolrCloudCluster miniCluster = createMiniSolrCloudCluster(); + + try { + assertNotNull(miniCluster.getZkServer()); + List jettys = miniCluster.getJettySolrRunners(); + assertEquals(NUM_SERVERS, jettys.size()); + for (JettySolrRunner jetty : jettys) { + assertTrue(jetty.isRunning()); + } + + createCollection(miniCluster, collectionName, null, null, Boolean.TRUE, null); + final CloudSolrClient cloudSolrClient = miniCluster.getSolrClient(); + cloudSolrClient.setDefaultCollection(collectionName); + final SolrQuery query = new SolrQuery("*:*"); + final SolrInputDocument doc = new SolrInputDocument(); + + try (SolrZkClient zkClient = new SolrZkClient + (miniCluster.getZkServer().getZkAddress(), AbstractZkTestCase.TIMEOUT, AbstractZkTestCase.TIMEOUT, null); + ZkStateReader zkStateReader = new ZkStateReader(zkClient)) { + zkStateReader.createClusterStateWatchersAndUpdate(); + 
AbstractDistribZkTestBase.waitForRecoveriesToFinish(collectionName, zkStateReader, true, true, 330); + + // modify collection + final int numDocs = 1 + random().nextInt(10); + for (int ii = 1; ii <= numDocs; ++ii) { + doc.setField("id", ""+ii); + cloudSolrClient.add(doc); + if (ii*2 == numDocs) cloudSolrClient.commit(); + } + cloudSolrClient.commit(); + // query collection + { + final QueryResponse rsp = cloudSolrClient.query(query); + assertEquals(numDocs, rsp.getResults().getNumFound()); + } + + // the test itself + zkStateReader.forceUpdateCollection(collectionName); + final ClusterState clusterState = zkStateReader.getClusterState(); + + final HashSet leaderIndices = new HashSet(); + final HashSet followerIndices = new HashSet(); + { + final HashMap shardLeaderMap = new HashMap(); + for (final Slice slice : clusterState.getCollection(collectionName).getSlices()) { + for (final Replica replica : slice.getReplicas()) { + shardLeaderMap.put(replica.getNodeName().replace("_solr", "/solr"), Boolean.FALSE); + } + shardLeaderMap.put(slice.getLeader().getNodeName().replace("_solr", "/solr"), Boolean.TRUE); + } + for (int ii = 0; ii < jettys.size(); ++ii) { + final URL jettyBaseUrl = jettys.get(ii).getBaseUrl(); + final String jettyBaseUrlString = jettyBaseUrl.toString().substring((jettyBaseUrl.getProtocol() + "://").length()); + final Boolean isLeader = shardLeaderMap.get(jettyBaseUrlString); + if (Boolean.TRUE.equals(isLeader)) { + leaderIndices.add(new Integer(ii)); + } else if (Boolean.FALSE.equals(isLeader)) { + followerIndices.add(new Integer(ii)); + } // else neither leader nor follower i.e. 
node without a replica (for our collection) + } + } + final List leaderIndicesList = new ArrayList(leaderIndices); + final List followerIndicesList = new ArrayList(followerIndices); + + // first stop the followers (in no particular order) + Collections.shuffle(followerIndicesList, random()); + for (Integer ii : followerIndicesList) { + if (!leaderIndices.contains(ii)) { + miniCluster.stopJettySolrRunner(jettys.get(ii.intValue())); + } + } + + // then stop the leaders (again in no particular order) + Collections.shuffle(leaderIndicesList, random()); + for (Integer ii : leaderIndicesList) { + miniCluster.stopJettySolrRunner(jettys.get(ii.intValue())); + } + + // calculate restart order + final List restartIndicesList = new ArrayList(); + Collections.shuffle(leaderIndicesList, random()); + restartIndicesList.addAll(leaderIndicesList); + Collections.shuffle(followerIndicesList, random()); + restartIndicesList.addAll(followerIndicesList); + if (random().nextBoolean()) Collections.shuffle(restartIndicesList, random()); + + // and then restart jettys in that order + for (Integer ii : restartIndicesList) { + final JettySolrRunner jetty = jettys.get(ii.intValue()); + if (!jetty.isRunning()) { + miniCluster.startJettySolrRunner(jetty); + assertTrue(jetty.isRunning()); + } + } + AbstractDistribZkTestBase.waitForRecoveriesToFinish(collectionName, zkStateReader, true, true, 330); + + zkStateReader.forceUpdateCollection(collectionName); + + // re-query collection + { + final QueryResponse rsp = cloudSolrClient.query(query); + assertEquals(numDocs, rsp.getResults().getNumFound()); + } + + } + } + finally { + miniCluster.shutdown(); + } + } + +} diff --git a/solr/core/src/test/org/apache/solr/cloud/TestRandomRequestDistribution.java b/solr/core/src/test/org/apache/solr/cloud/TestRandomRequestDistribution.java index d3fc6794cf9..415f80f563b 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestRandomRequestDistribution.java +++ 
b/solr/core/src/test/org/apache/solr/cloud/TestRandomRequestDistribution.java @@ -159,7 +159,7 @@ public class TestRandomRequestDistribution extends AbstractFullDistribZkTestBase Replica leader = null; Replica notLeader = null; - Collection replicas = cloudClient.getZkStateReader().getClusterState().getSlice("football", "shard1").getReplicas(); + Collection replicas = cloudClient.getZkStateReader().getClusterState().getCollection("football").getSlice("shard1").getReplicas(); for (Replica replica : replicas) { if (replica.getStr(ZkStateReader.LEADER_PROP) != null) { leader = replica; diff --git a/solr/core/src/test/org/apache/solr/cloud/TestReplicaProperties.java b/solr/core/src/test/org/apache/solr/cloud/TestReplicaProperties.java index fc2a7e25740..9a9af9722c6 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestReplicaProperties.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestReplicaProperties.java @@ -193,7 +193,7 @@ public class TestReplicaProperties extends ReplicaPropertiesBase { for (int idx = 0; idx < 300; ++idx) { // Keep trying while Overseer writes the ZK state for up to 30 seconds. 
lastFailMsg = ""; ClusterState clusterState = client.getZkStateReader().getClusterState(); - for (Slice slice : clusterState.getSlices(collectionName)) { + for (Slice slice : clusterState.getCollection(collectionName).getSlices()) { Boolean foundLeader = false; Boolean foundPreferred = false; for (Replica replica : slice.getReplicas()) { diff --git a/solr/core/src/test/org/apache/solr/cloud/TestShortCircuitedRequests.java b/solr/core/src/test/org/apache/solr/cloud/TestShortCircuitedRequests.java index 4233e9d84c2..08e0eb58f2c 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestShortCircuitedRequests.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestShortCircuitedRequests.java @@ -45,7 +45,7 @@ public class TestShortCircuitedRequests extends AbstractFullDistribZkTestBase { doQuery("a!doc1", "q", "*:*", ShardParams._ROUTE_, "a!"); // can go to any random node // query shard3 directly with _route_=a! so that we trigger the short circuited request path - Replica shard3 = cloudClient.getZkStateReader().getClusterState().getLeader(DEFAULT_COLLECTION, "shard3"); + Replica shard3 = cloudClient.getZkStateReader().getClusterState().getCollection(DEFAULT_COLLECTION).getLeader("shard3"); String nodeName = shard3.getNodeName(); SolrClient shard3Client = getClient(nodeName); QueryResponse response = shard3Client.query(new SolrQuery("*:*").add(ShardParams._ROUTE_, "a!").add(ShardParams.SHARDS_INFO, "true")); diff --git a/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java b/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java index 864172082df..8a847244213 100644 --- a/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java +++ b/solr/core/src/test/org/apache/solr/cloud/TestTolerantUpdateProcessorCloud.java @@ -134,7 +134,7 @@ public class TestTolerantUpdateProcessorCloud extends SolrCloudTestCase { } zkStateReader.forceUpdateCollection(COLLECTION_NAME); ClusterState clusterState = 
zkStateReader.getClusterState(); - for (Slice slice : clusterState.getSlices(COLLECTION_NAME)) { + for (Slice slice : clusterState.getCollection(COLLECTION_NAME).getSlices()) { String shardName = slice.getName(); Replica leader = slice.getLeader(); assertNotNull("slice has null leader: " + slice.toString(), leader); diff --git a/solr/core/src/test/org/apache/solr/cloud/UnloadDistributedZkTest.java b/solr/core/src/test/org/apache/solr/cloud/UnloadDistributedZkTest.java index 45f8b81fc80..28a0a4e52ae 100644 --- a/solr/core/src/test/org/apache/solr/cloud/UnloadDistributedZkTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/UnloadDistributedZkTest.java @@ -25,6 +25,7 @@ import org.apache.solr.client.solrj.impl.HttpSolrClient; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.client.solrj.request.CoreAdminRequest.Unload; import org.apache.solr.common.SolrInputDocument; +import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.Slice; import org.apache.solr.common.cloud.ZkCoreNodeProps; @@ -75,7 +76,8 @@ public class UnloadDistributedZkTest extends BasicDistributedZkTest { final TimeOut timeout = new TimeOut(45, TimeUnit.SECONDS); Boolean isPresent = null; // null meaning "don't know" while (null == isPresent || shouldBePresent != isPresent.booleanValue()) { - final Collection slices = getCommonCloudSolrClient().getZkStateReader().getClusterState().getSlices(collectionName); + final DocCollection docCollection = getCommonCloudSolrClient().getZkStateReader().getClusterState().getCollectionOrNull(collectionName); + final Collection slices = (docCollection != null) ? 
docCollection.getSlices() : null; if (timeout.hasTimedOut()) { printLayout(); fail("checkCoreNamePresenceAndSliceCount failed:" diff --git a/solr/core/src/test/org/apache/solr/cloud/hdfs/StressHdfsTest.java b/solr/core/src/test/org/apache/solr/cloud/hdfs/StressHdfsTest.java index 62e3f5ff593..329de797130 100644 --- a/solr/core/src/test/org/apache/solr/cloud/hdfs/StressHdfsTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/hdfs/StressHdfsTest.java @@ -33,6 +33,7 @@ import org.apache.solr.client.solrj.request.QueryRequest; import org.apache.solr.cloud.BasicDistributedZkTest; import org.apache.solr.cloud.ChaosMonkey; import org.apache.solr.common.cloud.ClusterState; +import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.Slice; import org.apache.solr.common.params.CollectionParams.CollectionAction; @@ -160,8 +161,10 @@ public class StressHdfsTest extends BasicDistributedZkTest { // data dirs should be in zk, SOLR-8913 ClusterState clusterState = cloudClient.getZkStateReader().getClusterState(); - Slice slice = clusterState.getSlice(DELETE_DATA_DIR_COLLECTION, "shard1"); - assertNotNull(clusterState.getSlices(DELETE_DATA_DIR_COLLECTION).toString(), slice); + final DocCollection docCollection = clusterState.getCollectionOrNull(DELETE_DATA_DIR_COLLECTION); + assertNotNull("Could not find :"+DELETE_DATA_DIR_COLLECTION, docCollection); + Slice slice = docCollection.getSlice("shard1"); + assertNotNull(docCollection.getSlices().toString(), slice); Collection replicas = slice.getReplicas(); for (Replica replica : replicas) { assertNotNull(replica.getProperties().toString(), replica.get("dataDir")); diff --git a/solr/core/src/test/org/apache/solr/schema/TestCloudManagedSchemaConcurrent.java b/solr/core/src/test/org/apache/solr/schema/TestCloudManagedSchemaConcurrent.java new file mode 100644 index 00000000000..f21df6c94cb --- /dev/null +++ 
b/solr/core/src/test/org/apache/solr/schema/TestCloudManagedSchemaConcurrent.java @@ -0,0 +1,717 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.schema; +import java.lang.invoke.MethodHandles; +import java.util.ArrayList; +import java.util.List; +import java.util.SortedMap; +import java.util.TreeMap; +import java.util.concurrent.TimeUnit; + +import org.apache.solr.client.solrj.SolrClient; +import org.apache.solr.client.solrj.embedded.JettySolrRunner; +import org.apache.solr.client.solrj.impl.HttpSolrClient; +import org.apache.solr.cloud.AbstractFullDistribZkTestBase; +import org.apache.solr.common.cloud.ClusterState; +import org.apache.solr.common.cloud.Replica; +import org.apache.solr.common.cloud.Slice; +import org.apache.solr.common.cloud.SolrZkClient; +import org.apache.solr.common.cloud.ZkCoreNodeProps; +import org.apache.solr.util.BaseTestHarness; +import org.apache.solr.util.RestTestHarness; +import org.apache.zookeeper.data.Stat; +import org.eclipse.jetty.servlet.ServletHolder; +import org.junit.BeforeClass; +import org.junit.Ignore; +import org.junit.Test; +import org.restlet.ext.servlet.ServerServlet; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +@Ignore +public 
class TestCloudManagedSchemaConcurrent extends AbstractFullDistribZkTestBase { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private static final String SUCCESS_XPATH = "/response/lst[@name='responseHeader']/int[@name='status'][.='0']"; + private static final String PUT_DYNAMIC_FIELDNAME = "newdynamicfieldPut"; + private static final String POST_DYNAMIC_FIELDNAME = "newdynamicfieldPost"; + private static final String PUT_FIELDNAME = "newfieldPut"; + private static final String POST_FIELDNAME = "newfieldPost"; + private static final String PUT_FIELDTYPE = "newfieldtypePut"; + private static final String POST_FIELDTYPE = "newfieldtypePost"; + + public TestCloudManagedSchemaConcurrent() { + super(); + sliceCount = 4; + } + + @BeforeClass + public static void initSysProperties() { + System.setProperty("managed.schema.mutable", "true"); + System.setProperty("enable.update.log", "true"); + } + + @Override + public void distribTearDown() throws Exception { + super.distribTearDown(); + for (RestTestHarness h : restTestHarnesses) { + h.close(); + } + } + + @Override + protected String getCloudSolrConfig() { + return "solrconfig-managed-schema.xml"; + } + + @Override + public SortedMap getExtraServlets() { + final SortedMap extraServlets = new TreeMap<>(); + final ServletHolder solrRestApi = new ServletHolder("SolrSchemaRestApi", ServerServlet.class); + solrRestApi.setInitParameter("org.restlet.application", "org.apache.solr.rest.SolrSchemaRestApi"); + extraServlets.put(solrRestApi, "/schema/*"); // '/schema/*' matches '/schema', '/schema/', and '/schema/whatever...' 
+ return extraServlets; + } + + private List restTestHarnesses = new ArrayList<>(); + + private void setupHarnesses() { + for (final SolrClient client : clients) { + RestTestHarness harness = new RestTestHarness(() -> ((HttpSolrClient)client).getBaseURL()); + restTestHarnesses.add(harness); + } + } + + private static void verifySuccess(String request, String response) throws Exception { + String result = BaseTestHarness.validateXPath(response, SUCCESS_XPATH); + if (null != result) { + String msg = "QUERY FAILED: xpath=" + result + " request=" + request + " response=" + response; + log.error(msg); + fail(msg); + } + } + + private static void addFieldPut(RestTestHarness publisher, String fieldName, int updateTimeoutSecs) throws Exception { + final String content = "{\"type\":\"text\",\"stored\":\"false\"}"; + String request = "/schema/fields/" + fieldName + "?wt=xml"; + if (updateTimeoutSecs > 0) + request += "&updateTimeoutSecs="+updateTimeoutSecs; + String response = publisher.put(request, content); + verifySuccess(request, response); + } + + private static void addFieldPost(RestTestHarness publisher, String fieldName, int updateTimeoutSecs) throws Exception { + final String content = "[{\"name\":\""+fieldName+"\",\"type\":\"text\",\"stored\":\"false\"}]"; + String request = "/schema/fields/?wt=xml"; + if (updateTimeoutSecs > 0) + request += "&updateTimeoutSecs="+updateTimeoutSecs; + String response = publisher.post(request, content); + verifySuccess(request, response); + } + + private static void addDynamicFieldPut(RestTestHarness publisher, String dynamicFieldPattern, int updateTimeoutSecs) throws Exception { + final String content = "{\"type\":\"text\",\"stored\":\"false\"}"; + String request = "/schema/dynamicfields/" + dynamicFieldPattern + "?wt=xml"; + if (updateTimeoutSecs > 0) + request += "&updateTimeoutSecs="+updateTimeoutSecs; + String response = publisher.put(request, content); + verifySuccess(request, response); + } + + private static void 
addDynamicFieldPost(RestTestHarness publisher, String dynamicFieldPattern, int updateTimeoutSecs) throws Exception { + final String content = "[{\"name\":\""+dynamicFieldPattern+"\",\"type\":\"text\",\"stored\":\"false\"}]"; + String request = "/schema/dynamicfields/?wt=xml"; + if (updateTimeoutSecs > 0) + request += "&updateTimeoutSecs="+updateTimeoutSecs; + String response = publisher.post(request, content); + verifySuccess(request, response); + } + + private static void copyField(RestTestHarness publisher, String source, String dest, int updateTimeoutSecs) throws Exception { + final String content = "[{\"source\":\""+source+"\",\"dest\":[\""+dest+"\"]}]"; + String request = "/schema/copyfields/?wt=xml"; + if (updateTimeoutSecs > 0) + request += "&updateTimeoutSecs="+updateTimeoutSecs; + String response = publisher.post(request, content); + verifySuccess(request, response); + } + + private static void addFieldTypePut(RestTestHarness publisher, String typeName, int updateTimeoutSecs) throws Exception { + final String content = "{\"class\":\""+RANDOMIZED_NUMERIC_FIELDTYPES.get(Integer.class)+"\"}"; + String request = "/schema/fieldtypes/" + typeName + "?wt=xml"; + if (updateTimeoutSecs > 0) + request += "&updateTimeoutSecs="+updateTimeoutSecs; + String response = publisher.put(request, content); + verifySuccess(request, response); + } + + private static void addFieldTypePost(RestTestHarness publisher, String typeName, int updateTimeoutSecs) throws Exception { + final String content = "[{\"name\":\""+typeName+"\",\"class\":\""+RANDOMIZED_NUMERIC_FIELDTYPES.get(Integer.class)+"\"}]"; + String request = "/schema/fieldtypes/?wt=xml"; + if (updateTimeoutSecs > 0) + request += "&updateTimeoutSecs="+updateTimeoutSecs; + String response = publisher.post(request, content); + verifySuccess(request, response); + } + + private String[] getExpectedFieldResponses(Info info) { + String[] expectedAddFields = new String[1 + info.numAddFieldPuts + info.numAddFieldPosts]; + 
expectedAddFields[0] = SUCCESS_XPATH; + + for (int i = 0; i < info.numAddFieldPuts; ++i) { + String newFieldName = PUT_FIELDNAME + info.fieldNameSuffix + i; + expectedAddFields[1 + i] + = "/response/arr[@name='fields']/lst/str[@name='name'][.='" + newFieldName + "']"; + } + + for (int i = 0; i < info.numAddFieldPosts; ++i) { + String newFieldName = POST_FIELDNAME + info.fieldNameSuffix + i; + expectedAddFields[1 + info.numAddFieldPuts + i] + = "/response/arr[@name='fields']/lst/str[@name='name'][.='" + newFieldName + "']"; + } + + return expectedAddFields; + } + + private String[] getExpectedDynamicFieldResponses(Info info) { + String[] expectedAddDynamicFields = new String[1 + info.numAddDynamicFieldPuts + info.numAddDynamicFieldPosts]; + expectedAddDynamicFields[0] = SUCCESS_XPATH; + + for (int i = 0; i < info.numAddDynamicFieldPuts; ++i) { + String newDynamicFieldPattern = PUT_DYNAMIC_FIELDNAME + info.fieldNameSuffix + i + "_*"; + expectedAddDynamicFields[1 + i] + = "/response/arr[@name='dynamicFields']/lst/str[@name='name'][.='" + newDynamicFieldPattern + "']"; + } + + for (int i = 0; i < info.numAddDynamicFieldPosts; ++i) { + String newDynamicFieldPattern = POST_DYNAMIC_FIELDNAME + info.fieldNameSuffix + i + "_*"; + expectedAddDynamicFields[1 + info.numAddDynamicFieldPuts + i] + = "/response/arr[@name='dynamicFields']/lst/str[@name='name'][.='" + newDynamicFieldPattern + "']"; + } + + return expectedAddDynamicFields; + } + + private String[] getExpectedCopyFieldResponses(Info info) { + ArrayList expectedCopyFields = new ArrayList<>(); + expectedCopyFields.add(SUCCESS_XPATH); + for (CopyFieldInfo cpi : info.copyFields) { + String expectedSourceName = cpi.getSourceField(); + expectedCopyFields.add + ("/response/arr[@name='copyFields']/lst/str[@name='source'][.='" + expectedSourceName + "']"); + String expectedDestName = cpi.getDestField(); + expectedCopyFields.add + ("/response/arr[@name='copyFields']/lst/str[@name='dest'][.='" + expectedDestName + "']"); + } + 
+ return expectedCopyFields.toArray(new String[expectedCopyFields.size()]); + } + + private String[] getExpectedFieldTypeResponses(Info info) { + String[] expectedAddFieldTypes = new String[1 + info.numAddFieldTypePuts + info.numAddFieldTypePosts]; + expectedAddFieldTypes[0] = SUCCESS_XPATH; + + for (int i = 0; i < info.numAddFieldTypePuts; ++i) { + String newFieldTypeName = PUT_FIELDTYPE + info.fieldNameSuffix + i; + expectedAddFieldTypes[1 + i] + = "/response/arr[@name='fieldTypes']/lst/str[@name='name'][.='" + newFieldTypeName + "']"; + } + + for (int i = 0; i < info.numAddFieldTypePosts; ++i) { + String newFieldTypeName = POST_FIELDTYPE + info.fieldNameSuffix + i; + expectedAddFieldTypes[1 + info.numAddFieldTypePuts + i] + = "/response/arr[@name='fieldTypes']/lst/str[@name='name'][.='" + newFieldTypeName + "']"; + } + + return expectedAddFieldTypes; + } + + + @Test + @ShardsFixed(num = 8) + public void test() throws Exception { + verifyWaitForSchemaUpdateToPropagate(); + setupHarnesses(); + concurrentOperationsTest(); + schemaLockTest(); + } + + private static class Info { + int numAddFieldPuts = 0; + int numAddFieldPosts = 0; + int numAddDynamicFieldPuts = 0; + int numAddDynamicFieldPosts = 0; + int numAddFieldTypePuts = 0; + int numAddFieldTypePosts = 0; + public String fieldNameSuffix; + List copyFields = new ArrayList<>(); + + public Info(String fieldNameSuffix) { + this.fieldNameSuffix = fieldNameSuffix; + } + } + + private enum Operation { + PUT_AddField { + @Override public void execute(RestTestHarness publisher, int fieldNum, Info info) throws Exception { + String fieldname = PUT_FIELDNAME + info.numAddFieldPuts++; + addFieldPut(publisher, fieldname, 15); + } + }, + POST_AddField { + @Override public void execute(RestTestHarness publisher, int fieldNum, Info info) throws Exception { + String fieldname = POST_FIELDNAME + info.numAddFieldPosts++; + addFieldPost(publisher, fieldname, 15); + } + }, + PUT_AddDynamicField { + @Override public void 
execute(RestTestHarness publisher, int fieldNum, Info info) throws Exception { + addDynamicFieldPut(publisher, PUT_DYNAMIC_FIELDNAME + info.numAddDynamicFieldPuts++ + "_*", 15); + } + }, + POST_AddDynamicField { + @Override public void execute(RestTestHarness publisher, int fieldNum, Info info) throws Exception { + addDynamicFieldPost(publisher, POST_DYNAMIC_FIELDNAME + info.numAddDynamicFieldPosts++ + "_*", 15); + } + }, + POST_AddCopyField { + @Override public void execute(RestTestHarness publisher, int fieldNum, Info info) throws Exception { + String sourceField = null; + String destField = null; + + int sourceType = random().nextInt(3); + if (sourceType == 0) { // existing + sourceField = "name"; + } else if (sourceType == 1) { // newly created + sourceField = "copySource" + fieldNum; + addFieldPut(publisher, sourceField, 15); + } else { // dynamic + sourceField = "*_dynamicSource" + fieldNum + "_t"; + // * only supported if both src and dst use it + destField = "*_dynamicDest" + fieldNum + "_t"; + } + + if (destField == null) { + int destType = random().nextInt(2); + if (destType == 0) { // existing + destField = "title"; + } else { // newly created + destField = "copyDest" + fieldNum; + addFieldPut(publisher, destField, 15); + } + } + copyField(publisher, sourceField, destField, 15); + info.copyFields.add(new CopyFieldInfo(sourceField, destField)); + } + }, + PUT_AddFieldType { + @Override public void execute(RestTestHarness publisher, int fieldNum, Info info) throws Exception { + String typeName = PUT_FIELDTYPE + info.numAddFieldTypePuts++; + addFieldTypePut(publisher, typeName, 15); + } + }, + POST_AddFieldType { + @Override public void execute(RestTestHarness publisher, int fieldNum, Info info) throws Exception { + String typeName = POST_FIELDTYPE + info.numAddFieldTypePosts++; + addFieldTypePost(publisher, typeName, 15); + } + }; + + + public abstract void execute(RestTestHarness publisher, int fieldNum, Info info) throws Exception; + + private static 
final Operation[] VALUES = values(); + public static Operation randomOperation() { + return VALUES[r.nextInt(VALUES.length)]; + } + } + + private void verifyWaitForSchemaUpdateToPropagate() throws Exception { + String testCollectionName = "collection1"; + + ClusterState clusterState = cloudClient.getZkStateReader().getClusterState(); + Replica shard1Leader = clusterState.getCollection(testCollectionName).getLeader("shard1"); + final String coreUrl = (new ZkCoreNodeProps(shard1Leader)).getCoreUrl(); + assertNotNull(coreUrl); + + RestTestHarness harness = new RestTestHarness(() -> coreUrl.endsWith("/") ? coreUrl.substring(0, coreUrl.length()-1) : coreUrl); + try { + addFieldTypePut(harness, "fooInt", 15); + } finally { + harness.close(); + } + + // go into ZK to get the version of the managed schema after the update + SolrZkClient zkClient = cloudClient.getZkStateReader().getZkClient(); + Stat stat = new Stat(); + String znodePath = "/configs/conf1/managed-schema"; + byte[] managedSchemaBytes = zkClient.getData(znodePath, null, stat, false); + int schemaZkVersion = stat.getVersion(); + + // now loop over all replicas and verify each has the same schema version + Replica randomReplicaNotLeader = null; + for (Slice slice : clusterState.getCollection(testCollectionName).getActiveSlices()) { + for (Replica replica : slice.getReplicas()) { + validateZkVersion(replica, schemaZkVersion, 0, false); + + // save a random replica to test zk watcher behavior + if (randomReplicaNotLeader == null && !replica.getName().equals(shard1Leader.getName())) + randomReplicaNotLeader = replica; + } + } + assertNotNull(randomReplicaNotLeader); + + // now update the data and then verify the znode watcher fires correctly + // before and after a zk session expiration (see SOLR-6249) + zkClient.setData(znodePath, managedSchemaBytes, schemaZkVersion, false); + stat = new Stat(); + managedSchemaBytes = zkClient.getData(znodePath, null, stat, false); + int updatedSchemaZkVersion = stat.getVersion();
+ assertTrue(updatedSchemaZkVersion > schemaZkVersion); + validateZkVersion(randomReplicaNotLeader, updatedSchemaZkVersion, 2, true); + + // ok - looks like the watcher fired correctly on the replica + // now, expire that replica's zk session and then verify the watcher fires again (after reconnect) + JettySolrRunner randomReplicaJetty = + getJettyOnPort(getReplicaPort(randomReplicaNotLeader)); + assertNotNull(randomReplicaJetty); + chaosMonkey.expireSession(randomReplicaJetty); + + // update the data again to cause watchers to fire + zkClient.setData(znodePath, managedSchemaBytes, updatedSchemaZkVersion, false); + stat = new Stat(); + managedSchemaBytes = zkClient.getData(znodePath, null, stat, false); + updatedSchemaZkVersion = stat.getVersion(); + // give up to 10 secs for the replica to recover after zk session loss and see the update + validateZkVersion(randomReplicaNotLeader, updatedSchemaZkVersion, 10, true); + } + + /** + * Sends a GET request to get the zk schema version from a specific replica. + */ + protected void validateZkVersion(Replica replica, int schemaZkVersion, int waitSecs, boolean retry) throws Exception { + final String replicaUrl = (new ZkCoreNodeProps(replica)).getCoreUrl(); + RestTestHarness testHarness = new RestTestHarness(() -> replicaUrl.endsWith("/") ? replicaUrl.substring(0, replicaUrl.length()-1) : replicaUrl); + try { + long waitMs = waitSecs * 1000L; + if (waitMs > 0) Thread.sleep(waitMs); // wait a moment for the zk watcher to fire + + try { + testHarness.validateQuery("/schema/zkversion?wt=xml", "//zkversion=" + schemaZkVersion); + } catch (Exception exc) { + if (retry) { + // brief wait before retrying + Thread.sleep(waitMs > 0 ? 
waitMs : 2000L); + + testHarness.validateQuery("/schema/zkversion?wt=xml", "//zkversion=" + schemaZkVersion); + } else { + throw exc; + } + } + } finally { + testHarness.close(); + } + } + + private void concurrentOperationsTest() throws Exception { + + // First, add a bunch of fields and dynamic fields via PUT and POST, as well as copyFields, + // but do it fast enough and verify shards' schemas after all of them are added + int numFields = 100; + Info info = new Info(""); + + for (int fieldNum = 0; fieldNum <= numFields ; ++fieldNum) { + RestTestHarness publisher = restTestHarnesses.get(r.nextInt(restTestHarnesses.size())); + Operation.randomOperation().execute(publisher, fieldNum, info); + } + + String[] expectedAddFields = getExpectedFieldResponses(info); + String[] expectedAddDynamicFields = getExpectedDynamicFieldResponses(info); + String[] expectedCopyFields = getExpectedCopyFieldResponses(info); + String[] expectedAddFieldTypes = getExpectedFieldTypeResponses(info); + + boolean success = false; + long maxTimeoutMillis = 100000; + long startTime = System.nanoTime(); + String request = null; + String response = null; + String result = null; + + while ( ! 
success + && TimeUnit.MILLISECONDS.convert(System.nanoTime() - startTime, TimeUnit.NANOSECONDS) < maxTimeoutMillis) { + Thread.sleep(100); + + for (RestTestHarness client : restTestHarnesses) { + // verify addFieldTypePuts and addFieldTypePosts + request = "/schema/fieldtypes?wt=xml"; + response = client.query(request); + result = BaseTestHarness.validateXPath(response, expectedAddFieldTypes); + if (result != null) { + break; + } + + // verify addFieldPuts and addFieldPosts + request = "/schema/fields?wt=xml"; + response = client.query(request); + result = BaseTestHarness.validateXPath(response, expectedAddFields); + if (result != null) { + break; + } + + // verify addDynamicFieldPuts and addDynamicFieldPosts + request = "/schema/dynamicfields?wt=xml"; + response = client.query(request); + result = BaseTestHarness.validateXPath(response, expectedAddDynamicFields); + if (result != null) { + break; + } + + // verify copyFields + request = "/schema/copyfields?wt=xml"; + response = client.query(request); + result = BaseTestHarness.validateXPath(response, expectedCopyFields); + if (result != null) { + break; + } + } + success = (result == null); + } + if ( ! 
success) { + String msg = "QUERY FAILED: xpath=" + result + " request=" + request + " response=" + response; + log.error(msg); + fail(msg); + } + } + + private abstract class PutPostThread extends Thread { + RestTestHarness harness; + Info info; + public String fieldName; + + public PutPostThread(RestTestHarness harness, Info info) { + this.harness = harness; + this.info = info; + } + + public abstract void run(); + } + + private class PutFieldThread extends PutPostThread { + public PutFieldThread(RestTestHarness harness, Info info) { + super(harness, info); + fieldName = PUT_FIELDNAME + "Thread" + info.numAddFieldPuts++; + } + public void run() { + try { + // don't have the client side wait for all replicas to see the update or that defeats the purpose + // of testing the locking support on the server-side + addFieldPut(harness, fieldName, -1); + } catch (Exception e) { + // log.error("###ACTUAL FAILURE!"); + throw new RuntimeException(e); + } + } + } + + private class PostFieldThread extends PutPostThread { + public PostFieldThread(RestTestHarness harness, Info info) { + super(harness, info); + fieldName = POST_FIELDNAME + "Thread" + info.numAddFieldPosts++; + } + public void run() { + try { + addFieldPost(harness, fieldName, -1); + } catch (Exception e) { + // log.error("###ACTUAL FAILURE!"); + throw new RuntimeException(e); + } + } + } + + private class PutFieldTypeThread extends PutPostThread { + public PutFieldTypeThread(RestTestHarness harness, Info info) { + super(harness, info); + fieldName = PUT_FIELDTYPE + "Thread" + info.numAddFieldTypePuts++; + } + public void run() { + try { + addFieldTypePut(harness, fieldName, -1); + } catch (Exception e) { + // log.error("###ACTUAL FAILURE!"); + throw new RuntimeException(e); + } + } + } + + private class PostFieldTypeThread extends PutPostThread { + public PostFieldTypeThread(RestTestHarness harness, Info info) { + super(harness, info); + fieldName = POST_FIELDTYPE + "Thread" + info.numAddFieldTypePosts++; + } + 
public void run() { + try { + addFieldTypePost(harness, fieldName, -1); + } catch (Exception e) { + // log.error("###ACTUAL FAILURE!"); + throw new RuntimeException(e); + } + } + } + + private class PutDynamicFieldThread extends PutPostThread { + public PutDynamicFieldThread(RestTestHarness harness, Info info) { + super(harness, info); + fieldName = PUT_FIELDNAME + "Thread" + info.numAddFieldPuts++; + } + public void run() { + try { + addFieldPut(harness, fieldName, -1); + } catch (Exception e) { + // log.error("###ACTUAL FAILURE!"); + throw new RuntimeException(e); + } + } + } + + private class PostDynamicFieldThread extends PutPostThread { + public PostDynamicFieldThread(RestTestHarness harness, Info info) { + super(harness, info); + fieldName = POST_FIELDNAME + "Thread" + info.numAddFieldPosts++; + } + public void run() { + try { + addFieldPost(harness, fieldName, -1); + } catch (Exception e) { + // log.error("###ACTUAL FAILURE!"); + throw new RuntimeException(e); + } + } + } + + private void schemaLockTest() throws Exception { + + // First, add a bunch of fields via PUT and POST, as well as copyFields, + // but do it fast enough and verify shards' schemas after all of them are added + int numFields = 5; + Info info = new Info("Thread"); + + for (int i = 0; i <= numFields ; ++i) { + // System.err.println("###ITERATION: " + i); + RestTestHarness publisher = restTestHarnesses.get(r.nextInt(restTestHarnesses.size())); + PostFieldThread postFieldThread = new PostFieldThread(publisher, info); + postFieldThread.start(); + + publisher = restTestHarnesses.get(r.nextInt(restTestHarnesses.size())); + PutFieldThread putFieldThread = new PutFieldThread(publisher, info); + putFieldThread.start(); + + publisher = restTestHarnesses.get(r.nextInt(restTestHarnesses.size())); + PostDynamicFieldThread postDynamicFieldThread = new PostDynamicFieldThread(publisher, info); + postDynamicFieldThread.start(); + + publisher = restTestHarnesses.get(r.nextInt(restTestHarnesses.size())); + 
PutDynamicFieldThread putDynamicFieldThread = new PutDynamicFieldThread(publisher, info); + putDynamicFieldThread.start(); + + publisher = restTestHarnesses.get(r.nextInt(restTestHarnesses.size())); + PostFieldTypeThread postFieldTypeThread = new PostFieldTypeThread(publisher, info); + postFieldTypeThread.start(); + + publisher = restTestHarnesses.get(r.nextInt(restTestHarnesses.size())); + PutFieldTypeThread putFieldTypeThread = new PutFieldTypeThread(publisher, info); + putFieldTypeThread.start(); + + postFieldThread.join(); + putFieldThread.join(); + postDynamicFieldThread.join(); + putDynamicFieldThread.join(); + postFieldTypeThread.join(); + putFieldTypeThread.join(); + + String[] expectedAddFields = getExpectedFieldResponses(info); + String[] expectedAddFieldTypes = getExpectedFieldTypeResponses(info); + String[] expectedAddDynamicFields = getExpectedDynamicFieldResponses(info); + + boolean success = false; + long maxTimeoutMillis = 100000; + long startTime = System.nanoTime(); + String request = null; + String response = null; + String result = null; + + while ( ! 
success + && TimeUnit.MILLISECONDS.convert(System.nanoTime() - startTime, TimeUnit.NANOSECONDS) < maxTimeoutMillis) { + Thread.sleep(10); + + // int j = 0; + for (RestTestHarness client : restTestHarnesses) { + // System.err.println("###CHECKING HARNESS: " + j++ + " for iteration: " + i); + + // verify addFieldPuts and addFieldPosts + request = "/schema/fields?wt=xml"; + response = client.query(request); + //System.err.println("###RESPONSE: " + response); + result = BaseTestHarness.validateXPath(response, expectedAddFields); + + if (result != null) { + // System.err.println("###FAILURE!"); + break; + } + + // verify addDynamicFieldPuts and addDynamicFieldPosts + request = "/schema/dynamicfields?wt=xml"; + response = client.query(request); + //System.err.println("###RESPONSE: " + response); + result = BaseTestHarness.validateXPath(response, expectedAddDynamicFields); + + if (result != null) { + // System.err.println("###FAILURE!"); + break; + } + + request = "/schema/fieldtypes?wt=xml"; + response = client.query(request); + //System.err.println("###RESPONSE: " + response); + result = BaseTestHarness.validateXPath(response, expectedAddFieldTypes); + + if (result != null) { + // System.err.println("###FAILURE!"); + break; + } + + } + success = (result == null); + } + if ( ! 
success) { + String msg = "QUERY FAILED: xpath=" + result + " request=" + request + " response=" + response; + log.error(msg); + fail(msg); + } + } + } + + private static class CopyFieldInfo { + private String sourceField; + private String destField; + + public CopyFieldInfo(String sourceField, String destField) { + this.sourceField = sourceField; + this.destField = destField; + } + + public String getSourceField() { return sourceField; } + public String getDestField() { return destField; } + } +} diff --git a/solr/core/src/test/org/apache/solr/schema/TestCloudSchemaless.java b/solr/core/src/test/org/apache/solr/schema/TestCloudSchemaless.java index de774f73807..b479024feb1 100644 --- a/solr/core/src/test/org/apache/solr/schema/TestCloudSchemaless.java +++ b/solr/core/src/test/org/apache/solr/schema/TestCloudSchemaless.java @@ -105,7 +105,7 @@ public class TestCloudSchemaless extends AbstractFullDistribZkTestBase { // This tests that the replicas properly handle schema additions. int slices = getCommonCloudSolrClient().getZkStateReader().getClusterState() - .getActiveSlices("collection1").size(); + .getCollection("collection1").getActiveSlices().size(); int trials = 50; // generate enough docs so that we can expect at least a doc per slice int numDocsPerTrial = (int)(slices * (Math.log(slices) + 1)); diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java b/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java index 65bd81b80d4..941eb098748 100644 --- a/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java +++ b/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java @@ -89,20 +89,6 @@ public class ClusterState implements JSONWriter.Writable { return result; } - - /** - * Get the lead replica for specific collection, or null if one currently doesn't exist. 
- * @deprecated Use {@link DocCollection#getLeader(String)} instead - */ - @Deprecated - public Replica getLeader(String collection, String sliceName) { - DocCollection coll = getCollectionOrNull(collection); - if (coll == null) return null; - Slice slice = coll.getSlice(sliceName); - if (slice == null) return null; - return slice.getLeader(); - } - /** * Returns true if the specified collection name exists, false otherwise. * @@ -115,77 +101,6 @@ public class ClusterState implements JSONWriter.Writable { return getCollectionOrNull(collectionName) != null; } - /** - * Gets the replica by the core node name (assuming the slice is unknown) or null if replica is not found. - * If the slice is known, do not use this method. - * coreNodeName is the same as replicaName - * - * @deprecated use {@link DocCollection#getReplica(String)} instead - */ - @Deprecated - public Replica getReplica(final String collection, final String coreNodeName) { - DocCollection coll = getCollectionOrNull(collection); - if (coll == null) return null; - for (Slice slice : coll.getSlices()) { - Replica replica = slice.getReplica(coreNodeName); - if (replica != null) return replica; - } - return null; - } - - /** - * Get the named Slice for collection, or null if not found. 
- * - * @deprecated use {@link DocCollection#getSlice(String)} instead - */ - @Deprecated - public Slice getSlice(String collection, String sliceName) { - DocCollection coll = getCollectionOrNull(collection); - if (coll == null) return null; - return coll.getSlice(sliceName); - } - - /** - * @deprecated use {@link DocCollection#getSlicesMap()} instead - */ - @Deprecated - public Map getSlicesMap(String collection) { - DocCollection coll = getCollectionOrNull(collection); - if (coll == null) return null; - return coll.getSlicesMap(); - } - - /** - * @deprecated use {@link DocCollection#getActiveSlicesMap()} instead - */ - @Deprecated - public Map getActiveSlicesMap(String collection) { - DocCollection coll = getCollectionOrNull(collection); - if (coll == null) return null; - return coll.getActiveSlicesMap(); - } - - /** - * @deprecated use {@link DocCollection#getSlices()} instead - */ - @Deprecated - public Collection getSlices(String collection) { - DocCollection coll = getCollectionOrNull(collection); - if (coll == null) return null; - return coll.getSlices(); - } - - /** - * @deprecated use {@link DocCollection#getActiveSlices()} instead - */ - @Deprecated - public Collection getActiveSlices(String collection) { - DocCollection coll = getCollectionOrNull(collection); - if (coll == null) return null; - return coll.getActiveSlices(); - } - - /** * Get the named DocCollection object, or throw an exception if it doesn't exist. */ @@ -213,27 +128,6 @@ public class ClusterState implements JSONWriter.Writable { return ref == null ? null : ref.get(); } - /** - * Get collection names. - * - * Implementation note: This method resolves the collection reference by calling - * {@link CollectionRef#get()} which can make a call to ZooKeeper. This is necessary - * because the semantics of how collection list is loaded have changed in SOLR-6629. 
- * Please see javadocs in {@link ZkStateReader#refreshCollectionList(Watcher)} - * - * @deprecated use {@link #getCollectionsMap()} to avoid a second lookup for lazy collections - */ - @Deprecated - public Set getCollections() { - Set result = new HashSet<>(); - for (Entry entry : collectionStates.entrySet()) { - if (entry.getValue().get() != null) { - result.add(entry.getKey()); - } - } - return result; - } - /** * Get a map of collection name vs DocCollection objects * @@ -397,6 +291,7 @@ public class ClusterState implements JSONWriter.Writable { * * @return null if ClusterState was created for publication, not consumption * @deprecated true cluster state spans many ZK nodes, stop depending on the version number of the shared node! + * will be removed in 8.0 */ @Deprecated public Integer getZkClusterStateVersion() { diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterStateUtil.java b/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterStateUtil.java index 10ff5e14fdb..bbdc5fadb0e 100644 --- a/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterStateUtil.java +++ b/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterStateUtil.java @@ -218,7 +218,7 @@ public class ClusterStateUtil { public static int getLiveAndActiveReplicaCount(ZkStateReader zkStateReader, String collection) { Collection slices; - slices = zkStateReader.getClusterState().getActiveSlices(collection); + slices = zkStateReader.getClusterState().getCollection(collection).getActiveSlices(); int liveAndActive = 0; for (Slice slice : slices) { for (Replica replica : slice.getReplicas()) { diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java b/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java index c01f15b26bd..0a7d76fa1f0 100644 --- a/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java +++ b/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java @@ -596,7 +596,7 @@ public class ZkStateReader 
implements Closeable { * In fact this is a clever way to avoid doing a ZK exists check on * the /collections/collection_name/state.json znode * Such an exists check is done in {@link ClusterState#hasCollection(String)} and - * {@link ClusterState#getCollections()} and {@link ClusterState#getCollectionsMap()} methods + * {@link ClusterState#getCollectionsMap()} methods * have a safeguard against exposing wrong collection names to the users */ private void refreshCollectionList(Watcher watcher) throws KeeperException, InterruptedException { @@ -799,12 +799,13 @@ public class ZkStateReader implements Closeable { if (clusterState == null) { return null; } - Map slices = clusterState.getSlicesMap(collection); - if (slices == null) { + final DocCollection docCollection = clusterState.getCollectionOrNull(collection); + if (docCollection == null || docCollection.getSlicesMap() == null) { throw new ZooKeeperException(ErrorCode.BAD_REQUEST, "Could not find collection in zk: " + collection); } + Map slices = docCollection.getSlicesMap(); Slice replicas = slices.get(shardId); if (replicas == null) { throw new ZooKeeperException(ErrorCode.BAD_REQUEST, "Could not find shardId in zk: " + shardId); diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java index b7c41629e2f..5f0e596af67 100644 --- a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java +++ b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java @@ -151,7 +151,9 @@ public abstract class AbstractDistribZkTestBase extends BaseDistributedSearchTes if (verbose) System.out.println("-"); boolean sawLiveRecovering = false; ClusterState clusterState = zkStateReader.getClusterState(); - Map slices = clusterState.getSlicesMap(collection); + final DocCollection docCollection = clusterState.getCollectionOrNull(collection); + assertNotNull("Could not find 
collection:" + collection, docCollection); + Map slices = docCollection.getSlicesMap(); assertNotNull("Could not find collection:" + collection, slices); for (Map.Entry entry : slices.entrySet()) { Slice slice = entry.getValue(); @@ -254,8 +256,9 @@ public abstract class AbstractDistribZkTestBase extends BaseDistributedSearchTes int maxIterations = 100; Replica.State coreState = null; while(maxIterations-->0) { - Slice slice = reader.getClusterState().getSlice(collection, shard); - if(slice!=null) { + final DocCollection docCollection = reader.getClusterState().getCollectionOrNull(collection); + if(docCollection != null && docCollection.getSlice(shard)!=null) { + Slice slice = docCollection.getSlice(shard); Replica replica = slice.getReplicasMap().get(coreNodeName); if (replica != null) { coreState = replica.getState(); @@ -274,10 +277,12 @@ public abstract class AbstractDistribZkTestBase extends BaseDistributedSearchTes zkStateReader.forceUpdateCollection(collection); ClusterState clusterState = zkStateReader.getClusterState(); - Map slices = clusterState.getSlicesMap(collection); - if (slices == null) { + final DocCollection docCollection = clusterState.getCollectionOrNull(collection); + if (docCollection == null || docCollection.getSlices() == null) { throw new IllegalArgumentException("Cannot find collection:" + collection); } + + Map slices = docCollection.getSlicesMap(); for (Map.Entry entry : slices.entrySet()) { Slice slice = entry.getValue(); if (slice.getState() != Slice.State.ACTIVE) { diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java index 2ac5dd4cd05..90d5bd9315f 100644 --- a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java +++ b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java @@ -351,11 +351,12 @@ public abstract class 
AbstractFullDistribZkTestBase extends AbstractDistribZkTes Thread.sleep(500); } cnt = 30; - while (reader.getClusterState().getSlices(collection).size() < slices) { + + while (reader.getClusterState().getCollection(collection).getSlices().size() < slices) { if (cnt == 0) { throw new RuntimeException("timeout waiting for collection shards to come up: collection="+collection - + ", slices.expected="+slices+ " slices.actual= " + reader.getClusterState().getSlices(collection).size() - + " slices : "+ reader.getClusterState().getSlices(collection) ); + + ", slices.expected="+slices+ " slices.actual= " + reader.getClusterState().getCollection(collection).getSlices().size() + + " slices : "+ reader.getClusterState().getCollection(collection).getSlices() ); } cnt--; Thread.sleep(500); @@ -874,11 +875,11 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes protected ZkCoreNodeProps getLeaderUrlFromZk(String collection, String slice) { ClusterState clusterState = getCommonCloudSolrClient().getZkStateReader().getClusterState(); - ZkNodeProps leader = clusterState.getLeader(collection, slice); - if (leader == null) { - throw new RuntimeException("Could not find leader:" + collection + " " + slice); + final DocCollection docCollection = clusterState.getCollectionOrNull(collection); + if (docCollection != null && docCollection.getLeader(slice) != null) { + return new ZkCoreNodeProps(docCollection.getLeader(slice)); } - return new ZkCoreNodeProps(leader); + throw new RuntimeException("Could not find leader:" + collection + " " + slice); } @Override @@ -1169,7 +1170,7 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes assertEquals( "The client count does not match up with the shard count for slice:" + shard, - zkStateReader.getClusterState().getSlice(DEFAULT_COLLECTION, shard) + zkStateReader.getClusterState().getCollection(DEFAULT_COLLECTION).getSlice(shard) .getReplicasMap().size(), solrJetties.size()); 
CloudJettyRunner lastJetty = null; @@ -1403,7 +1404,8 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes AbstractZkTestCase.TIMEOUT)) { zk.createClusterStateWatchersAndUpdate(); clusterState = zk.getClusterState(); - slices = clusterState.getSlicesMap(DEFAULT_COLLECTION); + final DocCollection docCollection = clusterState.getCollectionOrNull(DEFAULT_COLLECTION); + slices = (docCollection != null) ? docCollection.getSlicesMap() : null; } if (slices == null) { @@ -1949,7 +1951,7 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes zkr.forceUpdateCollection(testCollectionName); // force the state to be fresh ClusterState cs = zkr.getClusterState(); - Collection slices = cs.getActiveSlices(testCollectionName); + Collection slices = cs.getCollection(testCollectionName).getActiveSlices(); assertTrue(slices.size() == shards); boolean allReplicasUp = false; long waitMs = 0L; @@ -1958,7 +1960,9 @@ public abstract class AbstractFullDistribZkTestBase extends AbstractDistribZkTes while (waitMs < maxWaitMs && !allReplicasUp) { cs = cloudClient.getZkStateReader().getClusterState(); assertNotNull(cs); - Slice shard = cs.getSlice(testCollectionName, shardId); + final DocCollection docCollection = cs.getCollectionOrNull(testCollectionName); + assertNotNull("No collection found for " + testCollectionName, docCollection); + Slice shard = docCollection.getSlice(shardId); assertNotNull("No Slice for "+shardId, shard); allReplicasUp = true; // assume true Collection replicas = shard.getReplicas(); diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/ChaosMonkey.java b/solr/test-framework/src/java/org/apache/solr/cloud/ChaosMonkey.java index bdbbdd2b6f8..86f89069c42 100644 --- a/solr/test-framework/src/java/org/apache/solr/cloud/ChaosMonkey.java +++ b/solr/test-framework/src/java/org/apache/solr/cloud/ChaosMonkey.java @@ -333,7 +333,7 @@ public class ChaosMonkey { } private String getRandomSlice() { - Map slices = 
zkStateReader.getClusterState().getSlicesMap(collection); + Map slices = zkStateReader.getClusterState().getCollection(collection).getSlicesMap(); List sliceKeyList = new ArrayList<>(slices.size()); sliceKeyList.addAll(slices.keySet()); From 292fca651a169e6f653567023951c92b63708c54 Mon Sep 17 00:00:00 2001 From: Cao Manh Dat Date: Tue, 1 Aug 2017 15:17:20 +0700 Subject: [PATCH 41/95] SOLR-9321: Remove tests, they were accidentally added because of cherry-pick --- .../solr/cloud/TestMiniSolrCloudCluster.java | 389 ---------- .../TestCloudManagedSchemaConcurrent.java | 717 ------------------ 2 files changed, 1106 deletions(-) delete mode 100644 solr/core/src/test/org/apache/solr/cloud/TestMiniSolrCloudCluster.java delete mode 100644 solr/core/src/test/org/apache/solr/schema/TestCloudManagedSchemaConcurrent.java diff --git a/solr/core/src/test/org/apache/solr/cloud/TestMiniSolrCloudCluster.java b/solr/core/src/test/org/apache/solr/cloud/TestMiniSolrCloudCluster.java deleted file mode 100644 index d4a131bc02c..00000000000 --- a/solr/core/src/test/org/apache/solr/cloud/TestMiniSolrCloudCluster.java +++ /dev/null @@ -1,389 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.solr.cloud; - -import java.lang.invoke.MethodHandles; -import java.net.URL; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; - -import com.carrotsearch.randomizedtesting.rules.SystemPropertiesRestoreRule; -import org.apache.lucene.util.LuceneTestCase; -import org.apache.lucene.util.LuceneTestCase.SuppressSysoutChecks; -import org.apache.solr.SolrTestCaseJ4; -import org.apache.solr.client.solrj.SolrQuery; -import org.apache.solr.client.solrj.embedded.JettyConfig; -import org.apache.solr.client.solrj.embedded.JettyConfig.Builder; -import org.apache.solr.client.solrj.embedded.JettySolrRunner; -import org.apache.solr.client.solrj.impl.CloudSolrClient; -import org.apache.solr.client.solrj.request.CollectionAdminRequest; -import org.apache.solr.client.solrj.response.QueryResponse; -import org.apache.solr.common.SolrInputDocument; -import org.apache.solr.common.cloud.ClusterState; -import org.apache.solr.common.cloud.Replica; -import org.apache.solr.common.cloud.Slice; -import org.apache.solr.common.cloud.SolrZkClient; -import org.apache.solr.common.cloud.ZkStateReader; -import org.apache.solr.core.CoreDescriptor; -import org.apache.solr.index.TieredMergePolicyFactory; -import org.apache.solr.util.RevertDefaultThreadHandlerRule; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.RuleChain; -import org.junit.rules.TestRule; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Test of the MiniSolrCloudCluster functionality. Keep in mind, - * MiniSolrCloudCluster is designed to be used outside of the Lucene test - * hierarchy. 
- */ -@SuppressSysoutChecks(bugUrl = "Solr logs to JUL") -public class TestMiniSolrCloudCluster extends LuceneTestCase { - - private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - protected int NUM_SERVERS = 5; - protected int NUM_SHARDS = 2; - protected int REPLICATION_FACTOR = 2; - - public TestMiniSolrCloudCluster () { - NUM_SERVERS = 5; - NUM_SHARDS = 2; - REPLICATION_FACTOR = 2; - } - - @BeforeClass - public static void setupHackNumerics() { // SOLR-10916 - SolrTestCaseJ4.randomizeNumericTypesProperties(); - } - @AfterClass - public static void clearHackNumerics() { // SOLR-10916 - SolrTestCaseJ4.clearNumericTypesProperties(); - } - - @Rule - public TestRule solrTestRules = RuleChain - .outerRule(new SystemPropertiesRestoreRule()); - - @ClassRule - public static TestRule solrClassRules = RuleChain.outerRule( - new SystemPropertiesRestoreRule()).around( - new RevertDefaultThreadHandlerRule()); - - private MiniSolrCloudCluster createMiniSolrCloudCluster() throws Exception { - Builder jettyConfig = JettyConfig.builder(); - jettyConfig.waitForLoadingCoresToFinish(null); - return new MiniSolrCloudCluster(NUM_SERVERS, createTempDir(), jettyConfig.build()); - } - - private void createCollection(MiniSolrCloudCluster miniCluster, String collectionName, String createNodeSet, String asyncId, - Boolean indexToPersist, Map collectionProperties) throws Exception { - String configName = "solrCloudCollectionConfig"; - miniCluster.uploadConfigSet(SolrTestCaseJ4.TEST_PATH().resolve("collection1").resolve("conf"), configName); - - final boolean persistIndex = (indexToPersist != null ? 
indexToPersist.booleanValue() : random().nextBoolean()); - if (collectionProperties == null) { - collectionProperties = new HashMap<>(); - } - collectionProperties.putIfAbsent(CoreDescriptor.CORE_CONFIG, "solrconfig-tlog.xml"); - collectionProperties.putIfAbsent("solr.tests.maxBufferedDocs", "100000"); - collectionProperties.putIfAbsent("solr.tests.ramBufferSizeMB", "100"); - // use non-test classes so RandomizedRunner isn't necessary - collectionProperties.putIfAbsent(SolrTestCaseJ4.SYSTEM_PROPERTY_SOLR_TESTS_MERGEPOLICYFACTORY, TieredMergePolicyFactory.class.getName()); - collectionProperties.putIfAbsent("solr.tests.mergeScheduler", "org.apache.lucene.index.ConcurrentMergeScheduler"); - collectionProperties.putIfAbsent("solr.directoryFactory", (persistIndex ? "solr.StandardDirectoryFactory" : "solr.RAMDirectoryFactory")); - - if (asyncId == null) { - CollectionAdminRequest.createCollection(collectionName, configName, NUM_SHARDS, REPLICATION_FACTOR) - .setCreateNodeSet(createNodeSet) - .setProperties(collectionProperties) - .process(miniCluster.getSolrClient()); - } - else { - CollectionAdminRequest.createCollection(collectionName, configName, NUM_SHARDS, REPLICATION_FACTOR) - .setCreateNodeSet(createNodeSet) - .setProperties(collectionProperties) - .processAndWait(miniCluster.getSolrClient(), 30); - } - } - - @Test - public void testCollectionCreateSearchDelete() throws Exception { - - final String collectionName = "testcollection"; - MiniSolrCloudCluster miniCluster = createMiniSolrCloudCluster(); - - final CloudSolrClient cloudSolrClient = miniCluster.getSolrClient(); - - try { - assertNotNull(miniCluster.getZkServer()); - List jettys = miniCluster.getJettySolrRunners(); - assertEquals(NUM_SERVERS, jettys.size()); - for (JettySolrRunner jetty : jettys) { - assertTrue(jetty.isRunning()); - } - - // shut down a server - log.info("#### Stopping a server"); - JettySolrRunner stoppedServer = miniCluster.stopJettySolrRunner(0); - 
assertTrue(stoppedServer.isStopped()); - assertEquals(NUM_SERVERS - 1, miniCluster.getJettySolrRunners().size()); - - // create a server - log.info("#### Starting a server"); - JettySolrRunner startedServer = miniCluster.startJettySolrRunner(); - assertTrue(startedServer.isRunning()); - assertEquals(NUM_SERVERS, miniCluster.getJettySolrRunners().size()); - - // create collection - log.info("#### Creating a collection"); - final String asyncId = (random().nextBoolean() ? null : "asyncId("+collectionName+".create)="+random().nextInt()); - createCollection(miniCluster, collectionName, null, asyncId, null, null); - - ZkStateReader zkStateReader = miniCluster.getSolrClient().getZkStateReader(); - AbstractDistribZkTestBase.waitForRecoveriesToFinish(collectionName, zkStateReader, true, true, 330); - - // modify/query collection - log.info("#### updating a querying collection"); - cloudSolrClient.setDefaultCollection(collectionName); - SolrInputDocument doc = new SolrInputDocument(); - doc.setField("id", "1"); - cloudSolrClient.add(doc); - cloudSolrClient.commit(); - SolrQuery query = new SolrQuery(); - query.setQuery("*:*"); - QueryResponse rsp = cloudSolrClient.query(query); - assertEquals(1, rsp.getResults().getNumFound()); - - // remove a server not hosting any replicas - zkStateReader.forceUpdateCollection(collectionName); - ClusterState clusterState = zkStateReader.getClusterState(); - HashMap jettyMap = new HashMap(); - for (JettySolrRunner jetty : miniCluster.getJettySolrRunners()) { - String key = jetty.getBaseUrl().toString().substring((jetty.getBaseUrl().getProtocol() + "://").length()); - jettyMap.put(key, jetty); - } - Collection slices = clusterState.getCollection(collectionName).getSlices(); - // track the servers not host repliacs - for (Slice slice : slices) { - jettyMap.remove(slice.getLeader().getNodeName().replace("_solr", "/solr")); - for (Replica replica : slice.getReplicas()) { - jettyMap.remove(replica.getNodeName().replace("_solr", "/solr")); - } - 
} - assertTrue("Expected to find a node without a replica", jettyMap.size() > 0); - log.info("#### Stopping a server"); - JettySolrRunner jettyToStop = jettyMap.entrySet().iterator().next().getValue(); - jettys = miniCluster.getJettySolrRunners(); - for (int i = 0; i < jettys.size(); ++i) { - if (jettys.get(i).equals(jettyToStop)) { - miniCluster.stopJettySolrRunner(i); - assertEquals(NUM_SERVERS - 1, miniCluster.getJettySolrRunners().size()); - } - } - - // re-create a server (to restore original NUM_SERVERS count) - log.info("#### Starting a server"); - startedServer = miniCluster.startJettySolrRunner(jettyToStop); - assertTrue(startedServer.isRunning()); - assertEquals(NUM_SERVERS, miniCluster.getJettySolrRunners().size()); - - CollectionAdminRequest.deleteCollection(collectionName).process(miniCluster.getSolrClient()); - - // create it again - String asyncId2 = (random().nextBoolean() ? null : "asyncId("+collectionName+".create)="+random().nextInt()); - createCollection(miniCluster, collectionName, null, asyncId2, null, null); - AbstractDistribZkTestBase.waitForRecoveriesToFinish(collectionName, zkStateReader, true, true, 330); - - // check that there's no left-over state - assertEquals(0, cloudSolrClient.query(new SolrQuery("*:*")).getResults().getNumFound()); - cloudSolrClient.add(doc); - cloudSolrClient.commit(); - assertEquals(1, cloudSolrClient.query(new SolrQuery("*:*")).getResults().getNumFound()); - - } - finally { - miniCluster.shutdown(); - } - } - - @Test - public void testCollectionCreateWithoutCoresThenDelete() throws Exception { - - final String collectionName = "testSolrCloudCollectionWithoutCores"; - final MiniSolrCloudCluster miniCluster = createMiniSolrCloudCluster(); - final CloudSolrClient cloudSolrClient = miniCluster.getSolrClient(); - - try { - assertNotNull(miniCluster.getZkServer()); - assertFalse(miniCluster.getJettySolrRunners().isEmpty()); - - // create collection - final String asyncId = (random().nextBoolean() ? 
null : "asyncId("+collectionName+".create)="+random().nextInt()); - createCollection(miniCluster, collectionName, OverseerCollectionMessageHandler.CREATE_NODE_SET_EMPTY, asyncId, null, null); - - try (SolrZkClient zkClient = new SolrZkClient - (miniCluster.getZkServer().getZkAddress(), AbstractZkTestCase.TIMEOUT, AbstractZkTestCase.TIMEOUT, null); - ZkStateReader zkStateReader = new ZkStateReader(zkClient)) { - zkStateReader.createClusterStateWatchersAndUpdate(); - - // wait for collection to appear - AbstractDistribZkTestBase.waitForRecoveriesToFinish(collectionName, zkStateReader, true, true, 330); - - // check the collection's corelessness - { - int coreCount = 0; - for (Map.Entry entry : zkStateReader.getClusterState() - .getCollection(collectionName).getSlicesMap().entrySet()) { - coreCount += entry.getValue().getReplicasMap().entrySet().size(); - } - assertEquals(0, coreCount); - } - - } - } - finally { - miniCluster.shutdown(); - } - } - - @Test - public void testStopAllStartAll() throws Exception { - - final String collectionName = "testStopAllStartAllCollection"; - - final MiniSolrCloudCluster miniCluster = createMiniSolrCloudCluster(); - - try { - assertNotNull(miniCluster.getZkServer()); - List jettys = miniCluster.getJettySolrRunners(); - assertEquals(NUM_SERVERS, jettys.size()); - for (JettySolrRunner jetty : jettys) { - assertTrue(jetty.isRunning()); - } - - createCollection(miniCluster, collectionName, null, null, Boolean.TRUE, null); - final CloudSolrClient cloudSolrClient = miniCluster.getSolrClient(); - cloudSolrClient.setDefaultCollection(collectionName); - final SolrQuery query = new SolrQuery("*:*"); - final SolrInputDocument doc = new SolrInputDocument(); - - try (SolrZkClient zkClient = new SolrZkClient - (miniCluster.getZkServer().getZkAddress(), AbstractZkTestCase.TIMEOUT, AbstractZkTestCase.TIMEOUT, null); - ZkStateReader zkStateReader = new ZkStateReader(zkClient)) { - zkStateReader.createClusterStateWatchersAndUpdate(); - 
AbstractDistribZkTestBase.waitForRecoveriesToFinish(collectionName, zkStateReader, true, true, 330); - - // modify collection - final int numDocs = 1 + random().nextInt(10); - for (int ii = 1; ii <= numDocs; ++ii) { - doc.setField("id", ""+ii); - cloudSolrClient.add(doc); - if (ii*2 == numDocs) cloudSolrClient.commit(); - } - cloudSolrClient.commit(); - // query collection - { - final QueryResponse rsp = cloudSolrClient.query(query); - assertEquals(numDocs, rsp.getResults().getNumFound()); - } - - // the test itself - zkStateReader.forceUpdateCollection(collectionName); - final ClusterState clusterState = zkStateReader.getClusterState(); - - final HashSet leaderIndices = new HashSet(); - final HashSet followerIndices = new HashSet(); - { - final HashMap shardLeaderMap = new HashMap(); - for (final Slice slice : clusterState.getCollection(collectionName).getSlices()) { - for (final Replica replica : slice.getReplicas()) { - shardLeaderMap.put(replica.getNodeName().replace("_solr", "/solr"), Boolean.FALSE); - } - shardLeaderMap.put(slice.getLeader().getNodeName().replace("_solr", "/solr"), Boolean.TRUE); - } - for (int ii = 0; ii < jettys.size(); ++ii) { - final URL jettyBaseUrl = jettys.get(ii).getBaseUrl(); - final String jettyBaseUrlString = jettyBaseUrl.toString().substring((jettyBaseUrl.getProtocol() + "://").length()); - final Boolean isLeader = shardLeaderMap.get(jettyBaseUrlString); - if (Boolean.TRUE.equals(isLeader)) { - leaderIndices.add(new Integer(ii)); - } else if (Boolean.FALSE.equals(isLeader)) { - followerIndices.add(new Integer(ii)); - } // else neither leader nor follower i.e. 
node without a replica (for our collection) - } - } - final List leaderIndicesList = new ArrayList(leaderIndices); - final List followerIndicesList = new ArrayList(followerIndices); - - // first stop the followers (in no particular order) - Collections.shuffle(followerIndicesList, random()); - for (Integer ii : followerIndicesList) { - if (!leaderIndices.contains(ii)) { - miniCluster.stopJettySolrRunner(jettys.get(ii.intValue())); - } - } - - // then stop the leaders (again in no particular order) - Collections.shuffle(leaderIndicesList, random()); - for (Integer ii : leaderIndicesList) { - miniCluster.stopJettySolrRunner(jettys.get(ii.intValue())); - } - - // calculate restart order - final List restartIndicesList = new ArrayList(); - Collections.shuffle(leaderIndicesList, random()); - restartIndicesList.addAll(leaderIndicesList); - Collections.shuffle(followerIndicesList, random()); - restartIndicesList.addAll(followerIndicesList); - if (random().nextBoolean()) Collections.shuffle(restartIndicesList, random()); - - // and then restart jettys in that order - for (Integer ii : restartIndicesList) { - final JettySolrRunner jetty = jettys.get(ii.intValue()); - if (!jetty.isRunning()) { - miniCluster.startJettySolrRunner(jetty); - assertTrue(jetty.isRunning()); - } - } - AbstractDistribZkTestBase.waitForRecoveriesToFinish(collectionName, zkStateReader, true, true, 330); - - zkStateReader.forceUpdateCollection(collectionName); - - // re-query collection - { - final QueryResponse rsp = cloudSolrClient.query(query); - assertEquals(numDocs, rsp.getResults().getNumFound()); - } - - } - } - finally { - miniCluster.shutdown(); - } - } - -} diff --git a/solr/core/src/test/org/apache/solr/schema/TestCloudManagedSchemaConcurrent.java b/solr/core/src/test/org/apache/solr/schema/TestCloudManagedSchemaConcurrent.java deleted file mode 100644 index f21df6c94cb..00000000000 --- a/solr/core/src/test/org/apache/solr/schema/TestCloudManagedSchemaConcurrent.java +++ /dev/null @@ -1,717 
+0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.solr.schema; -import java.lang.invoke.MethodHandles; -import java.util.ArrayList; -import java.util.List; -import java.util.SortedMap; -import java.util.TreeMap; -import java.util.concurrent.TimeUnit; - -import org.apache.solr.client.solrj.SolrClient; -import org.apache.solr.client.solrj.embedded.JettySolrRunner; -import org.apache.solr.client.solrj.impl.HttpSolrClient; -import org.apache.solr.cloud.AbstractFullDistribZkTestBase; -import org.apache.solr.common.cloud.ClusterState; -import org.apache.solr.common.cloud.Replica; -import org.apache.solr.common.cloud.Slice; -import org.apache.solr.common.cloud.SolrZkClient; -import org.apache.solr.common.cloud.ZkCoreNodeProps; -import org.apache.solr.util.BaseTestHarness; -import org.apache.solr.util.RestTestHarness; -import org.apache.zookeeper.data.Stat; -import org.eclipse.jetty.servlet.ServletHolder; -import org.junit.BeforeClass; -import org.junit.Ignore; -import org.junit.Test; -import org.restlet.ext.servlet.ServerServlet; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -@Ignore -public class TestCloudManagedSchemaConcurrent extends AbstractFullDistribZkTestBase { - private 
static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - private static final String SUCCESS_XPATH = "/response/lst[@name='responseHeader']/int[@name='status'][.='0']"; - private static final String PUT_DYNAMIC_FIELDNAME = "newdynamicfieldPut"; - private static final String POST_DYNAMIC_FIELDNAME = "newdynamicfieldPost"; - private static final String PUT_FIELDNAME = "newfieldPut"; - private static final String POST_FIELDNAME = "newfieldPost"; - private static final String PUT_FIELDTYPE = "newfieldtypePut"; - private static final String POST_FIELDTYPE = "newfieldtypePost"; - - public TestCloudManagedSchemaConcurrent() { - super(); - sliceCount = 4; - } - - @BeforeClass - public static void initSysProperties() { - System.setProperty("managed.schema.mutable", "true"); - System.setProperty("enable.update.log", "true"); - } - - @Override - public void distribTearDown() throws Exception { - super.distribTearDown(); - for (RestTestHarness h : restTestHarnesses) { - h.close(); - } - } - - @Override - protected String getCloudSolrConfig() { - return "solrconfig-managed-schema.xml"; - } - - @Override - public SortedMap getExtraServlets() { - final SortedMap extraServlets = new TreeMap<>(); - final ServletHolder solrRestApi = new ServletHolder("SolrSchemaRestApi", ServerServlet.class); - solrRestApi.setInitParameter("org.restlet.application", "org.apache.solr.rest.SolrSchemaRestApi"); - extraServlets.put(solrRestApi, "/schema/*"); // '/schema/*' matches '/schema', '/schema/', and '/schema/whatever...' 
- return extraServlets; - } - - private List restTestHarnesses = new ArrayList<>(); - - private void setupHarnesses() { - for (final SolrClient client : clients) { - RestTestHarness harness = new RestTestHarness(() -> ((HttpSolrClient)client).getBaseURL()); - restTestHarnesses.add(harness); - } - } - - private static void verifySuccess(String request, String response) throws Exception { - String result = BaseTestHarness.validateXPath(response, SUCCESS_XPATH); - if (null != result) { - String msg = "QUERY FAILED: xpath=" + result + " request=" + request + " response=" + response; - log.error(msg); - fail(msg); - } - } - - private static void addFieldPut(RestTestHarness publisher, String fieldName, int updateTimeoutSecs) throws Exception { - final String content = "{\"type\":\"text\",\"stored\":\"false\"}"; - String request = "/schema/fields/" + fieldName + "?wt=xml"; - if (updateTimeoutSecs > 0) - request += "&updateTimeoutSecs="+updateTimeoutSecs; - String response = publisher.put(request, content); - verifySuccess(request, response); - } - - private static void addFieldPost(RestTestHarness publisher, String fieldName, int updateTimeoutSecs) throws Exception { - final String content = "[{\"name\":\""+fieldName+"\",\"type\":\"text\",\"stored\":\"false\"}]"; - String request = "/schema/fields/?wt=xml"; - if (updateTimeoutSecs > 0) - request += "&updateTimeoutSecs="+updateTimeoutSecs; - String response = publisher.post(request, content); - verifySuccess(request, response); - } - - private static void addDynamicFieldPut(RestTestHarness publisher, String dynamicFieldPattern, int updateTimeoutSecs) throws Exception { - final String content = "{\"type\":\"text\",\"stored\":\"false\"}"; - String request = "/schema/dynamicfields/" + dynamicFieldPattern + "?wt=xml"; - if (updateTimeoutSecs > 0) - request += "&updateTimeoutSecs="+updateTimeoutSecs; - String response = publisher.put(request, content); - verifySuccess(request, response); - } - - private static void 
addDynamicFieldPost(RestTestHarness publisher, String dynamicFieldPattern, int updateTimeoutSecs) throws Exception { - final String content = "[{\"name\":\""+dynamicFieldPattern+"\",\"type\":\"text\",\"stored\":\"false\"}]"; - String request = "/schema/dynamicfields/?wt=xml"; - if (updateTimeoutSecs > 0) - request += "&updateTimeoutSecs="+updateTimeoutSecs; - String response = publisher.post(request, content); - verifySuccess(request, response); - } - - private static void copyField(RestTestHarness publisher, String source, String dest, int updateTimeoutSecs) throws Exception { - final String content = "[{\"source\":\""+source+"\",\"dest\":[\""+dest+"\"]}]"; - String request = "/schema/copyfields/?wt=xml"; - if (updateTimeoutSecs > 0) - request += "&updateTimeoutSecs="+updateTimeoutSecs; - String response = publisher.post(request, content); - verifySuccess(request, response); - } - - private static void addFieldTypePut(RestTestHarness publisher, String typeName, int updateTimeoutSecs) throws Exception { - final String content = "{\"class\":\""+RANDOMIZED_NUMERIC_FIELDTYPES.get(Integer.class)+"\"}"; - String request = "/schema/fieldtypes/" + typeName + "?wt=xml"; - if (updateTimeoutSecs > 0) - request += "&updateTimeoutSecs="+updateTimeoutSecs; - String response = publisher.put(request, content); - verifySuccess(request, response); - } - - private static void addFieldTypePost(RestTestHarness publisher, String typeName, int updateTimeoutSecs) throws Exception { - final String content = "[{\"name\":\""+typeName+"\",\"class\":\""+RANDOMIZED_NUMERIC_FIELDTYPES.get(Integer.class)+"\"}]"; - String request = "/schema/fieldtypes/?wt=xml"; - if (updateTimeoutSecs > 0) - request += "&updateTimeoutSecs="+updateTimeoutSecs; - String response = publisher.post(request, content); - verifySuccess(request, response); - } - - private String[] getExpectedFieldResponses(Info info) { - String[] expectedAddFields = new String[1 + info.numAddFieldPuts + info.numAddFieldPosts]; - 
expectedAddFields[0] = SUCCESS_XPATH; - - for (int i = 0; i < info.numAddFieldPuts; ++i) { - String newFieldName = PUT_FIELDNAME + info.fieldNameSuffix + i; - expectedAddFields[1 + i] - = "/response/arr[@name='fields']/lst/str[@name='name'][.='" + newFieldName + "']"; - } - - for (int i = 0; i < info.numAddFieldPosts; ++i) { - String newFieldName = POST_FIELDNAME + info.fieldNameSuffix + i; - expectedAddFields[1 + info.numAddFieldPuts + i] - = "/response/arr[@name='fields']/lst/str[@name='name'][.='" + newFieldName + "']"; - } - - return expectedAddFields; - } - - private String[] getExpectedDynamicFieldResponses(Info info) { - String[] expectedAddDynamicFields = new String[1 + info.numAddDynamicFieldPuts + info.numAddDynamicFieldPosts]; - expectedAddDynamicFields[0] = SUCCESS_XPATH; - - for (int i = 0; i < info.numAddDynamicFieldPuts; ++i) { - String newDynamicFieldPattern = PUT_DYNAMIC_FIELDNAME + info.fieldNameSuffix + i + "_*"; - expectedAddDynamicFields[1 + i] - = "/response/arr[@name='dynamicFields']/lst/str[@name='name'][.='" + newDynamicFieldPattern + "']"; - } - - for (int i = 0; i < info.numAddDynamicFieldPosts; ++i) { - String newDynamicFieldPattern = POST_DYNAMIC_FIELDNAME + info.fieldNameSuffix + i + "_*"; - expectedAddDynamicFields[1 + info.numAddDynamicFieldPuts + i] - = "/response/arr[@name='dynamicFields']/lst/str[@name='name'][.='" + newDynamicFieldPattern + "']"; - } - - return expectedAddDynamicFields; - } - - private String[] getExpectedCopyFieldResponses(Info info) { - ArrayList expectedCopyFields = new ArrayList<>(); - expectedCopyFields.add(SUCCESS_XPATH); - for (CopyFieldInfo cpi : info.copyFields) { - String expectedSourceName = cpi.getSourceField(); - expectedCopyFields.add - ("/response/arr[@name='copyFields']/lst/str[@name='source'][.='" + expectedSourceName + "']"); - String expectedDestName = cpi.getDestField(); - expectedCopyFields.add - ("/response/arr[@name='copyFields']/lst/str[@name='dest'][.='" + expectedDestName + "']"); - } - 
- return expectedCopyFields.toArray(new String[expectedCopyFields.size()]); - } - - private String[] getExpectedFieldTypeResponses(Info info) { - String[] expectedAddFieldTypes = new String[1 + info.numAddFieldTypePuts + info.numAddFieldTypePosts]; - expectedAddFieldTypes[0] = SUCCESS_XPATH; - - for (int i = 0; i < info.numAddFieldTypePuts; ++i) { - String newFieldTypeName = PUT_FIELDTYPE + info.fieldNameSuffix + i; - expectedAddFieldTypes[1 + i] - = "/response/arr[@name='fieldTypes']/lst/str[@name='name'][.='" + newFieldTypeName + "']"; - } - - for (int i = 0; i < info.numAddFieldTypePosts; ++i) { - String newFieldTypeName = POST_FIELDTYPE + info.fieldNameSuffix + i; - expectedAddFieldTypes[1 + info.numAddFieldTypePuts + i] - = "/response/arr[@name='fieldTypes']/lst/str[@name='name'][.='" + newFieldTypeName + "']"; - } - - return expectedAddFieldTypes; - } - - - @Test - @ShardsFixed(num = 8) - public void test() throws Exception { - verifyWaitForSchemaUpdateToPropagate(); - setupHarnesses(); - concurrentOperationsTest(); - schemaLockTest(); - } - - private static class Info { - int numAddFieldPuts = 0; - int numAddFieldPosts = 0; - int numAddDynamicFieldPuts = 0; - int numAddDynamicFieldPosts = 0; - int numAddFieldTypePuts = 0; - int numAddFieldTypePosts = 0; - public String fieldNameSuffix; - List copyFields = new ArrayList<>(); - - public Info(String fieldNameSuffix) { - this.fieldNameSuffix = fieldNameSuffix; - } - } - - private enum Operation { - PUT_AddField { - @Override public void execute(RestTestHarness publisher, int fieldNum, Info info) throws Exception { - String fieldname = PUT_FIELDNAME + info.numAddFieldPuts++; - addFieldPut(publisher, fieldname, 15); - } - }, - POST_AddField { - @Override public void execute(RestTestHarness publisher, int fieldNum, Info info) throws Exception { - String fieldname = POST_FIELDNAME + info.numAddFieldPosts++; - addFieldPost(publisher, fieldname, 15); - } - }, - PUT_AddDynamicField { - @Override public void 
execute(RestTestHarness publisher, int fieldNum, Info info) throws Exception { - addDynamicFieldPut(publisher, PUT_DYNAMIC_FIELDNAME + info.numAddDynamicFieldPuts++ + "_*", 15); - } - }, - POST_AddDynamicField { - @Override public void execute(RestTestHarness publisher, int fieldNum, Info info) throws Exception { - addDynamicFieldPost(publisher, POST_DYNAMIC_FIELDNAME + info.numAddDynamicFieldPosts++ + "_*", 15); - } - }, - POST_AddCopyField { - @Override public void execute(RestTestHarness publisher, int fieldNum, Info info) throws Exception { - String sourceField = null; - String destField = null; - - int sourceType = random().nextInt(3); - if (sourceType == 0) { // existing - sourceField = "name"; - } else if (sourceType == 1) { // newly created - sourceField = "copySource" + fieldNum; - addFieldPut(publisher, sourceField, 15); - } else { // dynamic - sourceField = "*_dynamicSource" + fieldNum + "_t"; - // * only supported if both src and dst use it - destField = "*_dynamicDest" + fieldNum + "_t"; - } - - if (destField == null) { - int destType = random().nextInt(2); - if (destType == 0) { // existing - destField = "title"; - } else { // newly created - destField = "copyDest" + fieldNum; - addFieldPut(publisher, destField, 15); - } - } - copyField(publisher, sourceField, destField, 15); - info.copyFields.add(new CopyFieldInfo(sourceField, destField)); - } - }, - PUT_AddFieldType { - @Override public void execute(RestTestHarness publisher, int fieldNum, Info info) throws Exception { - String typeName = PUT_FIELDTYPE + info.numAddFieldTypePuts++; - addFieldTypePut(publisher, typeName, 15); - } - }, - POST_AddFieldType { - @Override public void execute(RestTestHarness publisher, int fieldNum, Info info) throws Exception { - String typeName = POST_FIELDTYPE + info.numAddFieldTypePosts++; - addFieldTypePost(publisher, typeName, 15); - } - }; - - - public abstract void execute(RestTestHarness publisher, int fieldNum, Info info) throws Exception; - - private static 
final Operation[] VALUES = values(); - public static Operation randomOperation() { - return VALUES[r.nextInt(VALUES.length)]; - } - } - - private void verifyWaitForSchemaUpdateToPropagate() throws Exception { - String testCollectionName = "collection1"; - - ClusterState clusterState = cloudClient.getZkStateReader().getClusterState(); - Replica shard1Leader = clusterState.getCollection(testCollectionName).getLeader("shard1"); - final String coreUrl = (new ZkCoreNodeProps(shard1Leader)).getCoreUrl(); - assertNotNull(coreUrl); - - RestTestHarness harness = new RestTestHarness(() -> coreUrl.endsWith("/") ? coreUrl.substring(0, coreUrl.length()-1) : coreUrl); - try { - addFieldTypePut(harness, "fooInt", 15); - } finally { - harness.close(); - } - - // go into ZK to get the version of the managed schema after the update - SolrZkClient zkClient = cloudClient.getZkStateReader().getZkClient(); - Stat stat = new Stat(); - String znodePath = "/configs/conf1/managed-schema"; - byte[] managedSchemaBytes = zkClient.getData(znodePath, null, stat, false); - int schemaZkVersion = stat.getVersion(); - - // now loop over all replicas and verify each has the same schema version - Replica randomReplicaNotLeader = null; - for (Slice slice : clusterState.getCollection(testCollectionName).getActiveSlices()) { - for (Replica replica : slice.getReplicas()) { - validateZkVersion(replica, schemaZkVersion, 0, false); - - // save a random replica to test zk watcher behavior - if (randomReplicaNotLeader == null && !replica.getName().equals(shard1Leader.getName())) - randomReplicaNotLeader = replica; - } - } - assertNotNull(randomReplicaNotLeader); - - // now update the data and then verify the znode watcher fires correctly - // before an after a zk session expiration (see SOLR-6249) - zkClient.setData(znodePath, managedSchemaBytes, schemaZkVersion, false); - stat = new Stat(); - managedSchemaBytes = zkClient.getData(znodePath, null, stat, false); - int updatedSchemaZkVersion = stat.getVersion(); 
- assertTrue(updatedSchemaZkVersion > schemaZkVersion); - validateZkVersion(randomReplicaNotLeader, updatedSchemaZkVersion, 2, true); - - // ok - looks like the watcher fired correctly on the replica - // now, expire that replica's zk session and then verify the watcher fires again (after reconnect) - JettySolrRunner randomReplicaJetty = - getJettyOnPort(getReplicaPort(randomReplicaNotLeader)); - assertNotNull(randomReplicaJetty); - chaosMonkey.expireSession(randomReplicaJetty); - - // update the data again to cause watchers to fire - zkClient.setData(znodePath, managedSchemaBytes, updatedSchemaZkVersion, false); - stat = new Stat(); - managedSchemaBytes = zkClient.getData(znodePath, null, stat, false); - updatedSchemaZkVersion = stat.getVersion(); - // give up to 10 secs for the replica to recover after zk session loss and see the update - validateZkVersion(randomReplicaNotLeader, updatedSchemaZkVersion, 10, true); - } - - /** - * Sends a GET request to get the zk schema version from a specific replica. - */ - protected void validateZkVersion(Replica replica, int schemaZkVersion, int waitSecs, boolean retry) throws Exception { - final String replicaUrl = (new ZkCoreNodeProps(replica)).getCoreUrl(); - RestTestHarness testHarness = new RestTestHarness(() -> replicaUrl.endsWith("/") ? replicaUrl.substring(0, replicaUrl.length()-1) : replicaUrl); - try { - long waitMs = waitSecs * 1000L; - if (waitMs > 0) Thread.sleep(waitMs); // wait a moment for the zk watcher to fire - - try { - testHarness.validateQuery("/schema/zkversion?wt=xml", "//zkversion=" + schemaZkVersion); - } catch (Exception exc) { - if (retry) { - // brief wait before retrying - Thread.sleep(waitMs > 0 ? 
waitMs : 2000L); - - testHarness.validateQuery("/schema/zkversion?wt=xml", "//zkversion=" + schemaZkVersion); - } else { - throw exc; - } - } - } finally { - testHarness.close(); - } - } - - private void concurrentOperationsTest() throws Exception { - - // First, add a bunch of fields and dynamic fields via PUT and POST, as well as copyFields, - // but do it fast enough and verify shards' schemas after all of them are added - int numFields = 100; - Info info = new Info(""); - - for (int fieldNum = 0; fieldNum <= numFields ; ++fieldNum) { - RestTestHarness publisher = restTestHarnesses.get(r.nextInt(restTestHarnesses.size())); - Operation.randomOperation().execute(publisher, fieldNum, info); - } - - String[] expectedAddFields = getExpectedFieldResponses(info); - String[] expectedAddDynamicFields = getExpectedDynamicFieldResponses(info); - String[] expectedCopyFields = getExpectedCopyFieldResponses(info); - String[] expectedAddFieldTypes = getExpectedFieldTypeResponses(info); - - boolean success = false; - long maxTimeoutMillis = 100000; - long startTime = System.nanoTime(); - String request = null; - String response = null; - String result = null; - - while ( ! 
success - && TimeUnit.MILLISECONDS.convert(System.nanoTime() - startTime, TimeUnit.NANOSECONDS) < maxTimeoutMillis) { - Thread.sleep(100); - - for (RestTestHarness client : restTestHarnesses) { - // verify addFieldTypePuts and addFieldTypePosts - request = "/schema/fieldtypes?wt=xml"; - response = client.query(request); - result = BaseTestHarness.validateXPath(response, expectedAddFieldTypes); - if (result != null) { - break; - } - - // verify addFieldPuts and addFieldPosts - request = "/schema/fields?wt=xml"; - response = client.query(request); - result = BaseTestHarness.validateXPath(response, expectedAddFields); - if (result != null) { - break; - } - - // verify addDynamicFieldPuts and addDynamicFieldPosts - request = "/schema/dynamicfields?wt=xml"; - response = client.query(request); - result = BaseTestHarness.validateXPath(response, expectedAddDynamicFields); - if (result != null) { - break; - } - - // verify copyFields - request = "/schema/copyfields?wt=xml"; - response = client.query(request); - result = BaseTestHarness.validateXPath(response, expectedCopyFields); - if (result != null) { - break; - } - } - success = (result == null); - } - if ( ! 
success) { - String msg = "QUERY FAILED: xpath=" + result + " request=" + request + " response=" + response; - log.error(msg); - fail(msg); - } - } - - private abstract class PutPostThread extends Thread { - RestTestHarness harness; - Info info; - public String fieldName; - - public PutPostThread(RestTestHarness harness, Info info) { - this.harness = harness; - this.info = info; - } - - public abstract void run(); - } - - private class PutFieldThread extends PutPostThread { - public PutFieldThread(RestTestHarness harness, Info info) { - super(harness, info); - fieldName = PUT_FIELDNAME + "Thread" + info.numAddFieldPuts++; - } - public void run() { - try { - // don't have the client side wait for all replicas to see the update or that defeats the purpose - // of testing the locking support on the server-side - addFieldPut(harness, fieldName, -1); - } catch (Exception e) { - // log.error("###ACTUAL FAILURE!"); - throw new RuntimeException(e); - } - } - } - - private class PostFieldThread extends PutPostThread { - public PostFieldThread(RestTestHarness harness, Info info) { - super(harness, info); - fieldName = POST_FIELDNAME + "Thread" + info.numAddFieldPosts++; - } - public void run() { - try { - addFieldPost(harness, fieldName, -1); - } catch (Exception e) { - // log.error("###ACTUAL FAILURE!"); - throw new RuntimeException(e); - } - } - } - - private class PutFieldTypeThread extends PutPostThread { - public PutFieldTypeThread(RestTestHarness harness, Info info) { - super(harness, info); - fieldName = PUT_FIELDTYPE + "Thread" + info.numAddFieldTypePuts++; - } - public void run() { - try { - addFieldTypePut(harness, fieldName, -1); - } catch (Exception e) { - // log.error("###ACTUAL FAILURE!"); - throw new RuntimeException(e); - } - } - } - - private class PostFieldTypeThread extends PutPostThread { - public PostFieldTypeThread(RestTestHarness harness, Info info) { - super(harness, info); - fieldName = POST_FIELDTYPE + "Thread" + info.numAddFieldTypePosts++; - } - 
public void run() { - try { - addFieldTypePost(harness, fieldName, -1); - } catch (Exception e) { - // log.error("###ACTUAL FAILURE!"); - throw new RuntimeException(e); - } - } - } - - private class PutDynamicFieldThread extends PutPostThread { - public PutDynamicFieldThread(RestTestHarness harness, Info info) { - super(harness, info); - fieldName = PUT_FIELDNAME + "Thread" + info.numAddFieldPuts++; - } - public void run() { - try { - addFieldPut(harness, fieldName, -1); - } catch (Exception e) { - // log.error("###ACTUAL FAILURE!"); - throw new RuntimeException(e); - } - } - } - - private class PostDynamicFieldThread extends PutPostThread { - public PostDynamicFieldThread(RestTestHarness harness, Info info) { - super(harness, info); - fieldName = POST_FIELDNAME + "Thread" + info.numAddFieldPosts++; - } - public void run() { - try { - addFieldPost(harness, fieldName, -1); - } catch (Exception e) { - // log.error("###ACTUAL FAILURE!"); - throw new RuntimeException(e); - } - } - } - - private void schemaLockTest() throws Exception { - - // First, add a bunch of fields via PUT and POST, as well as copyFields, - // but do it fast enough and verify shards' schemas after all of them are added - int numFields = 5; - Info info = new Info("Thread"); - - for (int i = 0; i <= numFields ; ++i) { - // System.err.println("###ITERATION: " + i); - RestTestHarness publisher = restTestHarnesses.get(r.nextInt(restTestHarnesses.size())); - PostFieldThread postFieldThread = new PostFieldThread(publisher, info); - postFieldThread.start(); - - publisher = restTestHarnesses.get(r.nextInt(restTestHarnesses.size())); - PutFieldThread putFieldThread = new PutFieldThread(publisher, info); - putFieldThread.start(); - - publisher = restTestHarnesses.get(r.nextInt(restTestHarnesses.size())); - PostDynamicFieldThread postDynamicFieldThread = new PostDynamicFieldThread(publisher, info); - postDynamicFieldThread.start(); - - publisher = restTestHarnesses.get(r.nextInt(restTestHarnesses.size())); - 
PutDynamicFieldThread putDynamicFieldThread = new PutDynamicFieldThread(publisher, info); - putDynamicFieldThread.start(); - - publisher = restTestHarnesses.get(r.nextInt(restTestHarnesses.size())); - PostFieldTypeThread postFieldTypeThread = new PostFieldTypeThread(publisher, info); - postFieldTypeThread.start(); - - publisher = restTestHarnesses.get(r.nextInt(restTestHarnesses.size())); - PutFieldTypeThread putFieldTypeThread = new PutFieldTypeThread(publisher, info); - putFieldTypeThread.start(); - - postFieldThread.join(); - putFieldThread.join(); - postDynamicFieldThread.join(); - putDynamicFieldThread.join(); - postFieldTypeThread.join(); - putFieldTypeThread.join(); - - String[] expectedAddFields = getExpectedFieldResponses(info); - String[] expectedAddFieldTypes = getExpectedFieldTypeResponses(info); - String[] expectedAddDynamicFields = getExpectedDynamicFieldResponses(info); - - boolean success = false; - long maxTimeoutMillis = 100000; - long startTime = System.nanoTime(); - String request = null; - String response = null; - String result = null; - - while ( ! 
success - && TimeUnit.MILLISECONDS.convert(System.nanoTime() - startTime, TimeUnit.NANOSECONDS) < maxTimeoutMillis) { - Thread.sleep(10); - - // int j = 0; - for (RestTestHarness client : restTestHarnesses) { - // System.err.println("###CHECKING HARNESS: " + j++ + " for iteration: " + i); - - // verify addFieldPuts and addFieldPosts - request = "/schema/fields?wt=xml"; - response = client.query(request); - //System.err.println("###RESPONSE: " + response); - result = BaseTestHarness.validateXPath(response, expectedAddFields); - - if (result != null) { - // System.err.println("###FAILURE!"); - break; - } - - // verify addDynamicFieldPuts and addDynamicFieldPosts - request = "/schema/dynamicfields?wt=xml"; - response = client.query(request); - //System.err.println("###RESPONSE: " + response); - result = BaseTestHarness.validateXPath(response, expectedAddDynamicFields); - - if (result != null) { - // System.err.println("###FAILURE!"); - break; - } - - request = "/schema/fieldtypes?wt=xml"; - response = client.query(request); - //System.err.println("###RESPONSE: " + response); - result = BaseTestHarness.validateXPath(response, expectedAddFieldTypes); - - if (result != null) { - // System.err.println("###FAILURE!"); - break; - } - - } - success = (result == null); - } - if ( ! 
success) { - String msg = "QUERY FAILED: xpath=" + result + " request=" + request + " response=" + response; - log.error(msg); - fail(msg); - } - } - } - - private static class CopyFieldInfo { - private String sourceField; - private String destField; - - public CopyFieldInfo(String sourceField, String destField) { - this.sourceField = sourceField; - this.destField = destField; - } - - public String getSourceField() { return sourceField; } - public String getDestField() { return destField; } - } -} From 9422da26e52730de5d21d52e26e52ec9c8772df5 Mon Sep 17 00:00:00 2001 From: Christine Poerschke Date: Tue, 1 Aug 2017 11:14:32 +0100 Subject: [PATCH 42/95] SOLR-11140: Remove unused parameter in (private) SolrMetricManager.prepareCloudPlugins method. (Omar Abdelnabi via Christine Poerschke) --- solr/CHANGES.txt | 3 +++ .../org/apache/solr/metrics/SolrMetricManager.java | 13 +++---------- 2 files changed, 6 insertions(+), 10 deletions(-) diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index 21d0789da06..16fd11952e7 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -120,6 +120,9 @@ Other Changes * SOLR-11131: Document 'assert' as a command option in bin/solr, and bin/solr.cmd scripts. (Jason Gerlowski via Anshum Gupta) +* SOLR-11140: Remove unused parameter in (private) SolrMetricManager.prepareCloudPlugins method. 
+ (Omar Abdelnabi via Christine Poerschke) + ================== 7.0.0 ================== Versions of Major Components diff --git a/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java b/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java index c041d1fb02e..8b17c78fe0b 100644 --- a/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java +++ b/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java @@ -1005,8 +1005,7 @@ public class SolrMetricManager { private List prepareCloudPlugins(PluginInfo[] pluginInfos, String group, String className, Map defaultAttributes, - Map defaultInitArgs, - PluginInfo defaultPlugin) { + Map defaultInitArgs) { List result = new ArrayList<>(); if (pluginInfos == null) { pluginInfos = new PluginInfo[0]; @@ -1021,12 +1020,6 @@ public class SolrMetricManager { result.add(info); } } - if (result.isEmpty() && defaultPlugin != null) { - defaultPlugin = preparePlugin(defaultPlugin, className, defaultAttributes, defaultInitArgs); - if (defaultPlugin != null) { - result.add(defaultPlugin); - } - } return result; } @@ -1077,7 +1070,7 @@ public class SolrMetricManager { String registryName = core.getCoreMetricManager().getRegistryName(); // collect infos and normalize List infos = prepareCloudPlugins(pluginInfos, SolrInfoBean.Group.shard.toString(), SolrShardReporter.class.getName(), - attrs, initArgs, null); + attrs, initArgs); for (PluginInfo info : infos) { try { SolrMetricReporter reporter = loadReporter(registryName, core.getResourceLoader(), info, @@ -1100,7 +1093,7 @@ public class SolrMetricManager { Map initArgs = new HashMap<>(); initArgs.put("period", DEFAULT_CLOUD_REPORTER_PERIOD); List infos = prepareCloudPlugins(pluginInfos, SolrInfoBean.Group.cluster.toString(), SolrClusterReporter.class.getName(), - attrs, initArgs, null); + attrs, initArgs); String registryName = getRegistryName(SolrInfoBean.Group.cluster); for (PluginInfo info : infos) { try { From 
59997091c8f991b55899d6feb5d5c05e720f78a7 Mon Sep 17 00:00:00 2001 From: Tommaso Teofili Date: Tue, 1 Aug 2017 15:00:25 +0200 Subject: [PATCH 43/95] LUCENE-7915 - avoid looping over merges on best merge selection --- .../org/apache/lucene/index/TieredMergePolicy.java | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/lucene/core/src/java/org/apache/lucene/index/TieredMergePolicy.java b/lucene/core/src/java/org/apache/lucene/index/TieredMergePolicy.java index c020a6de062..bba57efff39 100644 --- a/lucene/core/src/java/org/apache/lucene/index/TieredMergePolicy.java +++ b/lucene/core/src/java/org/apache/lucene/index/TieredMergePolicy.java @@ -298,7 +298,7 @@ public class TieredMergePolicy extends MergePolicy { // call size() once per segment and sort by that: Map sizeInBytes = getSegmentSizes(writer, infos.asList()); - Collections.sort(infosSorted, new SegmentByteSizeDescending(sizeInBytes)); + infosSorted.sort(new SegmentByteSizeDescending(sizeInBytes)); // Compute total index bytes & print details about the index long totIndexBytes = 0; @@ -439,9 +439,7 @@ public class TieredMergePolicy extends MergePolicy { } final OneMerge merge = new OneMerge(best); spec.add(merge); - for(SegmentCommitInfo info : merge.segments) { - toBeMerged.add(info); - } + toBeMerged.addAll(merge.segments); if (verbose(writer)) { message(" add merge=" + writer.segString(merge.segments) + " size=" + String.format(Locale.ROOT, "%.3f MB", bestMergeBytes/1024./1024.) + " score=" + String.format(Locale.ROOT, "%.3f", bestScore.getScore()) + " " + bestScore.getExplanation() + (bestTooLarge ? 
" [max merge]" : ""), writer); @@ -553,7 +551,7 @@ public class TieredMergePolicy extends MergePolicy { return null; } - Collections.sort(eligible, new SegmentByteSizeDescending(sizeInBytes)); + eligible.sort(new SegmentByteSizeDescending(sizeInBytes)); if (verbose(writer)) { message("eligible=" + eligible, writer); @@ -614,7 +612,7 @@ public class TieredMergePolicy extends MergePolicy { // call size() once per segment and sort by that: Map sizeInBytes = getSegmentSizes(writer, infos.asList()); - Collections.sort(eligible, new SegmentByteSizeDescending(sizeInBytes)); + eligible.sort(new SegmentByteSizeDescending(sizeInBytes)); if (verbose(writer)) { message("eligible=" + eligible, writer); From 7d6c154fd9cdb72d808d97ed8f3f99517c1663a8 Mon Sep 17 00:00:00 2001 From: Steve Rowe Date: Tue, 1 Aug 2017 09:40:45 -0400 Subject: [PATCH 44/95] Revert "SOLR-10033: Provide a clear exception when attempting to facet with facet.mincount=0 over points fields" This reverts commit 3a405971b9e06e2004e0d66ae1b82f530de969f2. --- solr/CHANGES.txt | 3 -- .../apache/solr/request/NumericFacets.java | 15 +--------- .../org/apache/solr/request/TestFaceting.java | 28 ++----------------- 3 files changed, 3 insertions(+), 43 deletions(-) diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index 16fd11952e7..8fc3f137d84 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -598,9 +598,6 @@ Other Changes * SOLR-10847: Provide a clear exception when attempting to use the terms component with points fields. (hossman, Steve Rowe) - -* SOLR-10033: Provide a clear exception when attempting to facet with facet.mincount=0 over points fields. - (Steve Rowe) * SOLR-9321: Remove deprecated methods of ClusterState. 
(Jason Gerlowski, ishan, Cao Manh Dat) diff --git a/solr/core/src/java/org/apache/solr/request/NumericFacets.java b/solr/core/src/java/org/apache/solr/request/NumericFacets.java index f9f38b3dbaf..fd17f1f7397 100644 --- a/solr/core/src/java/org/apache/solr/request/NumericFacets.java +++ b/solr/core/src/java/org/apache/solr/request/NumericFacets.java @@ -43,7 +43,6 @@ import org.apache.lucene.util.CharsRefBuilder; import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.PriorityQueue; import org.apache.lucene.util.StringHelper; -import org.apache.solr.common.SolrException; import org.apache.solr.common.params.FacetParams; import org.apache.solr.common.util.NamedList; import org.apache.solr.schema.FieldType; @@ -179,11 +178,6 @@ final class NumericFacets { if (numericType == null) { throw new IllegalStateException(); } - if (zeros && ft.isPointField()) { - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, - "Cannot use " + FacetParams.FACET_MINCOUNT + "=0 on field " + sf.getName() + " which is Points-based"); - } - zeros = zeros && !ft.isPointField() && sf.indexed(); // We don't return zeros when using PointFields or when index=false final List leaves = searcher.getIndexReader().leaves(); @@ -413,18 +407,11 @@ final class NumericFacets { private static NamedList getCountsMultiValued(SolrIndexSearcher searcher, DocSet docs, String fieldName, int offset, int limit, int mincount, boolean missing, String sort) throws IOException { // If facet.mincount=0 with PointFields the only option is to get the values from DocValues - // not currently supported. See SOLR-11174 - boolean zeros = mincount <= 0; + // not currently supported. 
See SOLR-10033 mincount = Math.max(mincount, 1); final SchemaField sf = searcher.getSchema().getField(fieldName); final FieldType ft = sf.getType(); assert sf.multiValued(); - - if (zeros && ft.isPointField()) { - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, - "Cannot use " + FacetParams.FACET_MINCOUNT + "=0 on field " + sf.getName() + " which is Points-based"); - } - final List leaves = searcher.getIndexReader().leaves(); // 1. accumulate diff --git a/solr/core/src/test/org/apache/solr/request/TestFaceting.java b/solr/core/src/test/org/apache/solr/request/TestFaceting.java index 1d99127d37e..9559b4ca405 100644 --- a/solr/core/src/test/org/apache/solr/request/TestFaceting.java +++ b/solr/core/src/test/org/apache/solr/request/TestFaceting.java @@ -27,7 +27,6 @@ import org.apache.lucene.index.Term; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.util.BytesRef; import org.apache.solr.SolrTestCaseJ4; -import org.apache.solr.common.SolrException; import org.apache.solr.common.params.FacetParams; import org.apache.solr.search.SolrIndexSearcher; import org.apache.solr.uninverting.DocTermOrds; @@ -336,7 +335,7 @@ public class TestFaceting extends SolrTestCaseJ4 { @Test public void testFacetSortWithMinCount0() { - assumeFalse("facet.mincount=0 doesn't work with point fields (SOLR-11174) or single valued DV", + assumeFalse("facet.mincount=0 doesn't work with point fields (SOLR-10033) or single valued DV", Boolean.getBoolean(NUMERIC_POINTS_SYSPROP) || Boolean.getBoolean(NUMERIC_DOCVALUES_SYSPROP)); assertU(adoc("id", "1", "f_td", "-420.126")); @@ -357,31 +356,8 @@ public class TestFaceting extends SolrTestCaseJ4 { "//lst[@name='facet_fields']/lst[@name='f_td']/int[3][@name='-1.218']"); } - @Test - public void testFacetOverPointFieldWithMinCount0() { - String field = "f_" + new String[]{"i","l","f","d"}[random().nextInt(4)] + "_p"; - final SolrQueryRequest req = req("q", "id:1.0", - FacetParams.FACET, "true", - FacetParams.FACET_FIELD, field, 
- FacetParams.FACET_MINCOUNT, "0", - FacetParams.FACET_METHOD, FacetParams.FACET_METHOD_fc); - Exception e = expectThrows(SolrException.class, () -> h.query(req)); - assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ((SolrException)e).code()); - assertTrue(e.getMessage().contains("Cannot use facet.mincount=0 on field " + field + " which is Points-based")); - String mvField = "f_" + new String[]{"is","ls","fs","ds"}[random().nextInt(4)] + "_p"; - final SolrQueryRequest req2 = req("q", "id:1.0", - FacetParams.FACET, "true", - FacetParams.FACET_FIELD, mvField, - FacetParams.FACET_MINCOUNT, "0", - FacetParams.FACET_METHOD, FacetParams.FACET_METHOD_fc); - e = expectThrows(SolrException.class, () -> h.query(req2)); - assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ((SolrException)e).code()); - assertTrue(e.getMessage().contains("Cannot use facet.mincount=0 on field " + mvField + " which is Points-based")); - } - - - public void testSimpleFacetCountsWithMultipleConfigurationsForSameField() { + public void testSimpleFacetCountsWithMultipleConfigurationsForSameField() { clearIndex(); String fname = "trait_ss"; assertU(adoc("id", "42", From 0e927c6484bcbc0f29bf8089c87928329cf98d06 Mon Sep 17 00:00:00 2001 From: yonik Date: Tue, 1 Aug 2017 12:28:35 -0400 Subject: [PATCH 45/95] SOLR-10845: add Points support to graphTerms query --- solr/CHANGES.txt | 3 + .../solr/search/GraphTermsQParserPlugin.java | 464 ++++++++++++++++++ .../search/TestGraphTermsQParserPlugin.java | 2 - 3 files changed, 467 insertions(+), 2 deletions(-) diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index 8fc3f137d84..78456ae7052 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -332,6 +332,9 @@ New Features * SOLR-11093: Add support for PointFields for {!graph} query. (yonik) +* SOLR-10845: Add support for PointFields to {!graphTerms} query that is internally + used by some graph traversal streaming expressions. 
(yonik) + Bug Fixes ---------------------- * SOLR-9262: Connection and read timeouts are being ignored by UpdateShardHandler after SOLR-4509. diff --git a/solr/core/src/java/org/apache/solr/search/GraphTermsQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/GraphTermsQParserPlugin.java index d659265b1e4..4656afe0928 100644 --- a/solr/core/src/java/org/apache/solr/search/GraphTermsQParserPlugin.java +++ b/solr/core/src/java/org/apache/solr/search/GraphTermsQParserPlugin.java @@ -20,13 +20,22 @@ package org.apache.solr.search; import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.Set; +import org.apache.lucene.document.DoublePoint; +import org.apache.lucene.document.FloatPoint; +import org.apache.lucene.document.IntPoint; +import org.apache.lucene.document.LongPoint; import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.PointValues; import org.apache.lucene.index.PostingsEnum; +import org.apache.lucene.index.PrefixCodedTerms; +import org.apache.lucene.index.ReaderUtil; import org.apache.lucene.index.Term; import org.apache.lucene.index.TermContext; import org.apache.lucene.index.TermState; @@ -45,13 +54,18 @@ import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BitDocIdSet; +import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; +import org.apache.lucene.util.BytesRefIterator; import org.apache.lucene.util.DocIdSetBuilder; import org.apache.lucene.util.FixedBitSet; +import org.apache.lucene.util.StringHelper; import org.apache.solr.common.params.SolrParams; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.schema.FieldType; +import 
org.apache.solr.schema.NumberType; +import org.apache.solr.schema.SchemaField; /** * The GraphTermsQuery builds a disjunction query from a list of terms. The terms are first filtered by the maxDocFreq parameter. @@ -79,6 +93,49 @@ public class GraphTermsQParserPlugin extends QParserPlugin { final String[] splitVals = qstr.split(","); + SchemaField sf = req.getSchema().getField(fname); + + // if we don't limit by maxDocFreq, then simply use a normal set query + if (maxDocFreq == Integer.MAX_VALUE) { + return sf.getType().getSetQuery(this, sf, Arrays.asList(splitVals)); + } + + if (sf.getType().isPointField()) { + PointSetQuery setQ = null; + if (sf.getType().getNumberType() == NumberType.INTEGER) { + int[] vals = new int[splitVals.length]; + for (int i=0; i 1 D case, where we are only looking at the first dimension's prefix bytes, it can at worst not hurt: + PrefixCodedTerms.Builder builder = new PrefixCodedTerms.Builder(); + BytesRefBuilder previous = null; + BytesRef current; + while ((current = packedPoints.next()) != null) { + if (current.length != numDims * bytesPerDim) { + throw new IllegalArgumentException("packed point length should be " + (numDims * bytesPerDim) + " but got " + current.length + "; field=\"" + field + "\" numDims=" + numDims + " bytesPerDim=" + bytesPerDim); + } + if (previous == null) { + previous = new BytesRefBuilder(); + } else { + int cmp = previous.get().compareTo(current); + if (cmp == 0) { + continue; // deduplicate + } else if (cmp > 0) { + throw new IllegalArgumentException("values are out of order: saw " + previous + " before " + current); + } + } + builder.add(field, current); + previous.copyBytes(current); + } + sortedPackedPoints = builder.finish(); + sortedPackedPointsHashCode = sortedPackedPoints.hashCode(); + } + + private FixedBitSet getLiveDocs(IndexSearcher searcher) throws IOException { + if (searcher instanceof SolrIndexSearcher) { + BitDocSet liveDocs = ((SolrIndexSearcher) searcher).getLiveDocs(); + FixedBitSet 
liveBits = liveDocs.size() == ((SolrIndexSearcher) searcher).maxDoc() ? null : liveDocs.getBits(); + return liveBits; + } else { + if (searcher.getTopReaderContext().reader().maxDoc() == searcher.getTopReaderContext().reader().numDocs()) return null; + FixedBitSet bs = new FixedBitSet(searcher.getTopReaderContext().reader().maxDoc()); + for (LeafReaderContext ctx : searcher.getTopReaderContext().leaves()) { + Bits liveDocs = ctx.reader().getLiveDocs(); + int max = ctx.reader().maxDoc(); + int base = ctx.docBase; + for (int i=0; i segs = top.leaves(); + DocSetBuilder builder = new DocSetBuilder(top.reader().maxDoc(), Math.min(64,(top.reader().maxDoc()>>>10)+4)); + PointValues[] segPoints = new PointValues[segs.size()]; + for (int i=0; i maxDocFreq) { + continue outer; + } + } + int collected = visitor.getCount(); + int[] ids = visitor.getGlobalIds(); + for (int i=0; i 0) { + return PointValues.Relation.CELL_OUTSIDE_QUERY; + } + + int cmpMax = StringHelper.compare(bytesPerDim, maxPackedValue, offset, pointBytes, offset); + if (cmpMax < 0) { + return PointValues.Relation.CELL_OUTSIDE_QUERY; + } + + if (cmpMin != 0 || cmpMax != 0) { + crosses = true; + } + } + + if (crosses) { + return PointValues.Relation.CELL_CROSSES_QUERY; + } else { + // NOTE: we only hit this if we are on a cell whose min and max values are exactly equal to our point, + // which can easily happen if many docs share this one value + return PointValues.Relation.CELL_INSIDE_QUERY; + } + } + } + + public String getField() { + return field; + } + + public int getNumDims() { + return numDims; + } + + public int getBytesPerDim() { + return bytesPerDim; + } + + @Override + public final int hashCode() { + int hash = classHash(); + hash = 31 * hash + field.hashCode(); + hash = 31 * hash + sortedPackedPointsHashCode; + hash = 31 * hash + numDims; + hash = 31 * hash + bytesPerDim; + hash = 31 * hash + maxDocFreq; + return hash; + } + + @Override + public final boolean equals(Object other) { + return 
sameClassAs(other) && + equalsTo(getClass().cast(other)); + } + + private boolean equalsTo(PointSetQuery other) { + return other.field.equals(field) && + other.numDims == numDims && + other.bytesPerDim == bytesPerDim && + other.sortedPackedPointsHashCode == sortedPackedPointsHashCode && + other.sortedPackedPoints.equals(sortedPackedPoints) && + other.maxDocFreq == maxDocFreq; + } + + @Override + public final String toString(String field) { + final StringBuilder sb = new StringBuilder(); + if (this.field.equals(field) == false) { + sb.append(this.field); + sb.append(':'); + } + + sb.append("{"); + + PrefixCodedTerms.TermIterator iterator = sortedPackedPoints.iterator(); + byte[] pointBytes = new byte[numDims * bytesPerDim]; + boolean first = true; + for (BytesRef point = iterator.next(); point != null; point = iterator.next()) { + if (first == false) { + sb.append(" "); + } + first = false; + System.arraycopy(point.bytes, point.offset, pointBytes, 0, pointBytes.length); + sb.append(toString(pointBytes)); + } + sb.append("}"); + return sb.toString(); + } + + protected abstract String toString(byte[] value); +} diff --git a/solr/core/src/test/org/apache/solr/search/TestGraphTermsQParserPlugin.java b/solr/core/src/test/org/apache/solr/search/TestGraphTermsQParserPlugin.java index f62a8bf98bb..1cb927d3644 100644 --- a/solr/core/src/test/org/apache/solr/search/TestGraphTermsQParserPlugin.java +++ b/solr/core/src/test/org/apache/solr/search/TestGraphTermsQParserPlugin.java @@ -19,7 +19,6 @@ package org.apache.solr.search; import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; import org.apache.solr.SolrTestCaseJ4; -import org.apache.solr.SolrTestCaseJ4.SuppressPointFields; import org.apache.solr.common.params.ModifiableSolrParams; import org.junit.Before; import org.junit.BeforeClass; @@ -27,7 +26,6 @@ import org.junit.Test; //We want codecs that support DocValues, and ones supporting blank/empty values. 
@SuppressCodecs({"Appending","Lucene3x","Lucene40","Lucene41","Lucene42"}) -@SuppressPointFields(bugUrl="https://issues.apache.org/jira/browse/SOLR-10845") public class TestGraphTermsQParserPlugin extends SolrTestCaseJ4 { @BeforeClass From de5ae4096d85ae472af96fd973dd8163f4120da5 Mon Sep 17 00:00:00 2001 From: Cassandra Targett Date: Tue, 1 Aug 2017 12:43:02 -0500 Subject: [PATCH 46/95] SOLR-10831: add docs for replica types --- solr/solr-ref-guide/src/collections-api.adoc | 26 +++++++- ...shards-and-indexing-data-in-solrcloud.adoc | 64 +++++++++++++++++-- 2 files changed, 82 insertions(+), 8 deletions(-) diff --git a/solr/solr-ref-guide/src/collections-api.adoc b/solr/solr-ref-guide/src/collections-api.adoc index 4640d54f72e..09d79635042 100644 --- a/solr/solr-ref-guide/src/collections-api.adoc +++ b/solr/solr-ref-guide/src/collections-api.adoc @@ -54,7 +54,16 @@ The number of shards to be created as part of the collection. This is a required A comma separated list of shard names, e.g., `shard-x,shard-y,shard-z`. This is a required parameter when the `router.name` is `implicit`. `replicationFactor`:: -The number of replicas to be created for each shard. The default is `1`. +The number of replicas to be created for each shard. The default is `1`. This will create a NRT type of replica. If you want another type of replica, see the `tlogReplicas` and `pullReplica` parameters. See the section <> for more information about replica types. + +`nrtReplicas`:: +The number of NRT (Near-Real-Time) replicas to create for this collection. This type of replica maintains a transaction log and updates its index locally. If you want all of your replicas to be of this type, you can simply use `replicationFactor` instead. + +`tlogReplicas`:: +The number of TLOG replicas to create for this collection. This type of replica maintains a transaction log but only updates its index via replication from a leader. See the section <> for more information about replica types. 
+ +`pullReplicas`:: +The number of PULL replicas to create for this collection. This type of replica does not maintain a transaction log and only updates its index via replication from a leader. This type is not eligible to become a leader and should not be the only type of replicas in the collection. See the section <> for more information about replica types. `maxShardsPerNode`:: When creating collections, the shards and/or replicas are spread across all available (i.e., live) nodes, and two replicas of the same shard will never be on the same node. @@ -711,10 +720,21 @@ Ignored if the `shard` param is also specified. The name of the node where the replica should be created. `instanceDir`:: -The instanceDir for the core that will be created +The instanceDir for the core that will be created. `dataDir`:: -The directory in which the core should be created +The directory in which the core should be created. + +`type`:: +The type of replica to create. These possible values are allowed: ++ +-- +* `nrt`: The NRT type maintains a transaction log and updates its index locally. This is the default and the most commonly used. +* `tlog`: The TLOG type maintains a transaction log but only updates its index via replication. +* `pull`: The PULL type does not maintain a transaction log and only updates its index via replication. This type is not eligible to become a leader. +-- ++ +See the section <> for more information about replica type options. `property._name_=_value_`:: Set core property _name_ to _value_. See <> for details about supported properties and values. 
diff --git a/solr/solr-ref-guide/src/shards-and-indexing-data-in-solrcloud.adoc b/solr/solr-ref-guide/src/shards-and-indexing-data-in-solrcloud.adoc index 3d0a87d97dc..8a78d45a4a7 100644 --- a/solr/solr-ref-guide/src/shards-and-indexing-data-in-solrcloud.adoc +++ b/solr/solr-ref-guide/src/shards-and-indexing-data-in-solrcloud.adoc @@ -20,7 +20,9 @@ When your collection is too large for one node, you can break it up and store it in sections by creating multiple *shards*. -A Shard is a logical partition of the collection, containing a subset of documents from the collection, such that every document in a collection is contained in exactly one Shard. Which shard contains a each document in a collection depends on the overall "Sharding" strategy for that collection. For example, you might have a collection where the "country" field of each document determines which shard it is part of, so documents from the same country are co-located. A different collection might simply use a "hash" on the uniqueKey of each document to determine its Shard. +A Shard is a logical partition of the collection, containing a subset of documents from the collection, such that every document in a collection is contained in exactly one Shard. Which shard contains each document in a collection depends on the overall "Sharding" strategy for that collection. + +For example, you might have a collection where the "country" field of each document determines which shard it is part of, so documents from the same country are co-located. A different collection might simply use a "hash" on the uniqueKey of each document to determine its Shard. Before SolrCloud, Solr supported Distributed Search, which allowed one query to be executed across multiple shards, so the query was executed against the entire Solr index and no documents would be missed from the search results. So splitting an index across shards is not exclusively a SolrCloud concept. 
There were, however, several problems with the distributed approach that necessitated improvement with SolrCloud: @@ -28,7 +30,9 @@ Before SolrCloud, Solr supported Distributed Search, which allowed one query to . There was no support for distributed indexing, which meant that you needed to explicitly send documents to a specific shard; Solr couldn't figure out on its own what shards to send documents to. . There was no load balancing or failover, so if you got a high number of queries, you needed to figure out where to send them and if one shard died it was just gone. -SolrCloud fixes all those problems. There is support for distributing both the index process and the queries automatically, and ZooKeeper provides failover and load balancing. Additionally, every shard can also have multiple replicas for additional robustness. +SolrCloud addresses those limitations. There is support for distributing both the index process and the queries automatically, and ZooKeeper provides failover and load balancing. Additionally, every shard can have multiple replicas for additional robustness. + +== Leaders and Replicas In SolrCloud there are no masters or slaves. Instead, every shard consists of at least one physical *replica*, exactly one of which is a *leader*. Leaders are automatically elected, initially on a first-come-first-served basis, and then based on the ZooKeeper process described at http://zookeeper.apache.org/doc/trunk/recipes.html#sc_leaderElection[http://zookeeper.apache.org/doc/trunk/recipes.html#sc_leaderElection.]. @@ -36,19 +40,69 @@ If a leader goes down, one of the other replicas is automatically elected as the When a document is sent to a Solr node for indexing, the system first determines which Shard that document belongs to, and then which node is currently hosting the leader for that shard. The document is then forwarded to the current leader for indexing, and the leader forwards the update to all of the other replicas. 
+=== Types of Replicas + +By default, all replicas are eligible to become leaders if their leader goes down. However, this comes at a cost: if all replicas could become a leader at any time, every replica must be in sync with its leader at all times. New documents added to the leader must be routed to the replicas, and each replica must do a commit. If a replica goes down, or is temporarily unavailable, and then rejoins the cluster, recovery may be slow if it has missed a large number of updates. + +These issues are not a problem for most users. However, some use cases would perform better if the replicas behaved a bit more like the former model, either by not syncing in real-time or by not being eligible to become leaders at all. + +Solr accomplishes this by allowing you to set the replica type when creating a new collection or when adding a replica. The available types are: + +* *NRT*: This is the default. A NRT replica (NRT = NearRealTime) maintains a transaction log and writes new documents to it's indexes locally. Any replica of this type is eligible to become a leader. Traditionally, this was the only type supported by Solr. +* *TLOG*: This type of replica maintains a transaction log but does not index document changes locally. This type helps speed up indexing since no commits need to occur in the replicas. When this type of replica needs to update its index, it does so by replicating the index from the leader. This type of replica is also eligible to become a shard leader; it would do so by first processing its transaction log. If it does become a leader, it will behave the same as if it was a NRT type of replica. +* *PULL*: This type of replica does not maintain a transaction log nor index document changes locally. It only replicates the index from the shard leader. It is not eligible to become a shard leader and doesn't participate in shard leader election at all. + +If you do not specify the type of replica when it is created, it will be NRT type. 
+ +=== Combining Replica Types in a Cluster + +There are three combinations of replica types that are recommended: + +* All NRT replicas +* All PULL replicas +* TLOG replicas with PULL replicas + +==== All NRT Replicas + +Use this for small to medium clusters, or even big clusters where the update (index) throughput is not too high. NRT is the only type of replica that supports soft-commits, so also use this combination when NearRealTime is needed. + +==== All PULL Replicas + +Use this combination if NearRealTime is not needed and the number of replicas per shard is high, but you still want all replicas to be able to handle update requests. + +==== TLOG replicas plus PULL replicas + +Use this combination if NearRealTime is not needed, the number of replicas per shard is high, and you want to increase availability of search queries over document updates even if that means temporarily serving outdated results. + +==== Other Combinations of Replica Types + +Other combinations of replica types are not recommended. If more than one replica in the shard is writing its own index instead of replicating from an NRT replica, a leader election can cause all replicas of the shard to become out of sync with the leader, and all would have to replicate the full index. + +=== Recovery with PULL Replicas + +If a PULL replica goes down or leaves the cluster, there are a few scenarios to consider. + +If the PULL replica cannot sync to the leader because the leader is down, replication would not occur. However, it would continue to serve queries. Once it can connect to the leader again, replication would resume. + +If the PULL replica cannot connect to ZooKeeper, it would be removed from the cluster and queries would not be routed to it from the cluster. + +If the PULL replica dies or is unreachable for any other reason, it won't be query-able. When it rejoins the cluster, it would replicate from the leader and when that is complete, it would be ready to serve queries again. 
+ == Document Routing Solr offers the ability to specify the router implementation used by a collection by specifying the `router.name` parameter when <>. -If you use the (default) "```compositeId```" router, you can send documents with a prefix in the document ID which will be used to calculate the hash Solr uses to determine the shard a document is sent to for indexing. The prefix can be anything you'd like it to be (it doesn't have to be the shard name, for example), but it must be consistent so Solr behaves consistently. For example, if you wanted to co-locate documents for a customer, you could use the customer name or ID as the prefix. If your customer is "IBM", for example, with a document with the ID "12345", you would insert the prefix into the document id field: "IBM!12345". The exclamation mark ('!') is critical here, as it distinguishes the prefix used to determine which shard to direct the document to. +If you use the `compositeId` router (the default), you can send documents with a prefix in the document ID which will be used to calculate the hash Solr uses to determine the shard a document is sent to for indexing. The prefix can be anything you'd like it to be (it doesn't have to be the shard name, for example), but it must be consistent so Solr behaves consistently. + +For example, if you want to co-locate documents for a customer, you could use the customer name or ID as the prefix. If your customer is "IBM", for example, with a document with the ID "12345", you would insert the prefix into the document id field: "IBM!12345". The exclamation mark ('!') is critical here, as it distinguishes the prefix used to determine which shard to direct the document to. Then at query time, you include the prefix(es) into your query with the `\_route_` parameter (i.e., `q=solr&_route_=IBM!`) to direct queries to specific shards. In some situations, this may improve query performance because it overcomes network latency when querying all the shards. 
The `compositeId` router supports prefixes containing up to 2 levels of routing. For example: a prefix routing first by region, then by customer: "USA!IBM!12345" -Another use case could be if the customer "IBM" has a lot of documents and you want to spread it across multiple shards. The syntax for such a use case would be : "shard_key/num!document_id" where the /num is the number of bits from the shard key to use in the composite hash. +Another use case could be if the customer "IBM" has a lot of documents and you want to spread it across multiple shards. The syntax for such a use case would be : `shard_key/num!document_id` where the `/num` is the number of bits from the shard key to use in the composite hash. -So "IBM/3!12345" will take 3 bits from the shard key and 29 bits from the unique doc id, spreading the tenant over 1/8th of the shards in the collection. Likewise if the num value was 2 it would spread the documents across 1/4th the number of shards. At query time, you include the prefix(es) along with the number of bits into your query with the `\_route_` parameter (i.e., `q=solr&_route_=IBM/3!`) to direct queries to specific shards. +So `IBM/3!12345` will take 3 bits from the shard key and 29 bits from the unique doc id, spreading the tenant over 1/8th of the shards in the collection. Likewise if the num value was 2 it would spread the documents across 1/4th the number of shards. At query time, you include the prefix(es) along with the number of bits into your query with the `\_route_` parameter (i.e., `q=solr&_route_=IBM/3!`) to direct queries to specific shards. If you do not want to influence how documents are stored, you don't need to specify a prefix in your document ID. 
From d696b5986b8d903a4f4f3d75346fc5715ebe9a0e Mon Sep 17 00:00:00 2001 From: yonik Date: Tue, 1 Aug 2017 13:48:45 -0400 Subject: [PATCH 47/95] SOLR-10845: fix QueryEqualityTest unknown schema field for graphTerms query --- .../core/src/test/org/apache/solr/search/QueryEqualityTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/solr/core/src/test/org/apache/solr/search/QueryEqualityTest.java b/solr/core/src/test/org/apache/solr/search/QueryEqualityTest.java index aece15ea10c..6e747001cd5 100644 --- a/solr/core/src/test/org/apache/solr/search/QueryEqualityTest.java +++ b/solr/core/src/test/org/apache/solr/search/QueryEqualityTest.java @@ -171,7 +171,7 @@ public class QueryEqualityTest extends SolrTestCaseJ4 { public void testGraphTermsQuery() throws Exception { SolrQueryRequest req = req("q", "*:*"); try { - assertQueryEquals("graphTerms", req, "{!graphTerms f=field1 maxDocFreq=1000}term1,term2"); + assertQueryEquals("graphTerms", req, "{!graphTerms f=field1_s maxDocFreq=1000}term1,term2"); } finally { req.close(); } From 6b399d7a6178ae04b4905f31921de1e31fa6c033 Mon Sep 17 00:00:00 2001 From: Steve Rowe Date: Tue, 1 Aug 2017 15:32:54 -0400 Subject: [PATCH 48/95] SOLR-10033: When attempting to facet with facet.mincount=0 over points fields, raise mincount to 1 and log a warning. --- solr/CHANGES.txt | 3 +++ .../java/org/apache/solr/request/NumericFacets.java | 2 +- .../java/org/apache/solr/request/SimpleFacets.java | 12 ++++++++---- .../test/org/apache/solr/request/TestFaceting.java | 2 +- 4 files changed, 13 insertions(+), 6 deletions(-) diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index 78456ae7052..bd56c6a2c18 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -604,6 +604,9 @@ Other Changes * SOLR-9321: Remove deprecated methods of ClusterState. (Jason Gerlowski, ishan, Cao Manh Dat) +* SOLR-10033: When attempting to facet with facet.mincount=0 over points fields, raise mincount to 1 + and log a warning. 
(Steve Rowe) + ================== 6.7.0 ================== Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release. diff --git a/solr/core/src/java/org/apache/solr/request/NumericFacets.java b/solr/core/src/java/org/apache/solr/request/NumericFacets.java index fd17f1f7397..b509995efad 100644 --- a/solr/core/src/java/org/apache/solr/request/NumericFacets.java +++ b/solr/core/src/java/org/apache/solr/request/NumericFacets.java @@ -407,7 +407,7 @@ final class NumericFacets { private static NamedList getCountsMultiValued(SolrIndexSearcher searcher, DocSet docs, String fieldName, int offset, int limit, int mincount, boolean missing, String sort) throws IOException { // If facet.mincount=0 with PointFields the only option is to get the values from DocValues - // not currently supported. See SOLR-10033 + // not currently supported. See SOLR-11174 mincount = Math.max(mincount, 1); final SchemaField sf = searcher.getSchema().getField(fieldName); final FieldType ft = sf.getType(); diff --git a/solr/core/src/java/org/apache/solr/request/SimpleFacets.java b/solr/core/src/java/org/apache/solr/request/SimpleFacets.java index f39eda47360..085b47e8a63 100644 --- a/solr/core/src/java/org/apache/solr/request/SimpleFacets.java +++ b/solr/core/src/java/org/apache/solr/request/SimpleFacets.java @@ -17,6 +17,7 @@ package org.apache.solr.request; import java.io.IOException; +import java.lang.invoke.MethodHandles; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -93,6 +94,8 @@ import org.apache.solr.search.grouping.GroupingSpecification; import org.apache.solr.util.BoundedTreeSet; import org.apache.solr.util.DefaultSolrThreadFactory; import org.apache.solr.util.RTimer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.solr.common.params.CommonParams.SORT; @@ -103,6 +106,7 @@ import static org.apache.solr.common.params.CommonParams.SORT; * to leverage any of its functionality. 
*/ public class SimpleFacets { + private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); /** The main set of documents all facet counts should be relative to */ protected DocSet docsOrig; @@ -492,10 +496,10 @@ public class SimpleFacets { + FacetParams.FACET_CONTAINS + ", " + FacetParams.FACET_EXCLUDETERMS + ") are not supported on numeric types"); } -// We should do this, but mincount=0 is currently the default -// if (ft.isPointField() && mincount <= 0) { -// throw new SolrException(ErrorCode.BAD_REQUEST, FacetParams.FACET_MINCOUNT + " <= 0 is not supported on point types"); -// } + if (ft.isPointField() && mincount <= 0) { // default is mincount=0. See SOLR-10033 & SOLR-11174. + LOG.warn("Raising facet.mincount from " + mincount + " to 1, because field " + field + " is Points-based."); + mincount = 1; + } counts = NumericFacets.getCounts(searcher, docs, field, offset, limit, mincount, missing, sort); } else { PerSegmentSingleValuedFaceting ps = new PerSegmentSingleValuedFaceting(searcher, docs, field, offset, limit, mincount, missing, sort, prefix, termFilter); diff --git a/solr/core/src/test/org/apache/solr/request/TestFaceting.java b/solr/core/src/test/org/apache/solr/request/TestFaceting.java index 9559b4ca405..32cb177fa0a 100644 --- a/solr/core/src/test/org/apache/solr/request/TestFaceting.java +++ b/solr/core/src/test/org/apache/solr/request/TestFaceting.java @@ -335,7 +335,7 @@ public class TestFaceting extends SolrTestCaseJ4 { @Test public void testFacetSortWithMinCount0() { - assumeFalse("facet.mincount=0 doesn't work with point fields (SOLR-10033) or single valued DV", + assumeFalse("facet.mincount=0 doesn't work with point fields (SOLR-11174) or single valued DV", Boolean.getBoolean(NUMERIC_POINTS_SYSPROP) || Boolean.getBoolean(NUMERIC_DOCVALUES_SYSPROP)); assertU(adoc("id", "1", "f_td", "-420.126")); From 9c154ffbb8cbc67e8284e8971c84689f5810a977 Mon Sep 17 00:00:00 2001 From: Erick Erickson Date: Tue, 1 Aug 2017 
14:10:10 -0700 Subject: [PATCH 49/95] LUCENE-7915: fix precommit problem --- .../core/src/java/org/apache/lucene/index/TieredMergePolicy.java | 1 - 1 file changed, 1 deletion(-) diff --git a/lucene/core/src/java/org/apache/lucene/index/TieredMergePolicy.java b/lucene/core/src/java/org/apache/lucene/index/TieredMergePolicy.java index bba57efff39..b5185fc9aef 100644 --- a/lucene/core/src/java/org/apache/lucene/index/TieredMergePolicy.java +++ b/lucene/core/src/java/org/apache/lucene/index/TieredMergePolicy.java @@ -20,7 +20,6 @@ package org.apache.lucene.index; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; From 0e2ec280be6764dc84d901394f4c24a55bf291be Mon Sep 17 00:00:00 2001 From: Steve Rowe Date: Tue, 1 Aug 2017 19:31:31 -0400 Subject: [PATCH 50/95] SOLR-10033: return warning in response header about raising mincount above 0 with points fields --- .../org/apache/solr/request/SimpleFacets.java | 11 +++++++++- .../org/apache/solr/request/TestFaceting.java | 22 ++++++++++++++++++- 2 files changed, 31 insertions(+), 2 deletions(-) diff --git a/solr/core/src/java/org/apache/solr/request/SimpleFacets.java b/solr/core/src/java/org/apache/solr/request/SimpleFacets.java index 085b47e8a63..4a911d21a62 100644 --- a/solr/core/src/java/org/apache/solr/request/SimpleFacets.java +++ b/solr/core/src/java/org/apache/solr/request/SimpleFacets.java @@ -497,7 +497,16 @@ public class SimpleFacets { + FacetParams.FACET_EXCLUDETERMS + ") are not supported on numeric types"); } if (ft.isPointField() && mincount <= 0) { // default is mincount=0. See SOLR-10033 & SOLR-11174. 
- LOG.warn("Raising facet.mincount from " + mincount + " to 1, because field " + field + " is Points-based."); + String warningMessage + = "Raising facet.mincount from " + mincount + " to 1, because field " + field + " is Points-based."; + LOG.warn(warningMessage); + List warnings = (List)rb.rsp.getResponseHeader().get("warnings"); + if (null == warnings) { + warnings = new ArrayList<>(); + rb.rsp.getResponseHeader().add("warnings", warnings); + } + warnings.add(warningMessage); + mincount = 1; } counts = NumericFacets.getCounts(searcher, docs, field, offset, limit, mincount, missing, sort); diff --git a/solr/core/src/test/org/apache/solr/request/TestFaceting.java b/solr/core/src/test/org/apache/solr/request/TestFaceting.java index 32cb177fa0a..ff7d72013ef 100644 --- a/solr/core/src/test/org/apache/solr/request/TestFaceting.java +++ b/solr/core/src/test/org/apache/solr/request/TestFaceting.java @@ -356,8 +356,28 @@ public class TestFaceting extends SolrTestCaseJ4 { "//lst[@name='facet_fields']/lst[@name='f_td']/int[3][@name='-1.218']"); } + @Test + public void testFacetOverPointFieldWithMinCount0() { + String field = "f_" + new String[]{"i","l","f","d"}[random().nextInt(4)] + "_p"; + String expectedWarning = "Raising facet.mincount from 0 to 1, because field " + field + " is Points-based."; + SolrQueryRequest req = req("q", "id:1.0", + FacetParams.FACET, "true", + FacetParams.FACET_FIELD, field, + FacetParams.FACET_MINCOUNT, "0"); + assertQ(req + , "/response/lst[@name='responseHeader']/arr[@name='warnings']/str[.='" + expectedWarning + "']"); + + field = "f_" + new String[]{"is","ls","fs","ds"}[random().nextInt(4)] + "_p"; + expectedWarning = "Raising facet.mincount from 0 to 1, because field " + field + " is Points-based."; + req = req("q", "id:1.0", + FacetParams.FACET, "true", + FacetParams.FACET_FIELD, field, + FacetParams.FACET_MINCOUNT, "0"); + assertQ(req + , "/response/lst[@name='responseHeader']/arr[@name='warnings']/str[.='" + expectedWarning + "']"); + 
} - public void testSimpleFacetCountsWithMultipleConfigurationsForSameField() { + public void testSimpleFacetCountsWithMultipleConfigurationsForSameField() { clearIndex(); String fname = "trait_ss"; assertU(adoc("id", "42", From edd9c11329e86e937b62967184d3d0c54f030e0d Mon Sep 17 00:00:00 2001 From: Steve Rowe Date: Wed, 2 Aug 2017 09:07:19 -0400 Subject: [PATCH 51/95] SOLR-10033: TestDistributedSearch: ignore response header warnings section when comparing distributed and control responses --- .../org/apache/solr/BaseDistributedSearchTestCase.java | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/solr/test-framework/src/java/org/apache/solr/BaseDistributedSearchTestCase.java b/solr/test-framework/src/java/org/apache/solr/BaseDistributedSearchTestCase.java index c46d3461040..253ba405dd9 100644 --- a/solr/test-framework/src/java/org/apache/solr/BaseDistributedSearchTestCase.java +++ b/solr/test-framework/src/java/org/apache/solr/BaseDistributedSearchTestCase.java @@ -929,6 +929,14 @@ public abstract class BaseDistributedSearchTestCase extends SolrTestCaseJ4 { } } } + { // we don't care if one has a warnings section in the header and the other doesn't - control vs distrib + if (a.getHeader() != null) { + a.getHeader().remove("warnings"); + } + if (b.getHeader() != null) { + b.getHeader().remove("warnings"); + } + } compareSolrResponses(a, b); } From 83cb55fa3744c6bba2dbc9ba35c1ff7d84be8c31 Mon Sep 17 00:00:00 2001 From: Cassandra Targett Date: Wed, 2 Aug 2017 09:08:49 -0500 Subject: [PATCH 52/95] Ref Guide: Ensure "Comments on this Page" doesn't show up in page TOCs --- solr/solr-ref-guide/src/_layouts/page.html | 4 ++-- solr/solr-ref-guide/src/css/comments.css | 4 ---- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/solr/solr-ref-guide/src/_layouts/page.html b/solr/solr-ref-guide/src/_layouts/page.html index 85bd84dbfd2..137cd447fac 100755 --- a/solr/solr-ref-guide/src/_layouts/page.html +++ b/solr/solr-ref-guide/src/_layouts/page.html @@ -70,8 
+70,8 @@ layout: default
-

Comments on this Page

-

We welcome feedback on Solr documentation. However, we cannot provide application support via comments. If you need help, please send a message to the Solr User mailing list.

+
Comments on this Page
+

We welcome feedback on Solr documentation. However, we cannot provide application support via comments. If you need help, please send a message to the Solr User mailing list.

diff --git a/solr/solr-ref-guide/src/css/comments.css b/solr/solr-ref-guide/src/css/comments.css index 1292d23ebb7..f59796affcf 100644 --- a/solr/solr-ref-guide/src/css/comments.css +++ b/solr/solr-ref-guide/src/css/comments.css @@ -22,10 +22,6 @@ /* in general */ -#comments_thread p { - line-height: 1.3em; - color: #003; -} #comments_thread h4 { font-size: 14px; From 265a440e33e5cab59ec3c56162022d037edadcea Mon Sep 17 00:00:00 2001 From: Cassandra Targett Date: Wed, 2 Aug 2017 13:37:19 -0500 Subject: [PATCH 53/95] SOLR-10347: Update Ref Guide screenshot; minor doc cleanup --- solr/solr-ref-guide/src/documents-screen.adoc | 14 +++++++------- .../documents-screen/documents_add_screen.png | Bin 58783 -> 181627 bytes 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/solr/solr-ref-guide/src/documents-screen.adoc b/solr/solr-ref-guide/src/documents-screen.adoc index 7c16ee98663..fe64ed8eade 100644 --- a/solr/solr-ref-guide/src/documents-screen.adoc +++ b/solr/solr-ref-guide/src/documents-screen.adoc @@ -38,7 +38,7 @@ There are other ways to load data, see also these sections: * <> ==== -The first step is to define the RequestHandler to use (aka, 'qt'). By default `/update` will be defined. To use Solr Cell, for example, change the request handler to `/update/extract`. +The first step is to define the RequestHandler to use (aka, `qt`). By default `/update` will be defined. To use Solr Cell, for example, change the request handler to `/update/extract`. Then choose the Document Type to define the type of document to load. The remaining parameters will change depending on the document type selected. @@ -46,7 +46,7 @@ Then choose the Document Type to define the type of document to load. The remain When using the JSON document type, the functionality is similar to using a requestHandler on the command line. Instead of putting the documents in a curl command, they can instead be input into the Document entry box. 
The document structure should still be in proper JSON format. -Then you can choose when documents should be added to the index (Commit Within), & whether existing documents should be overwritten with incoming documents with the same id (if this is not *true*, then the incoming documents will be dropped). +Then you can choose when documents should be added to the index (Commit Within), & whether existing documents should be overwritten with incoming documents with the same id (if this is not `true`, then the incoming documents will be dropped). This option will only add or overwrite documents to the index; for other update tasks, see the <> option. @@ -54,19 +54,19 @@ This option will only add or overwrite documents to the index; for other update When using the CSV document type, the functionality is similar to using a requestHandler on the command line. Instead of putting the documents in a curl command, they can instead be input into the Document entry box. The document structure should still be in proper CSV format, with columns delimited and one row per document. -Then you can choose when documents should be added to the index (Commit Within), and whether existing documents should be overwritten with incoming documents with the same id (if this is not *true*, then the incoming documents will be dropped). +Then you can choose when documents should be added to the index (Commit Within), and whether existing documents should be overwritten with incoming documents with the same id (if this is not `true`, then the incoming documents will be dropped). == Document Builder -The Document Builder provides a wizard-like interface to enter fields of a document +The Document Builder provides a wizard-like interface to enter fields of a document. == File Upload The File Upload option allows choosing a prepared file and uploading it. If using only `/update` for the Request-Handler option, you will be limited to XML, CSV, and JSON. 
-However, to use the ExtractingRequestHandler (aka Solr Cell), you can modify the Request-Handler to `/update/extract`. You must have this defined in your `solrconfig.xml` file, with your desired defaults. You should also update the `&literal.id` shown in the Extracting Req. Handler Params so the file chosen is given a unique id. +However, to use the ExtractingRequestHandler (aka Solr Cell), you can modify the Request-Handler to `/update/extract`. You must have this defined in your `solrconfig.xml` file, with your desired defaults. You should also add `&literal.id` shown in the "Extracting Req. Handler Params" field so the file chosen is given a unique id. -Then you can choose when documents should be added to the index (Commit Within), and whether existing documents should be overwritten with incoming documents with the same id (if this is not *true*, then the incoming documents will be dropped). +Then you can choose when documents should be added to the index (Commit Within), and whether existing documents should be overwritten with incoming documents with the same id (if this is not `true`, then the incoming documents will be dropped). == Solr Command @@ -78,6 +78,6 @@ The documents should be structured as they would be if using `/update` on the co When using the XML document type, the functionality is similar to using a requestHandler on the command line. Instead of putting the documents in a curl command, they can instead be input into the Document entry box. The document structure should still be in proper Solr XML format, with each document separated by `` tags and each field defined. -Then you can choose when documents should be added to the index (Commit Within), and whether existing documents should be overwritten with incoming documents with the same id (if this is not **true**, then the incoming documents will be dropped). 
+Then you can choose when documents should be added to the index (Commit Within), and whether existing documents should be overwritten with incoming documents with the same id (if this is not `true`, then the incoming documents will be dropped). This option will only add or overwrite documents to the index; for other update tasks, see the <> option. diff --git a/solr/solr-ref-guide/src/images/documents-screen/documents_add_screen.png b/solr/solr-ref-guide/src/images/documents-screen/documents_add_screen.png index 571d40ac1ddd63fd7596650df5656518779a6a03..bbb5eb7c4b3356be64d78816bc44282a1609f80e 100644 GIT binary patch literal 181627 zcmdRVb9iOT(syiUVq+$@Z6_1kwmGpm*_jv<+qP}nwrzYl_r5sqz31Hjzx_P>*}Ya* zbyfemx~f*K-eGdz#o%GEVSs>u;3dR`6@Y-iKbs&$P!OL#J92)yKtM1R=0ZYp5<)_R zat^j8=2pf)K;mKX36M&tOV|S!qq!Iwg5(NzoFRgi)Ex``Jn=?1eixiMf_mDYixF7MlJ*10bD4))*oN7yzz9ZkLJk9M+c5=|3kbXe&8@zwdNqcavrtT}m9TC?G#6`(EPE$9?IKEPE zBo9Hu+Kc@F0|E)^LH-KN?;X{jytGPMix32;PWBzZ$UOC{MrBLx5#k`&3pzTY%6}pq ze!lupB0HjE+4;)CkxB|Z!8`I33jbAFAKP^9qY3vz^T>R=$-^u=Acp!HTEG;d*<`MW z>Xno_R2P?hz;j*t9gZx*tNS47_YpqR05>3H12;mkNBba&B&8!EqYvS?;Z!)1gqOgw zfse!+SbZQWBahGld-O$L`p)WZxpW>wPV&f0^r> z*uZw);-`A`U7Syx(u4{mMHyQG``*C^?c7u?LI916ffCXrMIGUS`6@y3Lq<}e8&CAp z&+w`7gG~D(#Q9=Rf;@L(o=rfo17Xw}>LNjP^E0aR-bA3_#RF@h9XK4l?_Nl8bP}FZv6-*FTj| zc8~zZFFT8!92$k+zzn_?n$kZ!i}4Uj3G@vlEr3>FA(QtIm>DWk&^C*X9z;=(vlQl} zJD(obCFpIF-VG}i%(Sz~j<*$_Hi&9-$Bx7kx*d2U2y#>P5)1^1@l#PG!@!_M5}tD+FU7Ol0t^+EQ5<@@;|Oo%!QjSfa92q(Zl zND_x+0bLfT8Hf=mkqsvwVkQemdL3F4x)L@JO4IwhSAUySA|Y4Ifix&uK^#XSFV}tA zc-nB9i!r%WqMigY_GuS>SK(UXTKSsriAe-tAZ}kkofk^6LarRgIt)6@MHyc#<($u{uXgmN@9&6YP{6w27l zc+QBM?98~wcwqEw{LRR`dcWGQ`lwpd*wVOS2r#&{=d{PY=jQO_AhyT38*Bz86S4n& zXPR{iFhx5Pe8k?2pC%)lg)t>N>2bJo?QpGfjeJeDH?l`Jh)9nVOdBvJZfIqiYOreP zDKDpNrz|eFm@QXfC$*Kc4I+Zk2i7Op2hrCOt{aymnJ!r*$rYaxmriaN2Ol30?-++j zsjGso$*;Rv>D!WN*Hr|LsH}%36ps`?M=7SZh9*&?d!t@sH=~-P8yc7zb~M8@ z=V}TxZPW{zteWjrjMb8roMz4&DJFP&ZA;B-&%YzfA_w!=O=eYASjSyDtob6wVGOK| 
zwd;D8o0iHNxNIMm*86!!-s9df5c&~{5b6*}B5u*YQghau4a(0%@3{5acZMB(IdEDu z?^N$e-qG569Tgb~I#4*e?w;#p>APz@v1DUni(XN0h&sPNw_ID{MnbfW5{$x^<(ROW zV4Nt>Nzswean{*cpIKeHh``H;a;VFY3eJjYSW=h zg_AsRsSBDJ&3cEWy1n6%nFQU?c)(t7I-sXtq<^m8YY)tUk{$Kx^zrR#N6YnG=bS1X z)|Sndxr-r-eM=%L($FF_K0KRd!6UPIab#z9%7WN2gB5v%d+9FqdgQo45m?xeFg z?esSaGU9_<$)$Mz__Y!^RcrM-;svUQgSn)+)wwfE=6NeErp9^avuNkZ=11qVle+nX z3WD{`wk&rNbHn5QjqB0t@SAnp@YUIvij)o_pF_wCf@7Fm7-8_Y-*~J9&&W3<9|P$$ z37YrTPdbBCtvW_3N(#5mRIVp?8KXXwZ{`-7+x6*ZtSgA;oo7g=jSVwDOr5K&>}{?l z%YC&YTasH~T{|{VTMjhBl!vm~a@)4sK=a=7#$IY4&K4i)sGCl0nlyj(c*nnK-urNO za@n0^j&wZ>6^Dc)cyWyKd47aGMEAq2g~#MH<}8V25Ja%g-!YvWq)6@!XAj#d`xpBi z|2iJWUP(#Oh+jT)wo7ZYxqY18X(QwQ&b)xLKwz%N{3Q*HtdC6h9pz!9P0_nxS+?f; zLdFzF%?I_Fl|IRAW3hcJMU#0ZM|0Cf>P6E%dPqV|N6M?lrzv_qfj_~nXiAhll__F4 z(tWRmUg@X(s*Pz%e-FOwzO)wurl3HSaFwdQzq5GW24*D)9$!)zTM*W(0#FXp}Wjy zm*MKOE3h2EhFC-Jop<>X^QG}JY&15efKryqo9(epztd#1J99wqp=Q!FD!5Uo;(hRS z=fv*a@%SbN2cL!aRrJ>6 z_-x))@}hWMv&}XGD#h)Y=&fgWpj%clqnNLT*VQZR_1b{(fa=}yh%LaDX5+1$FI_St z1;s%)6^qI@6ZE?@*rqiakkTyIq`Q0vAfh813 zFF%u82y-PhCp8&qPD5L3T74s117lh@Yr9WsARulx&d;W`v6DWbo3)jVBc~e=@joay zKihxYrXwc&2Z@s<53!nz9HEe{gE1isEi)}WF)s`uAtASekqM`Qu;_oWe_rtrn>jhz zanjMby1LT3GSS*Pn9?zDaB$GkGtx0K(tJ|TIR3D4(s!e=aU}WYApbp%u(6||gSnlP zxvdT1ALHs9*g8A$5EK6)=zl){)YI6_{C_3cIR4kPJ|{@`M-LqXEj`^|u{oKW{0-Y5 zJ^y6;$GrY2j{A?nIOWXUjIGp#&8>}X96wd#WuRwd=KhDAf9v{RP5;EI_9s@3|I7NP zu0OHJjgVGN+>5~NGawt9@ z`mY83V?1P_J?#Ie4LvY25Zz`hl?>{C(3u!$nB}kL^4}`c>VXgg>2#8*rhNO08UHtx zFi@Q3-@@`qq}2%~4Akl?Th1)=ci??eg#wW;{2QqME#edIMqlVqAUEI)E%TKBgfOsN z`G0_o$PO443HTC(w`rRBFIVxu1cGF0f&NMMzaqT}@{JH=6Wr4w&-_17@q_U{Z#=FXMmmgdy1JNNZ(45e0KrTYfiMvr9ee;DSN<|pbMr~uFyOBZ?h*s_7g49r z74)Mf+Jz0Zq<>;tE)?28RF3BbZQR>6Dm5fxXL=)^IPhZW>{@LcB zY;71U$>2~fs28GSgYQb7e7qtfg03J+srV|M#(u`CpZPw%1q`qA0hj+vZkPHIH@?KR+l3gkzWV7=LUxJ(g?B8s)Oy7u3$DPjLw?1dP8m|y-c`XG4%6HS8TV`nKT8KLVcquY}O zS!$q@ov;|rzx1GwAt*?&oj!+Qd}$^vr5C5HUu;)>TTSx`4f-)8fokJb$xC?Ht?c43 zGPt?WA4=otMS-%|Hu6iK!&NHy_vWROo?SsG*=d%bV(wC_Y$5AxWw^pmPx>?6R-K`K 
zdlG=W+!q9iO6Sr|3ljN|Sa!w=MI-&S#q&d@@`3nxKh58$s!|+zq7YK?qOX+J%88-F z-^pC?AptVU-9&!Hh>c5v5KbJ0C_+0}K_8w$uIvjzxt)G}bUlKxv_S|z@PRUknJH=u%NCJXWm0SdcSJ!wP5rJAGyeKK=cC zhNC>6MRxFog7P47#kLvoG(oz5H)H`t@_5MulAEw_vHP<4pdz3YRQ8ygdyAaH*EC8k zP|Kq&T&329g?i%+P6%ua&8%iVE$&SUR?lo2AEFwCy3vwSfuI!?4H~izEr=stXQJ5= zN%!ycfC47_3}1T+b|n1h9;B~TR7>^HN2sD%23>+l!FP(@>Cv_Hkm|2X5fOS)65phF zu6dI~2Pn9`HW!_7{S@S^-ZF zMDh>q!)w4GU?5)_-S+DK_3lCfq5VXpS#A(eK7eNXik3|B(wUHkVE8a@_hpYldy=X; z%ipMioDBH0s#R6%J1|5IwDf$@OXQ}_2A8E8sd6OgViWKbTcK|?y~OeQz?^iGO{^45 zR7`<85DToMt>*f@N`Wt~?63U`Ay^>4^&bmjavgOm?|2fOsZ#F&S^aI=(?>sxJ}|aO zM}z{mRxTtjex6V3GQVIj~_@KY@Pxr#yA5~{R5Ac#Av$h5(+k?trSiqq65SY zM&P%|-c5^-o3j1}(4yi)mk~LCj`9ogWE+M9-y2dMSG~p1Ni5nX1st|dLm7&|-{Eo) z`WcuGpiVa^HBqMENZ-|`XMWus;@PRm&^ zIbpUAf&3C_P8lCT1dz0BIUbZj<@O0J#-A?-hbKGgS*?qf!Dul_lo_JmzxwO()nn+; zv}8{C(_C85ApFps5F2k?vEL3F(C1Ca4W_<1?!4`|N(QcOZhBu5BJ_x`5}oOxMRzogXJjhY30|gJQD4-w8u|ZO-N1WZ?A_D_kKXL>m}3^ zzEpKd{{8P3S}+s+b1zyFJ%#*WcV>#3q{rk6>691U_a=RAN|mp9zOh)-DFYf!aEZq< zmm9KB|B&Y_i4PR4ekYOQ35a49ZoCZnFxSXR7>svL<7yYEEMEiPC~_ry&STp`TLJ6C znJ4yGw9B+~lsTwTp(L7`q``lRiWGaYeNj^Jxs%h=mM*K&-lq7Q^wY274>R;ZetcFe z%?EO4iwLP&)7u0aD;c_9{#V{_GUM!UP03U-aYmOKZc27p7Lk* zqUHNXJ`5y#Okf^EtC8$4v?(#~0{9|1W`N6u0)qELY|H-1&J7|4MjTMKYPFQxOc6PG zwl!c6TeHPb2r4N0Q^Ed8b}B`=(z*a#2&Cs&hass9YH9lyI)eTc-Qk!j(%oQet%DtC z+}T=C>5EO^8Zuab520SsWG`PlSD5v_0;Y$P;8Mqi@KL&UIpaq)9viNYI$o~czk1_w zXus-e#<=*zwAj|KihuNWHvN(JHht3~xN${TtJ^=wVr0@TAv!W&x3{0OJP|PbfjS6M z>1gZ59O@LLjQ*u_Q!J!;v+rqY)d#?H1l8_=-I`+dq^1DKF*-k?*P4y%ZRW&}OC9ft z<&8@~e}#HG1QlL?1ERr22@DT_G!xmTs65E;Mp_S?PHYMDDKD18E47o;hC%lhm=8xc z#T{1t507NTpPKSr3QisrmxyzQU+)@40;#|KXgE1cEt%kmn=^+#L_?J#PGhBB{NCZd zB*89BPiB}E$=iB4aJ6=c&4b$|L^pGt)6sXbknLcIqJAN7gk7yUMsYO}p@5^mQHiky zdqA>X&!v|cBuZU05dCyZQn*?T-7lI}8zl6dwKZWMl({}atZ&;@&RZ`^B~Wp*RAv2% z-OhlEKtuxTZ*y4fPk*0ITLep<-c^`(Uwvn()s4?KQDg2Hvomb8tH!{U-O}{lMwd;#2+QYcE?}e!Ezd}Hk~UwZdZYC*d$K{S zk7*vt>y1qW@mQ15battDk#0u!rN1@2)mmH_fgxb-hO0_PxGH(Kzyyb?_>PS|PY9T| 
znh)BlGvI2XB2~S51nT@KlQgBH5}7~w5PhOXL4b4tZp1Gj*;gOdH$(LgC+P5F3E3wz zdQepdw#Xa#_tc_O0{XI$x@Ekp>37Jp2KT3r#`qHXYvbwTPa@5^+Mb*1uwA2?lnHT^bU%{O2 zR7JQk6{YahHl2QEbWh5o7<$mD+UpH;ZlB=yPCW^c-3cmW##0zHql!ZQd9I3_TRz(^ z_ChK-4L6GszWCv%kYHe(Zk(RiI~cX=LdMb{lharPj(5^Fq1EPzg&!eqFn=p@}svDi#+`vPW@e5 zc$*;1jqLZ0EM0E@{AQ`)Qv&>@j+MDkkwozoFvheWkAh6)bO=VoA(w%f=0Y3aaR&Hj>EZrJ-fdF;Z>Ib!~?nb|Z=b+|Srt;qM|@v&Bgpk7Re;2fMWi zp7iz$eAx%bsEUwb`8WNp?;dnai`SQX9Lv zZFBp&>5|yf>+D1HE&$G7{~Z$d{AM6*|zvNDmL22(-xOmZ7eRcD6+jfc(z*ThL8)rSyQy`f?) zeB`!fmV_`NW%y0B6DczE5Cu6UMM#t`VM>EVl#BJwme36I(x5xN{mM7h=YW+4L&rey zNR3*N9eNUkbFR?VY(_A#{w(bXQ6(Bk2PVMQ;1qL8J;j%^l{(@G0Yqi(-+5m`%)<-3 zLI9o8&No-^qMjozhqLWi(k`$70^Y#dQvlLzR04SFrygsD1uB=*Y}_w(){xzf z9N5^1Z3hD-*adGnTOcz-6Mww08X4p+3`l!-`<)1E@TXQ??W-C}#m*M{YEB+WmMvEZ z2hW^{?@lhaL&gqa5ZMNOx4wj|eUNhcpc83S`>cJVX6p`8&;hMZVfU#@$#TN^O4+r{ z;mmsUB*}U-eP%>>;`1O6f~7z^dSXJeLE^)k0b0UjO&v*=DwUXEg9#w;eC*`i?y-{= z&{OuN)P{oH6@9fl0*FalIfSI8=@>>;2aKxQasg_y!Gj-8<%|#Ym%Ex?EM4b`SXkz& zY}T3`Re$Yj*+N}%6adh)4VLc)&>WxA5;Yv2l#MgYqqR&QSX0e;WBzP*A7CJTeSLtK zm>4EY%7+&Ayssk+JGu)oadV4PhMjo;HeQX>K}zz9gn}mem8(z;3iC)V&S1F1i^FN9 zA`3ur2|}UU$VO^ErDoa0C^%mUQ0Dke zxj3{gs5B*UBTQE-wz0aGxw0Fr8yD=k3*rnptVGlSuC`x0s$@9<=qh*i4L~T-c}T)YcsJG`ntb_9K^6c z6|Aega4SojDz-FIn$-1-wGE^<7k|OPK|$n@$T62PBj&R>+po*BnIaQ`r&4_&C+=0ccdmsk~(Zo zipGWbK`*^l3!?}bq&HS^GU+;8uqcCWnbX3ovISgqM*lLh$DSS?(E8KJ^L{vB_@gH% z;+L9Hh^!hZ{`xgZzSB+CEf>RX7G0(32k*~gq_XvuR?s@nS1<0l6HK!E%S{dlzs3G{ zdVB4mJ<=(yAiDAVqeR;N){BPB%1R{YmU_MC$V9*O*+NCDKv{9z3!+~%5Oqe!NgwPj zLxWuQV6xno==txaybcQWdOjAM>h!#TP@u8o(+_%rT@c;)5P-<_#Mk_GB+7OEQQ#c(6tiKKyrk zdobs#(Dsb_iZ9l?c1^vz7x*svQtFfvbuozVMhfUpR&*)iQlG%upUoh;E8Jkjs_zZ% zj&X}mCJvD>#Xi1dx8h_Vhw_-%-QdIm&N+0V{1SDT47svvLi!a)^KL-msZ^*6m-y6 zu6kmRZ8=}>;OXF-m@%g@GVqPVwpTqqcOaLPjfvP}##GM=lryDu_5@eB<4ca6QB0wQ*JR4kp;^vX~wAw&!M`R5b-05U>+}Gl> zV`o&T2dc0a%bn`pfr}y>{xseA9aWkw>7K@zZU5q8Hs9z66J_2nWV1rT_;X*N;N5Iv zs$u&cCxj>=81yA(aiLgQvGUdEnkBpS=SC2m*?bjf3M)*`8)Gl4Z4}^ad4^sUyIb<6 
zxn!I&h`j(dm&YY#(4=Ix{?w2OV+YfEN5hSnhX(uynoI5BDriqIyfBd1B|>jrLr1t* zZOT<|)>@`SX!$wAfZVu#gMXs?=m>}3svuK0aW?WoE3#+n;IBYDKl{l_O;p#?G%w4& zGLRw@gfXU*LltfoKCA|556Ff{yr9ESxjnJ|VIyCTaGmk-HpR zaXao)?-CYg#X;T(jx+b$YizuIBC3FaLV>fo5URdyPPOZ(&7OyY&XPB0Otxw+?#RzW zV=d#&jSdHvy+5F1GVmXi&bFmSj2H=-jLCwvy;)3X7X#<*m+O8!9@yF~rA-+4IN*1K zw5?s|KeNj!rTu78-i~vg+~F8iyFc=wH`G*o4yxf8tJ$`Kta~ zldRD1cTHSv^bzDpS!Ei?nc}S_4VC}7-rkvvlDSTIchpF*YJXYx_!lpOE zB=`PWJw=XI7BU8b%4&KrxzGG-QwZ(&_Fz57abb=y22z{0RD!LQd7qeDVm^50TnoVA z7_zfWCA!@_wU8}#VpbeMMvOBsGY}u>V)1Y(gwwxs=8E5S_;NmAYD?2wL$?-gS&kj_ zoo0wwo}*mkE43C*1I&?^p>}1hYC`d%HkXS~;4~u(Lu|{?Hx!w(xE{&yE29NKD0G1i zV_2F?HN|d_ytL6zgRl_GDpqEbxmcUq?(9$(52XMlGud_Yv?A1eiTwq)Nd+cwH5 zDSZO@c}VQWX@H(!Gc{|jBK8QLCQpG)H(<)s(|ntY9S=A4t(I%CaM)}F6%-t_<=^8n zs&8{}90vzW=448%GYd7mzY!3)-J1&w>M@OFjN@>9mN@B>CV%(O^p@SiLWS9T^3o(9 zkv|=>U^X`>ZR;MUKE&mvqkOOLkrG0kt zP!x3$*+AlO2h-d7jxQ*ty@T>GDk1huqTAqEWKHgV+{5AuNvMaFAH2vr-biB5)mx)R z7+xaZJ>8aQ=V6aJUu|YeXyNJC$PjlpnXX$p{;pW4HHY{6qw*?`J{;$5Gc47Ec?sPK zqQE*OaO7i=%XQUCPmGkjecBdo$b?5ExpI6_LgTqwD#1aHj}l(7gWRimKZ0swaiR*c zbNblVr$wO>VKU?WB154%#xU3PC z>}l_(!6t{Wo{m+G$63l(^rR0R;dU~-2smc3wr@4OaVh_ttP5_IqHgly7!SYIT_zYxq{JgQSFp=j;+<78AmQ1Fz@e1|wZThR)d%yT3Ht{apq5TrgqxFi$sOJ_@>)QpHokRxyquUY! 
z84F?g36$r{ipr?R3rDH@iLsWHD7~#uHOOAE)cE|*?LhuFQn|P9b_vIYZMQ0I%r{!< zullMwI$8S&-XvKima$<-F2qcqVF;`rOmT#0bVk zGI-mj8f*z(D{sXXWL~Y>l1dd2$Xi2m#4&`9sTCbf3EHjmxomn-K?zR5A>jjSbUJTREao<=7g1d5VP_W0Wyf{ce) zm1tC|fha1kjU$oY$4~J+f*t8c&T@$C-WH1Zt6`PVn3nlZ#MV}{2ht-e(}rAjjtl3H zZrc9$a!&C>Sp_cN+(T<%vztT~?Y$mzaLYvJ{oxc0L2m^hl?Aa~Q$}wiOv|1vDC%~V z5X&X=@<7mf73v}YcP{ca!xP`nC>4b$#cS@fPZDt}g!8#%)DG-HzZYbXjjxRUxT!1N z+beFtPken^(BRUZK*v{*>!Eo6Vl&PY?wbFzFO8|aYZ70E0^L#X)^psf5hTMB8{^cw zyhu1+Q{vgh>3AJ4qm*NNc#XfLHS))$cAU>6=6=Ou`D@)`Ir--^-Jc4@b}G<|b4b&a z^5dcPUbJ1I5i5pVus-Un)}ckm1Qr*Q)Qj?}a!tY-60TDiA%3i=Rc8^d^hf(A45f5o zj$e)?yp#wC+GM3k^2{v0xgZ?ix>Ld)bEUv(Y%EK!IiG85!YEvxAbtcOzNK5&nPrm{ z*!lqJ`ZSH|yb(K) zZgCk=CPtG&B1aB@k6U z!Xri=2a}nM6)>hnE28ih8UaB3_ zkBt|9kZkS1j)8+cwxr~)bw05=+31V}(6xX(>5k?Fu}bP+<~SUg7{kH%`AqOie%T7I zY|Nw%hjBo1>7hBzQvayJa;9B7J@Cq;1|>oF3Hdc5R@7|i`2K_Ziq`R2sM50myPx;- z-MrZ%-P$YOl7t;Dbsz z)oyjAIuot-Gn^YUn={%@+{D+%uY~6KwMx?LNNUJm+7Jl|*H0i3PC41_7j%a^<`e-h z!M;CSN`MX4y!CRBWQHDGqth2!;~Gab?X}iA&h1p07s7yp5Jp-?Ow)(L@%jt}se9YJh7)(Z|H5k%TI61N`u?v*-_?cwan; z{8K==yr1w1&Q|Cc_(&PFBpbXGfEdqso5#TA>vRnWGvbA zd;~!hm|!#)foYC>^X4Skcjz2kj$cKX*zUgh0D6>EqxjZQFl}cdVS4lvl7t! 
z5ABJY!^m&Qr#WecmiiE%g;x$i4Hps63J1@ZBQnw$-LRK)Yy{iqVM~@gcR8;o1Us`A zi&n=>67O~^AZOb`MNydCFize+^`T=e+a)h3eB`pi_q8o@X@PPv-16p944PZVFEA@Ao$kNtR1dP|e?hb*JI=WjH5y#}_vHqcFCQBj^MTeGceG5B3ujBcpNubmnlA zK*;wwve;HVI+CFM31Pl+-fX56c})8tawnwTV9DfIHy>>$#=Hy}|6B^MO)D;;DjY2^ zaIC|?aD~6t8cVFlokmZ+G+ve$Fv)%Dofr1_m=k1%`2{#C2G{-JyQt^G{;XK;QgrwY zg$XF`axMCb#tSiPK32A1&*7UUQ!V@JO2wnMF%(KdTUf}tn!kLa%K$L!SEqY>Sio6- zu{PGN%{q(>wg@3(J%c~4$Z_0{7qCtR`+@jIV({&)R-l-Y!H}5kb~+iKuvJ9_ z1;9iN32y<7E)t zKPVs6X~4JcV}h`Jc^QV&s)4KhG{l{N`!NabHh*;DtK6fy8$0e2OqmpxJYE+Dm$UD} z!m;R~=18IaGKQOC;*{Ld-?OH|H|j5fT%8P=ZD)ZZ4R(Ue>+xSlC*p&6mg!V@V#c5GhVjinnz=&Tmf|dDBH&Fnuz7&m?agOU$-pVyqK|H2OPR9#*8a`j{ z=R8Cgx_oH~e-+i&Wo6WU2}Zf^1CJV}sFfaoZ_vtFS!9kA*!sb0ph`x3+#a&E(4Hgp z>i8>9nPD&S{?Kc%{aQoKoB@dhS_Q(^9Vy{Ps1iYJ0q@#DKNJukX5@s2Jl>W#zAG)| za706U?($l1+%i1gpdX5&uI^d4Td%U@XEIJ`|MRoL zdfsKYP}e=CxKA94ADABWVt3{1-T1m(m2^4g630dMKqI{oaJ4hAb>Q`#N>0+2ZlrgW zY+(DDt|7;ti8N7)%6HAxUUT(zy~1p|8qML}%P*oOvUi$hfIQCfyc00Dk@?c6Y9Q4P zJjFmH{%p;LGE;{j_(KA}e)2~H%OQy6v3#j^)fX=FIqEaZ(lxGIkIt+)tX2m4a{?A?yWi- zp>r+=NglSc{bKF2yF(36HahnLY`ep0hA8nPez&NOi>=@b(c7ASTds}h8Mj^UoLg9v z*djG+@lqWU;&|gSqMGQ(sNnPMQjH3sVTXWkb6_}M?s79ekD}+I(ol~Juq{{(_Kv;i z?G#|!3IXMqMXyeEHWuajqWZRNNs-s=8II7P6QE)buLKm~lShWI^VCw_eb>MzwQ;Zy zRVv-T*!5<&q-%Yn164l^6P0q-wD?DnyObMz^x6=sv^O`K_BZw zc|j_+*#&vt z8H2WBx0`5{lyf2p|I6K zGL@v}ad0kKS%7>H59JR>1Rj{bHKTGvAR2)7~byywzv>qT+Unn8WrBxF08-hSfZt zY!~vG0JynXp?W3B4!K68=_*+=*_zn`M&u?FKXkp^W3bM9qQ3VT_k<9!vx|o*yEhfY zX9*X_`pOoR+Z|FJ6#qtp4zM39>0|lD;lke`;KW2G5lCIOXtb7+5i)JdCw2?4hRXc$ zv{*!6y+ttK4g<`gtb3NRC8WPjtwF$xTU)#UiifHeIW)gO-v36 z0&8!y0MYu~xz>rMNWXuLKYmas18p7E{(_2LA=Nwgwk=WhI}noq^t{y>pVYOHXe za(e{viijryqx=IV=cQhL4%=}L>ha-X>;$*)x~@LU&}&>=UUfe&?x3^(~|Ny(Q-kY;FL2 z$e;(*4F`_Nx(ma|{!H4O=R zp%!={icU<~PEzaFSLYF(C4W|Fe-~wBoG3;DMswJ>>2o+=gRwjyf1u{<(l(j6zqV3Y z%z=O}Ln0JMaVXW#-!<>_GbQyWz(}hTumNqr1=rib#~oPGa4KwLp66ci^xj-_I306M zjImtHl;6Rb=pDG>pZdyZk%1gR+XB4CIqiYUaZ&lLap&kmXhC0fXENF;$y(Iu=$SH~ z*){@b{sIpE@;cnP@y-lJT~A}O+)E&WBqU6zLlU)mVAc)R zYhlJC9g2M4` 
zt}}oh>nRdlMaS(4ek(z8TUslm9`4-|K34n1qOtlFuacP$72GLY5cCas**CDi_JF}d zhwW$EGfHiwtz=J#I(`0h82o}CWP3F<--55ErJY)ILR@GqH~#aetQH`T180uLpBi_E zxXvt_D_;qCWw$ftiJ&Yeac=`CtEq!1MIoSlKN=9+ZX!lluA(sNuAqeu@Y|Hx z5qApD&9czIBLkP)pef1ygYUn6!_T^3-jr}G1bT^1lksPT_VwJLe5C=8eZOEa5ZxvS zNOcuuMyU?Cgdb8tP zGBw>$R1L!~q#=c(|htf>|*)afQID!)~v!^uT=NYk|A)#VBK_Z!j{|1S+8mfm~_0{%dApU22A6$ zflPTFir*Q!Df6=j514rw-0$4-5=BmaY>O((uih*1@O)-IEBw!gz;hnImbo~w5+8%I z9*|NT#G~?U?*Mex@U?Q{84+TWVu-ueP@=3~N5C9XnO5lSnzkY}w;i5m9lrEmy>|cB zTP*xm=iihww)O3T{}!7$mA5Wu(rUwOp^f1Gk@b$@l{VWJXvenQ>DV1>#T{eCHafQ5 zv2EM7&5mu`HgER6=j{9KbAP_i`nBdWt7^G=aU=g88*7 zseJgzzLloIdcwJ8b^#AP1KSGMd=kf3XV<_pYV0SKH33&l0MXq`jYbQf#0vH3c|sVm zA{zueHgIXG6FXDWV;0hLy|=qriT1Zkta$G(Ucal&<+^I~Azlx_WtUkde`QcJGwBvO zh>}^-Pn+2GZ?xj44a5`~F#@Q-X?|$3mJ&(5A`YKW`xr~W6 z$~OuIp*#V!-HZ1{{5#W&5FBTOeihO2YHo_1{j(NnNTe4>8Y7-Tr1T&UtNKw#MNmBH zDKW{VEdH`5Rn&~+FbNnuIzc_+FhHipg<>SIW~Ta1EPic>fp8#=S9CFQ z@&%)QH}b_83;Fg`g##+lUn^v~Ts)f}<29jTGB6hPtk?}X@Y^gZFx<7CO5x-C6EI$7 zO(Fx^H(x>wa%Y4Su*R}7J3q=SY8Bh`{^h~#`U@P+B4KT%;#EQ|B63~_b?Z`O{mW`O zNXxXTbP^Bl!xx2T!)XnHzJm4L){8h?(uDQ{BAN5CoZr#-|K*uThXA$M>lR^wJh8cb zjSjly5qZ_ie0iC8q+7|ENw=j!EZVig4?2D4vdnT%#9!nYOF<%&z4a?&DlIx}Uy2SI zooWkGdX{(9>}2M-EHmiftuzP~d{?>Pvm>Z~oAX&EYI3s4T*;BxRS#HF!(8-JIiEl6 zNxACOA2=R1wJhYCPp_+)@f}!vwHYTexBE%H@Cpey$k2?Q|azT8p!qo8c+8vjLg_bFq91I*Pzh#8hLy#wW~v-45Zk3~n?eWq&YDP?9n@=t zMBeA3VDFjyG@iw}?}WV{n5oQB9UW9Y>U;qQ=TA5HqPDLzItGYzXEnNFSos8+K{6d8 zGo`a`7cY%#EhDL3V{|2U@p)X;++^HF<6_k5Nk|$rwcO8+>VJ2$j-4kU+75DfLb4BF z!&*sAml46MS?P)jU+{)KrX_Z0o5mb&&W6>w;0cbChS5OU^EzEH&atT#JRkeEgH+B+ss)tdjMI7Nv|CiaJJBwVFjHTsqNOiCDymZH| zOW`uBSI67OO|`g4drB_%H;fH2kqPE{$v+R$OG zc>yu7L*~NLW^)nFh>xdZL=sq>bl^2+yBFy@8L;v%p=GE6vo#V5@Zhlph`?ECp zOLTIA<-?5m?Ve$}Hy|wjX|ZULANA9#=YBD==azxjkInM0+0d~AgcdN*nscc^XIJvC zo`=vL5)#_QS_|4n<}L zci6Db8-_*7aqw~-n%6m3bZU;4HvM*#$`hGfzRRmJx(8F*0n{6|i3Ix+h?ZCaH8G0c z;5~}UxBtlk;5|{U+??Pvct7sec)6-lDZiPD7G@P7(6E^PBHvT#u2QbOS^DMEUVXed zxh^kgF-)D+`J7sw)W_Uyc?PAuv2R}-d%&7B;emYC;OrEuAB&v=+Seq|ri)Stjui{a 
zUd3(>0)xQQ=#M8>=$mVmTp2gPABYyJyg@(pK8Vv@vXjBPb!#O^ z1!8AX0PXXzjjbtqx?&tGudTQ#tpq1gPpvXE28WV#`X=&8!=3K3l@gzu@nPJZOR#Bum7Ui>^i48@ z5e{=M(xQ{id;Ux)@%c9o9p=slQBUk=j<`<$;IRC1GA~j=MQd#Hhga1V4yvXKT^Z+h z01}lM;pCKkIsb~*c%SP1(*AWQTScN4Sj)`Sm9#cJUFFvsi=LX-fQBnNP|RU*tg3>g z2r)wRPoI%bonZGpy@#?L_fU#W*F7!z_QpLg+;YY%tyk4_7nawBIVBmi&n&r($fXNy zeGPZ@Cwn-7I~rG<+Q{IzIiubi=~()GBkHGW^XzVouNOHxyG)F8EC{upTofZRuvFsrK(@Y#*)oZHV~YO`o2ao|P?r zze`Bo{G(a8S3>$D&@PCX8zya!-zT>Z&x4}VGJ^bhR*$7u^Kus{hqmj9)_5dD$9xyYNO3f%wQF+rJ+w!Sn$$+^ILA2NVOD>I->z~kg zaP8EgjY?3CZ*l}Mp}jSLBQCQUorjqfw$7?@#*n*O}esy}=>MyC0~mW%B0FH3fMG zi_T9N&F20nXh9pHpY-Z}@#UKNM3Vhck{>}CFw3RB?16ur{xKj{*Tc2WH1i z9-ydvpNKGjIo07mP43P)!ymc^%`eA$64zf%<)T}#c}n?sm*MNPuQy&CA<#UFhR>Jk zF`;6aqbK9oZ_v7xef`X_z}U6LZ|g-$IE?MOlwtOx{;4ARpDcnoNzn4NH{v0NBia!) z+Jt02ZyyPBGMe1sY!@S#rD{EY)Q<_pyRtH-H>Of;U-6}F4{HK;%;#r_=Bz|y3P;fW zeT;g3F-Fzz=+2oil=}fJZBNSipr*NaaNcjX(Y&^MMM+B#yKkg=mGHPY}P6T5ytp?nvS~U4NP}d9HR^9fRF2P}W>< zO8n^AvAN|L3Q52MBU{jz<(aR#>mGNyChh#KTr3Z`mp*5iIaDvwXMH{LG#+}B7& zXNkJoRfh8h5T5L|0dH3q9B1?tVLTKgiC|Sd6A8E1V}0y;Vvma)Li=3nAE1fw9a865 zoLWy0X(>sj2UBKOa z%&x5vsG;rP0uW39n=fSzehNZbV#CM;e=iZkr=Ft=HO?drEg@Q#hTmN~ak3k8NaMf6J>p5i=<^IIJ*`6f}YyG~r)-r4kBrITm zXt+?8D#y*`yn=wdO)ugwqaS7M->`zFTFVg%83`tNye((srxp#ZWK%#gPBd!S3#7xPa9ReYop^#+dpGK;M&! 
zlMxG~`hgSqzcyHqfBQB!Q@(ZRoLVmxEy8;ypX;ed);gvsA5B*vHCk;3Px8Lw8(6ob zY!I+=C)(eGmL%!O$_5i9(hF5>@Juz^8n{eP@zTF)Cb+vfOvscUW_qGb@;=6oSUh<1 z_4kXTX)Z+Ew|c%5?4jdZJiAhAzN5ize)6d*U;N2Ipo7$#@FNl5?XLy*4ktK5V}~9T zvHdA5(ua=C@2mY2(2LjoWxD|68R+H{=L5;c2%*^!P_- zB+&kxjVzeb^i}q^X%E&#-^=_nuoVWG<3sxW+BtO+{kXm*g(D^dsGkJm1p)#Mk0>ZB z`)yoH^cFlZF+2K1O=4foQUrV!d1?6S1g@)FK5#D{UcI^xvMV|;*b>|BB&Tp9w<4aN zy$O2bLjW44l>TnD;`iRcG$@lc2sA?B_JpRAC>c{Ek*Gil1_(=kGb7!doOW_V-VpG6 zvmU%YiI7RZD+sIv4T^+R$NZEw>a_{th{wq29$xXy%%rrfHslP`?Q!g_;IBMv@K>A| zA$$)%L6p7AilulgtnBC!xV*P^K*Wey)Tz8U4SP|Man6_~3=$Vm0AwyO8kst7M|SvFQ3Sgq>e}jGp1&0W`Pw;9qEjyc5H)cnTCK@bK)1V#HR_}OupPsMX^*Ie zI>yr8xwNv!mvtSY2OSj_P3*e3NR{um+1m@!8V%EcbZ?~16V(CHHY}GPt)_t7d$Ia^gh>9^NE8_& zs0bD$0mVNW;47@I&)EX;Z19OfPit2#(C}d*crVRu!$+kqXP0=`f?1{5v>&!wfOp@i z`T(}MR%%7K-PWME;R<}Ld>!};l#JI>`g)?xN8IwaE7P%*XD|^DJNEW3u~ajg<#H}H zM#x{I0Yj@%@|bjn3^3kB{3`LVQWw#Mib2uM`fg`#YqaaC(9E;{f7%K?2dK9g%I;Ro8?NbZvJmF-LWO}>2Ax0O?&AO2obM>d2Ne^nTdd-;q!`p)#&2US9FGph8qqb z;l=$YiO1TktrC!?LZQ7_3MhQM5K)b_M{?IwwDtZQKRico#5d*;RB^#w+mLZT9qYuwPc9>CWaQG-Z*(LRRQ5df5aE(!oTQ zS($9fY3SkX+%Nn_-@V%$dZ9>QfD+9D&Y-R4VNA=Fs&)_zFFBFFn+>5lo$G06k&32K zrPjgAm7Gj>pDkxuYNfg-rC`Hpz$_;kr8f_f%CnovxTZP20#_UJrP*_t2`-Tm5`7e& zup#A(IKj}g3mV_=lTh*F!J^AS=8_H#^~bgC@!br;@}q z%36;Om$zLLPD(uTna8EQX-7n=HG#tw6ZV?po zKaNx^Zf2L}sZ|00(1d-t_y3Jiwyb5gkYN6}X;{|Bn(Ozlfa9t{5~njl!$#xK(@ zxPLm-FO!gCcroU4ekXg*XXtZei3uVv!}_#WEMhj@7eCbz{k``4Bi$A+{rn-^%zukU zv`q`C0ZzdKNX)imSNCN#6Cz7l(wLdiUGBh0%t(`MG3UWIReG*@K)D&Df$ns0NjThe zx!gN*WlUee$_VuieQ2HN@{)#RAB<0O(u-B0G-1>mms34YrwRs`KWEWa(v;02*KkCs z<8U)`%m<>j^bf0FpX}Ve?whom{}wqmtNHkLUjGby%f26*FRcdqiDs;wP8vwYVhwk5TzX>A9{~^%^OC*Ir%OU?uTg(Vt2v=s3my6i%4dDRed3zPYhOpYia+!ATh|oL7c} z4MUa|`VO13pR{)hd)5IA9lp=se%vJmwcVcrP6H*-p>d z(#C(r4G?exQ|%`g7dh3^-FM>771U_?iv#%AcANLBlJaO@X$_ZEBoaM?9{N1)p#SyZ zdiT>p1FKsEj+=6o5_jJ$IKn?i4h|dFy>|=moBM?AlhauMqydFVfh>0k$OYL)#lyY5 ztqhIjU7=oXg;qN@4)>u5cXXFG9H>XfmFOq^d}Td_{;mdBM;R@)d4~DTR2SFbK5n_z 
zblru$9l3D_alD@qxJX%9d3^>JGWncM&!KZZBx0=llJYy<(~ml)o2Q3`ju|@xw24n=fm$CTNyf32KoilRLVg7AURwS_*1kG-x z;;h>d#{Q^`gkZ|EXI+-6z0r`M`9k;GMXFt<2&`1tmg;Q6WlxC3^@=ki#iW`kmRCr3 zg2)w-Cj!de6n3>Xo9~aM2T^VNY*NV_gq|;Vfl)}(hnBAo8LHQ_66I>|aLN_W(`tCxYH-& z2oE*s{<ZBC;XZO$@o;Eiw-&3**GFXZh_kdMF zc?w>YRpNWl8*)8)UKkU7JalnfNCE5r0tvDd|JqYswK=q(z$kN~P+lL-RcOH6-Q=Ar z8SFFm=kb7@wpR#we-0kqtq*7c&vUyb>$xN$x}G(TuaI|VD@Y7oPq2{iCs7L(8q>(C zz?Wpo2st&gWm7RTFE0?b^37n!(}i8X&rVT6Jt2qFbx7vTG#{c)4Ij>|vHs{Vs4}~| zn+b|Ps9b!%>yXOp&6umQM`JkNF~HfiPUQAfHje^0qh&75M{~q@4l`Wz#ESil$1=Qf`}-kYBxf z^O8a1^ULd;<9d5=r&a2}Gj?}cc;Gkjo~$u?DDud?-02Q@*K9q!%#@)hem&M$*V)XZ zB|=<0RZ!7%?uwfiU{KgRF?BiBu59b-)KN)IJ-*|(a6R^N(|lw*(sT2O%v)Ud)Ojp3 zAH8-cIvg5$@ftrr4m`egDO*&V40}JZb8{K5AZ()dUI#|?t)_Z)IhPkXMipJ&Dmzo` z77f{2Qxq*XTBenqyAjs@BOT_;zp%xxYw*2}3BGCl9a~+*O~3CXtH$ws^67XLg7R83 z?06SamaoihGm>Y`O>*wB1aR5-4KFo@qQ3mauqw_^Ysd!=2u1vH7 zaLhA-k+FH$jIyVBpZ925q%wY@v_JmbCc4vFLsU?FphE+7{T0r+r3n6!)p7eRJojkl ziLV1|#>XG|`<*tLAcOCWaB47gaA~GiW9=!==SGhQ-qydZ#J=tEvArWT{T|01u{y`Szk1wBz`lQj zA;Uku7RGqd89H`scQ|GuZ-1wUaZ*5;sVDoIsYR6}dk?dug(frCQB)bLhf8bul}qH2 z-5ySSdifme# zzn3n$Nx69g3MV55mis7TOif_iFT54FwB4DR$q0v{vP5t> z;g&euveoBDzUcM}_tX-L^0kGN-MpTNS`GA%a?gtjx zC290Joxu(_%U4$qfpJ>&KahTXr>BGB1yPbM!GEN=9r%S^Yy0rTs3|28L^jTNxpgFS zJC_ttCnR7!bZacLwKl-97u|ilOBBw}mx`wy^DTCFH#$BQ5f4hm#cfJIeT@a?YwJZ1 z>qdb2bAU+``xRZDqrKF1_-!JcJJjftRt5CCbMIYRpzBMUkxrEcJ`50T(-j!vU7=pz z_YzTdJ>2(xy@FSjEteNqcv6a2dR%-(U~Ej`&xRy0uFje;w7Kg?BxF;tQY_no^MZ6F znLBam((KG|ZyuoSwVf!jb%w#-y_ODGYOV%1&S}R`dn;3@%fVe4-Pz1X;vLM3Lg45B z(^X}3YqLEc9rTeD zG3@`kz~4 z8;;2j`_9SN?+j+}gd=mkOSV;dcX98s3rhBc!MyD_I>1-lFt1OX?HUnMR!D<&U*J?} zA(p3_L@!YtZ(n}O>9q6Kcg4rn@Q^;8ArubcW+%FEHv`bXyYnxxcdnDiCGOap9y#Vj zT120SyVfgDgg{?lo<@*}O*U*Hg=X&~18>OkP{q|un2@gfHf5%l6|MZXkJ~v>cYYLk zj(r~getfLmHn`Wzf%#jfA&Bxhhf(Wq`^8Ivti=0<58#RsZO%>Mr!`xmK(?QOllY#32%20&^d@1hz87%#-t_tT0gQZK6sell#IhZdfPqQ z8Mmf+Rc{`NOjbDH8Nyu&?@#81(;6$3LYlOWs9UuH22UI%fWV=6Aa5VmET5>Vi8|hr~_(hRAp!2uzavbhWJ~t&WIL?B^rcJQcQh2o=lPBW=d3iYwI(Y!|-6_TRY! 
zyp|C8*XPPK*8}t$)Lm1@joRw#hv~qZSj)JQ-it96H`oR9>=T3Gk^JpmW8M7=5E>-l z`NRZcz_Kjo$1Z!^h}xT*4JI0%0o#FfE93w>8C5{Hr{va1SI#S@Ka1vnCJIVcaK$8Y z*0coy!^03Mix*=U<&QSCdA?>nj3u0|ElsXYxGD$JFjvNII3mxW3rC)-*>I$*X-U7W zBQ=m+s3u=x9>UR1f}qDPZNCqLje)}s?mpgK0NWV=b_TnF|3Q9ta(CDVA(_<^Y~mN^ z&T^8@uZne=noC34Oz%4Z7e$UxA3Y^xcM}3_lPN?=O5;O{&3=_AoZxdfmZI)z>zJ zPF1NskUlH{i_d)=Hc#Kza^+1^X$)2kzkFW&HPCC--%P9;42BS0sui3q)~oT9{_dWq zY%}N{oLQS-to^25R`QK?J^0ks!BWDU_yi;Jc%pZh>~f~?`h%t81`-*GNRDzQ5*T;v z8$sV0&|ngIzUq{%AV##rou4Dvq6^@=F?y>pq4%X$QrvJ(JR9=F^ZjZU2gvr(=O5%R z_#5u-Oy$G(s|yp5LqJj8(ZmX97{)G{M6=|Z3gy48;S@|0!n-r<3Nelc@a15BCQnrq zu$YZb)nq^cL_m4+p-y{QpS0#o z+OVApN%YA7rIl=WKp|t_{=gd1>prLu=_e>~Dbv0qGA?6Wz3r)CFUi>lXESyzbvWgG zWjMBm4-^we<-|4-ob3ShFS596+(&WKiI!lsSz=G-GEH2bS9>#FH0RCdg*hJwO*S_8 zZ^O(91J9gv4KDMk59Hq&X)`||>3+!*P_QxdrcFNfU&k$cC0yZy^-}Af@9@PxspOr; zxx5cxO_c^>t;`|uOG&|#d(?gFqI)CASY7mE4{>J8+?U(vMbsAPxL%F_Yof6VXHU@~ z4!WhRDbogxs|U}X1W!tynN#a2+cOU)FmIl#EzH0IV;N$R%8>c`t3(d!`qlTCdc0qc zgfW|YoFYrs&wq#$`;*3%mC!u~yc^PF#gr6N*%SO?-~3N&qk6PV0*b$dKmRoNm|{{c zTIx{kaKAp5J4mGaDQorCzV)RasrpDeCqjVM-J-t3$=f#z@wI6`ddQO@GuTP)r=Zd> z`!7H~l-&a+mj$?WS-Ol9${IYv5=rUr!bADEYdq4(fFoizUBULQe1@r5t^a`}jtcH< zHK;P%)#FYimEW>qfzjowpJBDX&X(5wUbCeebQQdAhJuXwnG6%8 zVOm$xctSx7R<5bJHAf5YG;j99wNJgj>)~fC2@3?5!; zJCJI`Qdqp3P6v3zK)R4XYV6aYSLcqWqIuPgfUl7FIB?jFs|&-w@`?W$d%lBPh5P}D zv}qXW-N4A&JyhV&!2P%?UBLt*oFM5Kn_I$?p$^d#!a)JvW$9=Vf3>MyIoNk!TXM(9 z6nr|~{Tv)J>YE&eX;@+FD`<#Oy*>xG&}oAddVLgZYpjs{Qq-ukKzb5(Oc1nw#oh{y zKVz-2z!MWd>U~r$ivhz5pTW>j(vtJ-Sjw~z8cJ`j^y~(0-~`j610}v`dVVnJGsv~j z1Kw5Gav#qIMsJI9h4Q|u|A;>Tb8u96RxstF&Wwe%5#j7QsqgsH=kDFtx4yVtLo75^ zV~NL7q9I%$9p2&p#LTkLYzHmlZ#{UXqaPjP0s|FkUPek3-r^50p;q8!?^Z%v*u+gq zlROenKrD+3v(_$|fa)>K%}yH+-bE&%YG-Hrt79c%{s)?YNLP4;M$ds!w0)>!nC}*x7TULUaLYU;3N-g zS7)cYdjKDFa)-Q=)9JP~nfx&``S&3bZL5n&TR4@W0{n|5_%9CBD&bqi*pJCzFs%2h z?J5cz!#g3#Uq>CTnSjo(J)xWZdSKgR`8&3PmuBHgT3klZubCHLKsmBK0JO|R zX%B%LG20`p#>HugU-6zQuU|N|IrJp|7`#;1^^0D~UIMoo0gJAs?-6jvp+)ZZb;Z`FuEAjqHp9gnGTdmNMPFB)Isz3^11pFOVL&@^E3(=je%j 
zSGieR!d{xUE)4`-O|%Wt;)hUeHR(c5aM8M`7kCeGOB~*WiI>WZs7m$ zL{FQLjt2S(;=}zMbVbAh3`Gb1->9bxEzlG5@Blh|&5>-g3VN60iX7~h3nUlIDUP`c zWf&wj7~h5jG7m8EbnaJ%Mu_`vhB%7s+NL})#S&GhBd-8f%Ps3cJCU(g$j_}7?^8xo(QIBu zCxV(JVPAVF8=qP<`A305A4NE*fqJPg0H5Q=&eWqSfG0!zSLDr|0W)9$_!|3rgQvFE zS*Q(UGFN;k(?xGm`R(JBwtS_Xz#7q=h{vr#G`onoJ?-15b9#5&Ca@n*Xq9RRbsUn8 z)HME6?0l|#^pQW1P5^oh*(VM2TMN2l9dS-j6JYOAw*Sjk|1Kq!d4n>r62q=bfX|5< zG6adBm~dl{AksH=0$r*Z6PU-<{dYPfuy`hR^Db(DM~6r!HE$vjaHA>qGrt9VBq@kz z9Ld<&8p!?wYgSeXQ;@@^Wf87*Dj_8$pg{6d?l00gw~wAsrpT8>u`21kaJR*3jl9!` z_kApm^qW`Imby=JmknYdpYX*5l0&oUDtoM4mq@ztlY-Fp#f7r|Mx^k9P#2p=g~=v; zF|0=xfgeD9sV}zD-G|@wDKxdRFtdq2x9zh#+iSg*=YEcF=*4&=`2GR?cYUc=96(*{ z02Flw;p+ZAz>m(;Hrr*kfjSqG{l;0{A14GAbdgkcZE}YdLCpUj2gL^%=v&6dL=0?W zt&lR;cnr9+gZmrRoyH4TW08p7Owy-N0+8az(kuqZ{fhjXHFUUE6oh2rZb;_&w$$d| zIwpDBV>{S(Fq*#c9}zD*qT8C4pd(7Ljj;NI{<4r`062 zD7Z>?1HWUbt|7opg_YUm?`5+ z4Pa6Feeaf_h$Fl*JpkyWxy>cI+tK}Qs~Glmnd=lvLW!X^p@r<)rk>*+xN8t;pz%aC zhhQ3DXGNNhCQo1;!k!h5_tc6#Q*)4%bbSQ}dB*!phpXry@9jMB439w}9kQqazo7zt z5%4DZHB^S`@@|DPYxNofJ&8m^BVc1PSiP*v2@KW`F{7@fOMNfSMT7e|!(#~w2U z^BO&W-eyE#-%MDvv)$>nKl}!RKavH5GoBQ{RM(#zXec($3N%GH*~J6b?>_>m9_0*j zo&EgAeR&U40f~e~@AIN_QsiNszIqLls{9&+?|8CvxZGt&x&rSB#iOwx`0z$vHlkk64y-!_eV2wW7;^1 zo$z10sV}4h7%r<@zBO$qp2DmSLhjkJM`wY-oUF`ofNN+tL;AO^O|7FyW@4D`w}nwyVf#4 zR4;>7O?LMiU5~3AnXj!S99QXdC#y?Ef4G_g8DFztNnfI-tZ>s&2@=c|xbkD%h7b($ zK`ArAt6m6%srqgmpVzv;krsfb}W7vhfDPQZ)jp;jaqH{FmQSqC4^rQsavA?4C(RL`S3pHq+%SfkC#w7OQ-v7LST5Wth0CRYlvIz^f->>+qM5enrOA zZIShs>l3X##D;0Dwijcv}h>EGKK2K3^==Lepxy`}LFw zjUT!O1n7U6)`t12P1v=qKw9EwN96F$7RpN+7f~gZ_RhVS(MqhGhf-jFAGcBEH*bwdEiM-XV)Cv4PmrSp14Ky_HJ}?5DKGzpUSnjCJR-YdbHvqH()-Lr+n2-SgPZPRaF8 zj~uOcycbRa>6?B=v6MryYsoooxjDQxPIIc%KH`3%GpQS^gRu%BOLcTt>|n|BSb%g? 
zzmu)9z`Yoy0n?aO)LmFuk=2vQ87Y9@5MX88Ye^mcD+W~o2a0F+b4Sk74f-5_oRe`+ zVp;Y`0h^gX^7T(ENuxiL9E`@K32%uiy=JGRuWG8QqTG%T?<_}^>C{HE0W0GX)~dAN zwb9+HXhXmsKX#B^kqNVj+lk675Kh_i$VO(T^NiWsuU|C2n0>LQ=jT-aU+ayX($dm> zfvjoUGM$q(w)~@sba%z8o%@aUSGTu9af1=nhR0baPg_3FEJz3eWYTAD4V1aDYjbJW zgw|u`e);|AEX4#8nJI&tp5gZ`RJ*YyVxV__8JIqlf1vG7ZXFWI?6izK9>+-dR2tB? zED(hPV7V_ti}APeGAJh7iVcEL(k!D+=KZ$&f$!z$_y7m+2VRWr)99f0Hm9At08*D% z72#bG$E^z!X>aZ`y3zZht`;zN6t632`3eBaN;cCBlj%MqR_N{_Cj^`Xn)mjPkb4Rn za&pX+Wo77MMGS-!r2w7^{J?Z{{G~R6vWF%x2#RJ(zcXSP=TKcxU(akJNsp zd0oJ)cM0P0bYAp(b|&`RTwv3|-m|AJ=GCR2nMIq|vy)H7CA6?KfPyMGHoc#It!$sa zRi7>f(A-OB)W-7MduHq;h&zs`LY|*KKWlh;O{+7AFXu_Iu{GYw&;LxwIb@DN%q8?` z=t+S09tj&)r3{aB-J^I~|+R04LFf=)o zV#2Ym+NWGWT?nOLdZy!}^d+7h>0fAwCT`!8#$Z0Q0sG2?uR%;qJfY9&Nu+Eu{5e@| z?;6P8TAF*Ygin%_4}<>jOkcvJ|6tum?;vzL(n%gi#TdkxsJ!;@W`@$~)rvR#N;E9i zTZ?q6?=mKa|9oz*cwa4Ib)zQbS;L43#T%7so5tuzK0e=PP(QO2Kkc`*+uP(TRj%<9 z&GIGuk5NIQ%XdmNFcnMD_iv$QEU5>V*;!j0hB||LF&S6ezBUfWD+o2OmtSqaN4iI{_=JXfth4pnCTzId;LRgf zcAHqar<$ZKSrt{sd5RNAF9oy3K${dC7;1Gx{=aF5Tyr^Uepl+S1!xKQ zdnk2yoZe}D5_fc62xu%;M1dOxY3YgR@bRq9@NcOb zdv~4Q2~X_XZTnwQyVkTD9{6#YCB7eM#yflaQeLfT7f^#Mqax6nX@AxxE5}9j|HG5` zxJUhwo1-b+?6Ey5Xv&P5?<7Gu^aRb4N%Ixx=wrF$GEnC+)zgUMkDTf$w^`G5<2I(WwVE^pr32 zMPpx!=A4UtJ=23PBPyJ$?YGiAGg>_*)wFHM&+lq9AA0bU!|f>F3>boy_erYQUA;Ws zI`sq^vcE+RGbLJY2!^vqljONgy70kCFrdmDC!4n<>31Dss_eXk$ipXha>P%h!kjuEaxcC)FU{PQ_ z#h4tQj7UZq=?&=eUeh5>l9gv9OV%17G%F>`*^y}1fT zb=KMW0&aD>TKiwOiEG~A65~3Z^spxzKL5JRVgHccr(CvhmDMlW~$d#QO4>!1;=$5I;W)UYB|6qa4US2 z9rSu$?`p)xc-KIIP%29ETQDwgl#jH@IB=7XUUbE|Mn#}XGzodc&GCu;?0yBT9wRR~ zPNKR4e@XYEoh@VzAi7JL*R)28?M$q+UyOcAN_WDgy)CGz?GZnmiI+bgGSGfF(^TB0 z1);^zOi@;Mh_{axLU^gdj*X6!!Lhh9XpqV=vBw?e0{pLqT7RT~3*kW-=f_bnY!#A& z9T=kirwu{=-$RAc74+E?V*I&qPZBxoU!@rbmx%aQEHoF;t%-`MshNKL2A5auHV>I! 
zQ1F95ZmHF%>`PlB0Jcst>QQXz;Q^?vvxhAw&0puHz_M}<(f^D@9bO!k z$X5M`uI`b-U+%(ngaZ626}U*%sYr(m)~F0ng?d~nE;Z;@F;_sl|+!9^Jf z@*cw28waHKka1f2qf3CkQhTEC{_gf)PS^VEU=?Nnw;n2W%Tjc+7|kEKlx0`8;+?97 zC}H5gdf^`z?flU3v>+FflK)aXe2~eWfb+l0*RT^H!8|-NL=|P>gcfDGq9)KN59RWG ziacJdZ*3n7^g<)yG-hvyXj7*FQ~1?e?pTQ2`UzqZ!Uld0w>Gok+c6h2qZIa*|-;Y2+3eh1Bftny*!p1iKsowe6cShP4l z)3tPfk!rSqT~!gzR`$yz2hp4lh{dI~(5t#JgQOoE)SrdSeidW532h+vi4HeB7xh4` zLOg)RYfMRq43j(jBf&dj9vyUTrNd4nX(|wF$;>M-YnJ~(3|2PkIyKUps2JsO9nH6G zvFH$&6Pk3LFQUl>))m@-qOSGYmTy?t4Zj~20uyxhUsd|w4@wkJk>%`P7VEYot+G?O zO4teKGWvc5AF3=!I4OGzzXY%h?+T{YM>uw$rWHs#O$E~>94I!zeGwSmdVB&#sadIu zA><@lpFC`ja1*6>rdfCP@1)VD_@dY?v!lad4iR^E@A=cF%&3*)-s=4^6J3YPz8=KG z3#mIqwMhirZg>_lbS{B`bE?O&XC|h8BI387Zii(Z?q*4&3==zdpFJr&8T2$ki_+DX zDElZH;G&*w2E$wTk3HYoa-5*jOS)7AH zvh86XFb-!b;Slea&5A_PjR1ZW@UhSDpdt#dyl<0NFmfYNGN&mNeE2?E;@*WtDwyFzaA`ldRt^bQJ&OMvzj^~ z9&<6~@N#ozF{`(P+x%npWN|MJpY00Dz`!L^rd(%p5$q=Io6pvS8;@|@nkJ*@t{f@h*$K%VL9&6PWAYP*B0_q2b>S^fMd=|2bE zZ2*KH*nMBgbIai6!0gKm8u=?{`NfZECjEKKw$=I-`+wH#S2uQpOXb~4up@%4t*_fW zNX46peB~bd94MBng7|pNi+67cgzPakFyMoLfI!TwO@L_yhlhtN4*b&Lh^$UD*g85= z-5Cgl(hVV@YZQ3r!s6~@MT>18!eU||#AE-6=n>Kpz+vZ>7_ZHeyVfmOY=wW}yTznt zz3`Dtird;j^Cy=!BYqC(#7?k?sGUgtW+HBXX{5R4PY9$N46c^oaQ%)?6$s)|C(p`K zsiRbJ86$f8s@D3XH!Mk%t$pgnA^K3BF`6yV=TL35G&3 z>^B*!3|qMg{b2-u^r6d6sv7T8M6$SvqZQa0VR`Sq-MjCy^a|Odv~F2Gz1IVx>Q)QV zOA<-U$%=<41NPIdoxx0TArL$Xf5;p(5&A!)#J{U(vx%xZ1o1rKxvA}C`s69KMTqjv zRn@YSLti)yHW`XL_5UI3oB}J`wsjp=oK)kI>5r-{j_|+|tHS1H zVvAL(qSSu}hJb2d9Z4}()tzs3fvK+TJ+SRYb;*VM$7W6y^7^!l(Cv76sKOYYEqcdC`RZ-CNF`e>*_!-~s1ifHOe4xQ#msDGxy zn}0ph_=>nu_Ly1sC8n;cE)vkD{UZYkRf0JzD_xCKRCLgVyV3sm^vA?f)EjFf^O1D` zGzM<8(>P%6sOaG1y{iMnPBG%KX;i;Z8G<(D2xk6N;znI*Ukjj{@^#*#QYpFYT zKtRArI(E8NMzz(^xAT=c2-E-&C2Yx13D=M@FI4 z()vG-pK}B07A!(2i59sn3%}25>5PG2w-j1VALw)4wQj%FGliO~q)yM>zPx+o6%UEy zWVzfA#)yUM$yLIi$M4JFgOpn8;e$8kWOa>Z))rEyav?SlDS(x@)!Q)XT8~i&tv39; zb8pCgOh9R)8jCgf=j#3eR(vJ>bvwIPWA>sy{^G*fis1GQfu&@kYp|@-?E-Mkm2zx*q<7-P?#YJa(bqJmsm~>YwS7=9K~f0DhJ?-rrc&a+q>UT8 
z;n76UEKehu_R0(xAg%b}#7U_K)p_FlbcHZvqQJ`^C+w8)V7<{cd#yv&x@1l*Q7uaE zM2Gl;AaZ~go|Po#B;}k7kPK1@Tz~_M1T(g16)uVrzn4-$keDyV_?uGu|86egDnXI5LVy_))*`W^hYyuK54$!R1D6nekR9Kk|IGqm%{kJh zlNiygdM9Xo>NARAf&O3T65vSvYn#bN&c+L|hZP;VyukK#zSd zTq1)EpaH4<*5!{3S^IoA(yzC~ENox>#>)aLG7K8M_pHR=Kk#sJc3`>Lj=8sm8B!S8 z7ZW65y4DE?@K2Sv`{lbxdIAk9dr#voS-s}bM z+Izz{miiUb5ukhc_l^Ac%Zi%MpvbENEQyUY`I?*;O#9Qhl~U5dthbu8+wejaWg*w2!NO>O zPHk1mIo)paIV>?3(aM%82Iu;+1`4Ip7TD1+>!CiXj@I%u+G}XSEiAJ-z5v6}{p_l` z0(@(GelI(K;I8om#u!gjG8CWoYx^!PVsIILA4np=ZrQ)L3}V&6j$U4d8Pyj1?c4fu zBte&Mq(=reH>Yc&#_Pzq{qFG>lhxvV>1)Dq=ozIriFzsqBbhDt_5eiK#Eu6s!#(cJx`(^tAIbdQi~zYlP1(IZq8}Ro9@hEs8iVnS?#{gq`gol~P&sk)ygl^O zwp%v>>x24qp$x{u!+7HZ2guH2YJ&C!DeoHr+RuZ`Ej7=6p!ukK}EK)JZrQp+)?xJ!GDuQt6Qzh z8WivB4dpgQstlfUcf7`yTCMXtFz_eo)j+N#2RlbyiKSLo7;8P!ks@`5g zxVz3z$iB>s-zZFdxhzsUk{N)i#Q^H&z&zZrGGGv+@&yIcFwyag}s--`3yH|{+TH99V-B|GnLG4f-# zZ)f%qW^&!IB}ou}?r!TzzCLpUs+(K;kp(b*mLdm799U%~NdreO3F+|>;7mtb(|R(+ zfVh%#_h+b28mo;QjL-4O#F^r9CofbG4#=3wKp~vQ^*Cdn++aWmjfG&=3OvMyf+Q}+ z7F$@U{YAH0cvzRSbZ846@ZQm``v`Qn$<*p!C#CKEa&K^-}aA6;_O%0MBArZL1b24DSZ}uYI~(p!nX*O)qkB{mrx+A)Ij!S z3Ez!Ta!wbj!vDf*TyWM83?B)Tlb^#quN-9+t9*5W&a_Hds59lWb9J=OgWyU_OOFP# z^Iv?6ud>ZADxk6{4($JBxzz}Yy=c?Vx+SfmCXm+9pJ;{cM-GZMt^@gaYye!q27W(BrE6rN?Bx{#{4%4O**>8|>@$VBxd}{B-0rr6$!fT##_# z1|V+mWcK=I{KcTKvRZdX{_>@j--6imgNf>UiP)jcsUbL`cG}24z%YRsy-@1H{>MF>tFCuB7d4q1r{^d=>fQ#s)N)haTrI zC9%`#qVi3vtFgFdKRlo%uw#WWHUt@z{vbXe4-kGx)0@AWnuU3eIs9hGKfShw z4e*|J4i5`YhcZb4p0sV0cKeS}9UtnR_8^CCARQ$U?d!c9Y3|{+Yp?3$A*a1;Mg<6= zcBM*9%qcErd8kY$3+CLQU&4gk+_+9=s?1w`b#--!SuwuNV@Wl;TMHO>yLYDR`G_rc zci$}>`V=cEHK=q3P=2$@P`x!tZlf*VZu$Nn=~f`07}E6H2t1EpG16+Vr>SXw;4Lim zhF}-IUC9bi>2QDXdb>QZSG+UD~dwT*BbG|D;4lc>&bZ9wBRf0 zJO?v(=e@zOC zT8Iw<0)mU(liYlvG*pH+B`nBiQb@O@)b`G{=gcNb3C(|WNnkH#wNS5_or!F#B+jJT z`9nQyD!5C>Xuf3S`wn7TNmR`~KO3&sD-Vc{4!#mAi#*Ov^j!7NljCFG9T@}#knMqG zHYPJwn5hWYunF1g0+k-UTxN4+1%ZN~g2b$gYpU)ch)cZ5(X|`2dJ3h|cGLjhlu7l7 zc@>hGpxrXU{lBy<1EjL)9#%DxVgi42HUZj=bp*q#?*K`k;k4(g+P6&QFl-X;etB=R zf>(j_;8u|{6-T<_b 
z;lQ#CDlSh=TY6H&l9m#bzTIr~B+Y^c8stEZhq63{BY4xbN@o8u<)E^%@UT4wn}#|Q zfMpRNVzXH0=d^F_W*vkpVn!wMT+1{-RDVp0@v-l^^L4mvjC=~ zEO4n}vswy|DeDz7i4aBSh}#}$RK|^DpC>~eXXO+Zlc)1|Vd?3;tV@L&iu^OZ2I(g+ z^@D^&=z-TwylfUZBHT}Q>#eOOfZo&24M*pThth3jE1pQ|#H?vv5pcLrLB%Hn6=N?3 z@>vz=@+76Dg+xR+rn5Vsg_y=hN+i({u(LynL}$vfrY}3xu(>}Io6kc9Cnb?tURZud zQH%{%oHyP{{w@RiXad z`vR+G2^x04p77JR;3ba22_0mBNOl(_Tl+jAH`QBvt9Dq}9A(zjnlp1N%11~qHX6Nd zyX6HMhT6L3SVg{fE5XQA06uG?>qDy(N5tV`poAUT!M^t7(^MAEnad*Wt856+cWpk|C27BAtV~NwYinCgr>l&fdvE0@$}#Qp)^gj9vxNnm(FEz+ zG$?2PU$@bg!e@pGE#3EQ*O3<0(vpDVB+;qHpb`Bf7AJaZZv8`+-cO~si@Km4WK4! zPyV-R9uOIHcZl&lAhg&>OzeG7y=^}kmlY@ht|S?``2r-{P4DpS5G3KH(=D}4FYC;i zbzY__;GM^T)sr$G{-4(i2$}rk0iEBCibjZE;ww&Zr_g>#gtm7Q^NF(H{SkjV6aSXX zw>5!0o+NQ8WTFrPe7r5l7t+^dEFs1{-Wy~$Q|+}Xr~dz@&HlP(Zc#q4Zjb9+@wmTz z*}uY^zsEe%d|-STftLRr3H|LYfG^tS)d1VaGo+A00%;QURY~t$1)G}D>wLXipxBqx z1G`VCg9&HkC3DD5o{RY}dCtFnuo)fdH;^lxwvw3w1xYuRv`*e>J}f2Tbt+V4>=h3W zP4A|c=S{)MKOR<7r0=I7r;Aj**OaWRrGM$JlKerN4@IawGM%g;q z`T6j{F~M0yO_Msw4vsvUBTke5KKW7UMnprXrPsUrFo{Z9d~)<$sl5!hfFhGXQ=a*< zgi}}`}v}3Yj*ZJpVS}2gJewHykF_?8Q zU*~>8B3ATD_-A)CmFsf;T;e}0Z~gi@wGr@TQSp7hOV8dl<>xmG?LW|2C?d@&3GAlaV1Ak8h0#bm1z9O z(iO;#|NaxB{_0ZgC|cJuX|?!cNBSOGTsYj^ir&bEx^?M}{y`4f;?E;)mcg-y9RZZ2 zm#@I?y+XS1oA=~LKyIgw#RtUQ)om->omD6IU7gA~`mv=Z;!sGbPs|no+SIoRitcvr=y@|B_k3{Xa zW7DUF?);=3>0#OFCmjUT{z9S%0>214>+)K4apBeh^Nm>KkI4}}PkW9HMH;=cZ6Fa& z4LFpI?+H)XYYYc@+*p5LCCc6*mz#M?R0Zxk!UA;o59Kr}HJ z=?Vs!v%1p?d&C$O7W^Qg0dit9Gm=wP=#IgSI`Fo&ybjKCz6km)7?0;QcX(Z^Py9htl*ntH;!pc4N_=+&h%6KkEOB#{i@JBp;{yPmP1Xy-)xNe)IW z?piPH)(Ujd<~CmGj^<7aA41BhYwZmCSTWenITaGRbea{Q zeAV^TQ#8?*G6Du))Q8kP8fxodU!8W^S)4tsS%*bHG3TQPcxUE^_LIv%n*>cPS5I?2U5guxl5h>q3%T4*J3 zZQde+v`~a&dH3DMX5MItzR-^DLZj<>tq9TG8TXH;5svTzMPRCo`hD@W`}Xqo^FD>3 zSZP25HN)|StuynLsI>k(D1?()5n+4Iqj9(BMPtT^4_z}tGbEZ!CPX4Z_;}gqJKoje zcf_=>-nKX&zNiZHRL4`lqHmA>T}g57)~fqPx=X1>&t{KYztVZ4Zx^t3YjXN#8qJth zOCj`Woh!9|^FEmv=q?$vwOiwLCfA^BE}U-XY=X{IX-KfC8XKHM*!=WZa8tC<{I(_O z4THww?P|U+I}dsPqvwzJj=Xr0xxf3~Vi})4eyv}*ru#4kDA`0aM&t`4uSkTB2dqy% 
zG8fJXf#fqyNqGg;k-=u`uLv)fYt1mk}p8?Zfu@Kt_-J4;rRf+C2|D>+}022oV-g`3V#L`2C zT~=G`9UN0qYHvq~1V7B~PxQ}=lk9~HWv9MBzOy+aO1bwukI5Ehp-2c7BBPu-Z(}Sv za!8dmp>;oik#ko|PxMe!9_`^rE?+J`WG-$HylS(z-AmUDCnk&Q}*ZDOT3R>xf{A&G1Gu;Be`IrwMh(XUc^! z_UV{+JY1PF24s(i^ehbf5wQ?1o2ET)EF;DU%QQv2;u9#NbDvdh&|TOgM?w*BWxJ~N zl2~hOj%?30@3@0IZfVjl#4_@_(N1!cPe&i2mvb3T0!JE04a-5Vj$#F2p3GW0GD$~}h9>1*_h9qt?+?NJv>pb% zCLn`@=Sb4WrapiY0-`=Zmo4?VLflScfOr^#vpE?&lCNao;AMwa$kL9?hq1e7jA5O` zUyY#8=>19832N-g4RVmqaD8;|XUge@*de{V_^*c?64~`YC_rIBFAUCwUth7s_%s{<$B2lnC*B2Yc3eJf9F6)PqYg4 zdaVmQ20l;7|rJyW#rf5Toxh=Jns zzt^B*h+IT8Uvqcj@PZi~FO}yX2Vg7}0e>bf;dixjW1WaB?TifJ|MEYe1kBdL<<@R7 zbf2D<@c%Cli5ct|ru-G}>>PTl5L9-v2Ui-cr8v~wwwBquO-Uy|uG-`c{+E_C2#yzk zXo1IIfS|X+Yhla6gB`i*)F-Itq$_o*V6D#ec(oFA!TCr)%7feK*JFx?=I_1$Qi8zm z*EwWTNW8rr6{*^0hrT;k@m;g++xgt&uA((b1kNh1bRxxan-PSxwrFsYjT;-rZjI3x z3-JMauJng&z4-#hCHy>5&S)Hae-@|T#aiR`^$3ijFA3L2zzkPD1tqxajLSVvdgwBM zMLS8EQ`+9d8FjfC2tb9(Ht<7mJ@b)srQcKhwzoSR=C{=B0NIg)y1p*+-L|KNLt#?zej{BznkhItg#V zq{D>i{!1FJuNN0nmS?Mo<3+UxqUO9mj-Rm3)*HdN5C}v|y5ZUCku!wlP#~U~C7<)6 z_MXj-j~I`atA1KLAu;J@;H;^6HT>Z$kXEWN(k%0IxlHRK;_css9=QGIH7yj&w_$AP z5FYII&&K(oQkcN!NM|46b6b`NX~I;RWCD= z*-@~=Y#xA$Ry*W2V$$VO$k97T;7^pN|2i+)w`qj(%SwVtnblB(#eR}UGH?DX7iaAU z^nsLXFCrv_$zr|k8jyHvnqQS@^t8I-iO%p|MS>gCAO zcYUXkaopUpb9F(cS1bF@_>zm40>O z{Ry^KVXj*5$xPwBppFyvV0aQj=gWe_#dU&EkdtFc{}`-q6FF)AWzGW)8|Nh5rj0+? 
zh*PH6!OX-NAmvH)CitB;{4PWIm(hPp{ARCEJjpo?bUp2SX zc))3$crZR$_)-ST(Y5QYFHOB*M`=s5?vrHDOXAMGgt5^~sK6}A*@Q?HVxoC53m4Q> z&h=m0er+0`WHI%VDx}=J!A(dWoEYufcywN;>{yC7?%I03Z3V*z6;_z5R=*G@B$+-R zCO{h-?XT@Tw)0w`dhIRswp=F!zY1$?v_1W-N1(77eLBz?^j5J>)tdu!4Nh7}GNpBB zXXvxpsh#0y@gV9QS6Ol!E<0~iqsEx1-h3D)T(#INVUJ+J&g_)IZkTeoI&hlv7Pskr z?A(>=pFj=4UOce8s;~w!S(%D|jdi_6Hn3t$o~v0LFI9inVB$GT5^8I3R0|C@s@r5e zV5NI15!=GfjcYK(PC96+5xx_Fql2Sz)yhieh;|oJAD)z?PiOa9j1k(XACD&%bi4;w zcw-CNtdd}B*qEMOj)JHAFckfX)jU(FSXont>tbDGK9u#wB<;>VXuiRk)?2UKTKSh$ zv6&Gpf>#VhUbP2f_S!WH4BGn9d01LP__=LtW@kX#RI!UNgCxnlwZLt*%DAJ5+wZ!C z?7Ig6Zc0k%vmN;KQGvASN;^lv+~G|gXS#iO5%T>R1m`eWDZb)!GU(9IbmGR!LwDEj zioyU^z^uEDA2Lp>IT849>ff}nUkEvvq2-S(aZDd}#k;{@QKL6zkn?2x(%-hNvLZJ$ zSoAjr;g&tNiI=bFcuqiOm1ti(C_`wR`Yfj4`EAvl;ULo6-xpY@NR+H%GeU9jICZV~ zgoT4ShMd|dmaK@jyE!81+v7NmC)NCqQOs?$p_RWrL8skOQWq+66C~==4QFl4wLICGi-iHeeTb-~Oi>ZEoD z2=d5Z{HtI8ZwVT{5&~WDt=msqhr+BOGgp%q-&-aK_NkN#=I+@#hD~)A^pDi%?xJ&g z&zqMcSL0qL>Q4s(9?uV=A2!?;bR8ci_&pvsU$nP8fei~ifZ9C{KNszl_Y0Wi zVkN}q&@&3lyE`i@y60+a_=)O)iO2ncpgiA@fLo`(RI07UW=OsL(-fTpIaCDC4OwK@ za@y(`1lF?f0k{_rFMxTiNpBK;Fa8F#&1H|vMU5P1%K^i~Y^{2u4&$kZEV;{+Q&}Eg ze?I&w+nHn{?s7fH3i9j$NA{B7y@3{ook>N1eg`n2s+s-s`I!WOo(ZT;kg}55=Wg3L zo;xC7f-8wejH`Dx$`SVJiTB8x?t}B}fg)mCwM|h)~UD5SIAWL#vnkFV5UeHPxP4_*5 zj=Wj3&WJ;S16!j3DhGmh91>h_DXz#u%;ug4i@u|ajOW|x&vMl+iGnhvDn{>DTa@;X zc3aas9LX0R_^r=ZvcsuYBm?2jFdhP@>~En8U3068P{63~W}U8BYC?T}V+nV(j*g1% z=CDj0-J=w5W_CyhI;Y<_6xNFtLJV|xoD>(B;||NZvI1_)JFPvGeNLfLXqOFRWIc#g3bFm9aZcWX<@oH$eU)!NsbwYINaW;F**fE9L z5J#xZcosP-jzk}-Y?t5+<4pzSoTlmkWR0>u?#bA33hDI4k80fsqw}C{9VJ>ODpv5| zEF=E<@DoiYWm1&NZ5xGSTpQ^fDUYkX&#ahxXBy0woXI5<3OG|?^KwjUX2Iq&!esJv?xxQr%cw|)# zKC2;{4H#XZl|V)CD8?^pf3z5!;bw=8Oy-JE^fWrq$T+Ox_1%XO+0lmr$SzIqVU@*8 z?EU?F?hk)l0SENo(xbMl$kVTI>R$otO`?$V!SVLI-MyojQftnG0TrP1PjN8GY1^K>WcEmY-Z1OkG$;OFXGV$3sy4>-y+TE82VzZni@FoyAz*XHCIL!V5f#G1jX5%n+6F%*HyBql+vJ-@l@k!9N7 z7;_Dzw~udJ`^yV{nT9{{x}#o{)^dW#+WDhKooj||5gqA@^oO{X8zDQwR)pv@;`aFt z;fbV?J|HRtYoTolRz|{27!}p_1O-DlEO)$8-D7$FKKykRVV@aQtX2kZuDolOaQofK 
zFiu>7XBrE!jb&S)pL8MeH+!R6Mpla#YcTgF9?K%yzBQlU;ikh_O^{$AH^T^pAE-;u zvIW)7I54&9l!^KFdx>E3mjjA@IgagTKO#@)!PZw`AF@b}89P?4)+YMURFJ{s5waac zhWk&bA*Iv+^f|??pR|Ay{@1=jPV)63hNC6MFNK+=z=a@O*TcGp>iR<-Sv+Yx&9P9~ zIf{^AwW|J`4B~tFu6xaABd1RrEdBL52cqUTCE=#cjhp8)ofyJZ@Gs9NR1VhS?Ad#a z7*K5NMiIE2l8R>j{ic#Y_Kq*LWX2E^SqbWlJDNrj?O)3QUb2gEU?xU+t{J?I6V3{I zQ~D?GZmQQ^NhK{)xs_dqrn!_Z_?>1QYj4#%vxWq+vtY+HE)t|_5v?54#5Il#7qe>A zS?r*Y*x#aobY@3)W2D^GO8pkM((A!SbEJeNh;)1EH2R6tMHsl1xQ=_u zly6B8WznbWu`A&=0`Jaj#4?=?J9Wbq>sg>^g-WuIh9_nM>3NLX+81n&nOK052#4vd z2VFhy!ML}F6=>5=dr!*C%U4olaEsm3C-TLd7xrXUmZ)?mgRQVl$`@vf-&d_~wh~a? zE9H!lIH^J$1+*l)U!{C@YmIOwb67Y+z>($d_##|eD52lii-z!N2<_Ez*V) z)v5byq|wKpKNaSw4dAfg^(@m!&LetD*i91DHveWM0E^+HAk?K@M$rtMdd5xT+Nd>~ zWbVeVNM(`R#eafRecy}W&+v1+w3Ho|3>6y3w6K4a@GFqOlze-oGSnR9!}Yn%t+V}A zM3A-naE@=s^2p_#$%q_&Qs93eK)v-whF827PJNrLaLC1N3Jil5My|TrJ1`)%)<6sq zq-n50>E1UL0w!Q!`;fawd0)ZHfQ~sjMvdOWzA8#&!b;-<|Aa}tERQd4rdF?0W+d{? zaiLYdjvDG!XS~;^Wub`W9X|OEa}{wMl@9yZlxG|>*WYqn2jDUgaSuT&x%)(xY#{Gw zGlDT?Aj#8|FQA1=UGF8D)$iK40W$KKs|a3*%8!ku67c}Mpxexf2Epenc=~Xdki9wD zI+b&GQkJtoR2>+d8J)Yyddl(1;Zd*R^bwcQJaB-|c~?VmgaU&MM8L zhVh`0=l%Uy{pHW;hm68&2HH%pF#8-l7TEAY+PRzssK+5h`TfE>TWVXt9izP&q~cDt z*ZpT3zAINWJ&ze=CFW>@3)Qgu{?abW&?}>G92#&0hkNDEx@ITm?;NMz(Osl9skI=cdN#@4*P*KYgsCiL3>`3!bvEHZ#pC37rG!b~b zs55dVH6DFP0PaDockEUeH{ww|+$CeN%|%kz(79VUVp3`-k;Yf=y-i`6D_zwOFjqD+ z1%!;$kgY>%5kd=%6$;gxH-YzO)^HV;;_s@#=Y<3mYREtee^mPoWB;rPtmPZ#7nnsM zkO9cjhcqJB-P>}i7*6nH(SdtDSCNV*-l4zyzgF7dyNkn)J_>Z)gTMr?q#%{t<@idW zSy1Fk9Z`h(p;;7Li{HW8hIRUbp~L(g!SbKr{fJ}uM6WJB8Q-kwl41hUi2c-tJD!{Y z!s@Oi>#nqnNFrf|YH=qgoJ`yZkznos7n8}sa?C5;VNl#1{&3y+n|rx%58{G+3P;LZ^4GAftD=8!1W4EV@Av$~7G&L;fecJ7_We6pB`7d19}_k8c`F+5O5>>N{@#Kaep(Wpjr9%%CgH8J+rzK- zssa)A!w(De60Z0wYwhQ=t-}uw=hUub=7smZR4sAiD;?oBQ0+_$?H2Q(mw}uMIJa4D zqsObo?DCM$Pwj)M6}Q3MHOnZj2C;()jm)-g(ZiRHL*NR%PKQ|}hg_qXXxuS0@)mc5 z$e&*=)@3ilIbMgTyxvxy`ZrhgGcehpAL@7Oo0f;5aA3GvT#qNx}ItKd(#A#h= zMqo&pZxal6n*l@PHBgkleu76dDA!#c3-K}J2~H?*bZgr5zAXyz)q6%6(Z^(Tc^emQ 
zwU0xZT4~=O@fQT_Iu>8tY*3B!D;~;LG`s}ZB&J2yfh zDC<8O8+o6}Jva;qiAr^`Q8)7T86-R0Lmce=@#%uj-R}!UcjM7d z6$TS$OqPBrNhmAt7ZAeQ!y8OIs^*)YQ|fnZdG00cz&4&{hX-E_27TiSuUITO-c)zP zP;@Cn@MkvG$}7XzZ$0u3;(;6zj2#q6YHW9_GLDv^7XR^!Opt}|T5x-u5Hi_D)s78Azm>w9BMud25j z;|OrB>+3l+1JZx=TJRe~QsU^+S$M6~$ce8fClBqAby%Wto?}{QCw7ffSUa@@JH{El zIJ`tNS+JT~)-vogNQ*yZ4g?4fbW**b`Td{)^G$oDL3Yn>ETMMbS-;)?8isadRTK17gqb1xll{^awjsk&lU}n zQ!$Gb7c^Mxih4dA9lh=Z&29c;+96+Gfj20<}oaxOp zkccb=dOU0LU7Oby>2B*d$WFw=;3!9Crde7oNJ8(Zd$I?3vw<>C$Txa{RFL3|>G5=Z z1y*Sl6rEc=r-0-!;z-`r;$J0VABct2U%Fl_I5%Uo0H|$B z!c!1C=sGX3BbfL@==22Zfm{}Fx(9j-5KHc@?9-WsgeeL9d~T`1U@YhL zgxcct;1#P&6}XxF@<*;lW2wbvUvk<9Kx?}ljHbG{$sEi)Aa3T~eDID_!4 zx5Iio{f5Z3#*Q)6XfreziFocWTIX-}es?D~|G7)#$!E~tqM!G(?uJ7j8 zR3j7QsBZ_%!(tnOdm~h#5IIG>qBE)KE!KL5D;f%D&-z+IWKey)$P4FBGd@<&CP(yd z zh-78OelK29i~!!jy?(&}&$_0RZT<1+=y-NYhcO#_o2nScD~X&GW_6KoTe(izsHj-s zf>Ye~@jkSTjZCdb7p3A7-N%nK28P*_APJTgNls&a)beByy>#AY_>NPTUq@AoyAxy_ z#TY(ZK`BL-nX7l4r9oWbI!zTqSKsk}Wk%;z6Vd{RC`D!i<$cZH>b(xj9qqlMRchd+ z>M3i}+CnD@^z1Mh>6kU`!GQVs0M5eyzKBDEh>3}vdUUl-?>`QJ{ZE%xE#D*ifV54x-@w|f6nj!vD z!Ga+sCYf~|fpx*he|BwArzof!derl*2v(JML1B5{8Z*@%3SmM83ROcfshqW1>{TVyW8EaI14k5cjvEc|QLh>KCe43u#?DC(j?KWdk=gcTg)b`@d<3@BN#6iRBoMNg@w4mx9&{0==R>8tn6 zD?*QK(IkIIj&2xU@$aR-c@`qExBuNCYQP5#b#Qnh*32nS&6~O_`Qc0msG9a4LDv5? 
zAY7C2s&}!gS(eGkfhAFtOjgqevPXML84`hK5}w~vC2>!{Pbr}gm6#$FSe%1A$cNBS z$q|X3qYb_g;42_a?y$tqJXzs0YyRe>GGjl2f(lY?n2BCfib9Z$Pq`!kgtlNrT5{}^ zt<9m#{y?~gUdlXFD@e!Y{;1KhqZ$=Ri#WFc?RtwEQ&3Q4<-v|!DPU2vT6n=wqMHLS1Rtpn~U%gYm0oGO6P4evLoh zi7+2z_b!*u%OJIzBD#_>qG4|YWN82j@^yk8(aqhZEv4DnP`$a=zQwbj$= z6P*2>`I!Ya0n!s-G2g(rk$8Y_|vmp2{Ah@EhfPQ~UdWfo^rRsAF1IT{#X0l!PO zRvNauLd)xkboFZfp+@lWHeDel{XF)umbXtjZA1!G)Q*8Cni58jGs<63pPKdmqhPhq z;wGjg;r8kLJRl}Sc~T4Vur^%No_G-UasX$QY^@vbj8WmXAO?%82J&ZYkb15RmW3F0w4!l+X?ezjm>Dp{o!xu z28r+&CHZSzzlx+hn5Jc0op70Ns(=SZqsQIWDh;dOr6vW{GlD(f1G{AYIo^IL-iz5${ZBU^j!wJwwup1J4FG_Fw} z!Ha7ndvB+q@0a*9qCX}j*v-+tN3qE?7G7kw6(uHI+q9>a#FGRKX76)Dj(V7APK28w zMXphqehNzCA-RQTnzV(K*iVlPI)+)pzu#`EXy`%pBipB?1ND{A>;La_{@L&eOnJA+3kDv$8SuKy=#CXXa^0*X==45 zUDMeE0a*40D#4q7x6_XpI`w`fi`kpwXT5kKDvBEv&`oiqzqa4(X2k8I2;_>&3o)wB zjY=KgFl>bxvj2n%djoYi?SRyu;OC0}z0J%J2xMWGvn@NfX^k7H6@8oWf*U>NlpZIrJSnp^V z>9VXLQ*J<=OvWqCTeW{)cxl0F=A-&hXo%4{7IyI@$}e69+lruIIg ztmMwvAdZH#v{I@Ki>xfddP{jwxJhYJUkA^N<>*&RrIVUv&3#~$==7j;f(`rPgCpnb zvOA5uU+L7*W+ObVroHxUgAKP%;^%&i7N(v9O^yBQl=E@LpQie}O7KbtaOdh|>$E4Q zSvU8wm-eYIwJKblw^QH86K$rjh_^o-KE}`9zuEC$ zK0~w8_v$Vs4#JP`VAd=k=C)o9!q?qdNFJYVuCC;_GD9DA3qx-pTiLc_lvZedfRv60DWMk^`nU7Yt zEvjQD6{WowYr`krCszCqs@h))9F{zaoduc;4a=2lvhGS*4pz!;!PQD}N<3M#88&4a zDx(U4}W$xiZ#sg<2LGFEzEp%Zn;3Xo^X< zj`A1FqpG9zcpA7b7uBn|4+o`6rw#BlNnKI}8alY@5Jszwd4`r&yJvn;4I7OdcUMO? 
zI%_#+Yp0Ei3s!b8EElS0tJ~HZ))h7sDy=XpQ*>ocXB_HuaX#%+`I&Zrf5s=>W_^oK zR`z=>pzjJ7w?c7kbE%bP1^REsBD)T`?0r@>pa-M zsR0elZ7L%wDhu|bYP6)oKJ1db1LypFXLlDmF1#)ma$cf$6PT71{Rbz^Ny!OQHpECX zfECOzO;(^jxiSDob_+c&DkVrca_D>dhuY3BIN}dDPDVNfS`L+F`Y_dSu;soNXQSUP z3I}KSknGHnggCfgT7WP22S#_lwxpyaEtIJNbe!+|xSdmhNBX7Dqi`befA%N;Zl53^ zAM~Ak95m{;FQqk-s*H7=US?X>bzzXq5ZCs{{0rHu;3g`#brdked2a>XBw-sdu;&V= z$WVx5oVc?vy!&^<#v%+7;TNIPbz`Evl@r*2?k==+*hm3D4h)1!sVC#&Qs6Bya(l;A*f=o- z5_tQ1lKJ_r3b%OaMcdI*9cFl3md+GHge-qhcqaruvAgV3tGP&u+t9>VOAWsgk*tIi zaC<6IzNC+j=PLm=T_0$1WHP~NX=#LDR$0vNc|abJ_ISME#DCxLu5}1v;^L(Gk*7pK zYHMrFzA0Q%7nare9dhqavxRm}EqhP3aNw$^nh2G+q3oOXygGhHZlsQc5{2M!;1qLz zN*kLT>1ND05veN&;x`Oz%z5o|b@&jiqGYh51J2I&mS>5xCM1xrnQ(r7;Y=6N=%Fx~ zLH*+=kJv;ph4*vD>dT|4=}HyecQ+197ba^k{5LR33|zH#AP1aW9^UPFaBC?sX-3+M)&!z2`>1W}uQa5-P? z2o7cgjAnm+yf>YcmY11CDmX2QrXM#HZ21ld2N*$kS%wOJ>agAe{MY5=!0+#7zS zxZ3eTMBm)`Ei-shL8{b5u9;}>2yAj=W6Bwh0Z>tRoXNY>9_Ynl@B)r>eNpo$V)wG! zO%la(5_HI$-*2c0@b1_M{7y*mK0QFQ30}}bWAVaXdhiBDltcATTSsn&kSgMbbwfp9 zInp*15Ryg~ zVmd4``F)|kcWsFMq<%q@B|3g51%`GzU7sMGjdAw>HltDoJKY<1PE+2i-TmV0->~x? 
z8A4k!5tHN7X=balT;Jo#cmCBv`=p*`{POe2SbyEA#^|LEtDgh~)pT+&PZbgY;LD2h?0z z<8O+OwInfyNEuK4W&5Y#juMJ4%zAxSd%+NDRg_~Rk{Lp;L9y&!w+3A>uDIU$AfVA1 z8GqzgN@QXp%QMtsuW(S#@QP{gK0^vnB8cVr%1xQS{X>p)eLoDC*U~#)6=onlAC3+# z!9b{6!;!IZVNs_j3G71@z>v2Y((q=kfIcM8?{_nEogPllZ*N9qA<)lzar_I3-^rmh z0c>iHsI zmJsgrirJwA$>6y52J3M)N22Hkg`#ORZ*86s@=Q@TXd!ti3C)Rs@QE;}1o}Pj&Vgqw zF9a3csQ95)qph7|`EiBX6Y|k2=JAoB)R}+#=me3$4#2aBa}5uxFq9J8;JDshY-gV= zrR4`r;Ej`dhhJ-oeLbYryHGR6to#sWAtu>^1NI&KT8+ElPfkWyFDtK4&9XK(M*_Pp z;?Tr3HDPqRjpz2+Q%?WJxE$8If}IN8Q4n6_YD&CKXa*H@MEppdKKIX?tjAL2AoBeomF zTbW+JtyD`5CR-T;%tk)UH6|c7Ye3oW2xvpH<@-hl%t?yI4g&fe6ae?%cOg@uu{r;( zHUEMICE)+rW()*OJ=M&^BhqI+Qv z^mFhcM$bWbF_xF3L$jHFyEIO((xah%rJqy(y+FjB_JYoUEvqo%2I*X=(f9X5?m3(Ri#;fuXvCzf6eU?=1%Bgd)jX zKvZflf{i0%2z=RaR$XH$aka5cI%Owe%ZIRMvUtLI!$$M^H-jC~1Ak%ThC(Q#Z!DH) zM?Q_nu5xna%7_y6ZbhxtiJPKrZ_9AO%9s|C7ZHVJvTkh1db|R1TV;zw5=6)mZjz&+ z+k4=n-BU4Qf267A#OI76>$~%B$JQt2$^Di!S*3dbRxI^X`8?;`K_Yu)Uy?iyy?aB( zqge*!>VrAIZFjtt5AIDLSLEax@9(JMNMbFnQ`=|L%Ab=Gk*ph|6Dnn3>>YT%KoW2 zl!7p|ZhAoLJ|Q8YTH~?rCTZkQ%uGyZHz`R;ZhZu=QymZw0x~H7>dOPPa6m6ej<#c` z?XSTh)g%fYa2a$3n2h4Kb zgMC!+diHQTSs39j=~u&L=)#tD3Bo)V-&KdMef;vI2E6F8d+t44=?}XYF$6EM)!+6o zQ~CTDKV1r_t^E2IsOl)MPNLz)$axq~Ih`$VqOXm(63FxDV)A&n$*#@xmR}v_OMe|%@Y^}se$mrcSHdBgNt=_w_AD4&L$TbK!Bg-9 zcs?GW+|?KgsLO6}E-Y>@j}P^Jdd`C2J)XtZ@MQ#boF>oEO|Hx2iCeI&TB%`OTd)Vc z8}*$Mhd!S)K!H-e>*ZZ1t$`7PNMDqp4oKsHy^eXK!W*UBouA{WbQX{v`gRez`8<(r zrm=v7e?G&?%cX{-3?m6u-DXR)3-FUEIy$;_8Mn?HHdJ&4k+EP^Nme=v&?pjT`A!8w zbN%H9G4joh)R6B9m%B*M_oXKzY2@s+3C0nX25|Ss+4Wx0Eaw{%#oH-T z)LARyy>A#HV_^DsyvC?Jz9dq`NeX*zC(ZbZHy2=Qm|`x}%zx)KU%B^kt#{1R?~A?z zF8ap{es;ASWAd!zQB7ZL1$#J|^%dFX{xR{smals(#ru;?QiH3`V)<7M9lMY)uV}UB zoCQy>3#20U=f^WIMm2s2q6o?42VsKov##TMG1`prtU)h6@xs48 z>3Eq{iV97e-H-Mg=`@rFp3>D%histp$Gm(z*1wUC!60#j_fKd|GY&%*$9a#G#pO-Rb&jx=shE^UYC`vIhKv;Y*I-cx$(_=~; zPVb-0RcZn)gOXG#w?J!pcnBT>Gwa;m)Xw?wU`u}mbvW=PY44ivUXdCd8eE0)E3%)0 z&AKD|{%wTRdJ5ZnF2i>(^-(RFAkAfh;$_vQ3r&^n8Xa{O_+aB@+zCP{+=Cy2D?}hG 
zeyINw{jufu0!ag~6KIzvx}+d+So!6Z_(bTI`j!zDLPsARqPW#A%(!Xg>In-RPX_;X zK^qEma1m|B<1$lg4l;Dq{S-{bUXaS<03rD?VF}o8Ek-89JK=HMkO0_NFhk3d?Or5F!yl z$rX>s7X)*jo7jI71CQ|6+3H+lU*&>GChs%SSTND-zOWVv+`?Hn_?D8EGXuN%BD!7| zJA7Q%Pf$Wm`N2K~Zs($A2B(Y)*8^eT!LX$Td+&D1FMCHpK~evFb9^OR4lfv}y6s@g z$_DOc2p(@1biA7Ienq;nFLBM$|Jz(8pbKH$2Esj+d2<{bO7N(AFl)7cwc`26>fPhb z==vu|?g`Q7n~AR9H@8H$u!Icua>ZyP^cIF%ydmyt2&9zvZn-q|n<3hJtj}~M#coBo z1!p;N;F=-xHc^NQQzlX7MwddHCPJwqYD^KeYFVJ832n7QyTh94TwFQc&#~!<5y^87 zgSdSNoG^#_pBD5c6?^fR7)J7^TIzNCPL&1xS=p%^_$K7B!U%q5rxhU(jl?U~4s$N< zlf$Gf=~Y)FW>FcWQ*+RQpBq-=CGF`n3%8;+oQbF8 z%>^J1;Y>2V1Kq}FMKUW6ho9^VTM z4ijg8t5H-7xz3Ab$yPgtSf`v?1A1hGP_}_GGx9d^)rx<2H>Rx~6ZrpQp1{V*soaud ziqBHQbqvT6hm1&{VAeh4Ae2vz^;f6O|NiwWF+4U-evn_wP5*B>`t$pM+iY_ByXV^7 zN@Ggkg(7t%KmjZniTn#I87P6go|l)txvQD5zcqwCP~c-O2E=DtggVLp-fE|5`7?Ak z++Aa5n|U;YYaRqqf-jyq4>2*QqGBVpoI8h* zu<+uRg(m9}uw@UAjTN(`uBmjQQV`)%GS1QBhO4Rz%@&5;rh>E&ra7IHC?0>s%44nM+U zWknn}JglGZ#G_xH@t_*3qxAhSSG=I|o@$Y1L9K3jp8>VT2N;7tif5^n99C|gW>T3N zx!H)Z7!Z8XRL5t_l|{baJ;+%pltl&Cq|v6so&%^L_HWduj+iz=4RwbKLL$jHs|7wnH;UM7G z4w{cGBaREnlj{?V>MS?++&$jkRy-;3`dn}dT%yG6U4AA*e7hbeyXc&N0GT`p@e62U zHr*}Yod4-7KnL`8gV_3Tio_$3K~tVR-n;?NxLMZo+f6_^+$s2?1El5nys)~4Vqe-{ zHj>!){>}Wr#dxyK`TX7?FIupMKjmPS;5n*z#`M~|JaA2GtaNod#r<6#EfPmOampV^|CeGD!?=hZQt?>-Z#{6B^hAt|@fbiz z3(kY;<>jSJ-~i}$F0@##y0f<@Odta=`pz5q;z>(VhaVYCBXl?(`OKC#OsRPo0KQLh zt_tc>p;#;H^0tE2=Z24+l!hDw9u_;$VJNFYC(qZI{7z$aHY}y#8FMJ1(%U1z#m!AbmFA1W9UrKMuR{ewPY3oJBP=c_%|B~FplEz?TgJ)+ zpFhjLT3C0eHGt(~nG?F~gtK(65{_@J5q_QmNvNlqpQO|i|CzgRw|8!ya{WN^y?IB& z*-{tIRQE@(NU$FDS2>!k4qcQU#K}IEyZFc^7OyWzsElqnM7o%@j9~ZXrsO}gQ>!k- zbJj?dHv|-v;dXDp@n#qIW$PZ|hTqqR8cFT*X3Vlx)xy7be!2D#=c7oV9R5epTVY2w zg9JW1q|O$Kq6gO;ODf$Mq0xSzJ#k)0gt9)X)SS0TtH)-OR!+B; zd!+V<@494-`hWBnu8DV>R@^a))c8J0Ax4!G`H9I0Uc2Hn=@qu!QV{bhmj%|b6 z+0{j*E9%k|XqLB3rWdv!KSJPj=Jx{ogpeKngi+JC62{d}Pla(boij3Y92FiT{{tz< z+C8Z%9A~kDq<0q(lX81| zH5Dz+&63(YC?E<5{QU1<)=hm^x~l$aV*H#>ub6oN6<}6PaeIV-#}1n`;eq} zhV1d$l;J&%*=)CIS$AN|?<+Q`7Z$j 
zh=i0>eoecpy&GWe5aKs(s6S5lJleKOCPyu%r0eE&VX7~tks)PeJoYB236-24x_gFs zt5w;}D`cE*)GM3A{y2y$S+!zDuew`8U!bLhce9nO5U=^Tjqc_Kw_JL)U zn2in5)C)C4z8r)-@}xw`6lXeXxn)3S5^)(02E%(s9b*nQX*^-*Bk|ufszWZ)8Qs!N&31zcjQ`7 zHLBml4WrJ!t=Mhtygt^HZ0+N&>US@_#R$?iB+?T~f<6kz?2;_OT`Y06PjO6T}rNe-`u+G0?Ldnmks&X(L5nQ4$%S< zOtFt;5R7<1t^InD2(SbE#D#eFHM8@|hTZ6JvuoPQ7ljjrIW-HYB73zd*rI7Rsb~1N zzP{Hl%HKr);5hO$=CYAZXR6)o-267WXtA3)Cd_hin!*HIOSu{!#Z>idNqo`!JDtA^ zX-xV-*`Btxwk%0=8e=~N z`!)kH8FW=Pon_P6+A1k>c~PR!$E=!fwlNv^4xo38r$xBp(Io2>)!U2-pt`TAV~5T5 zY9JZ%GE)*l!kOt8@C=z!i44B~dF{>Z5cu%S#o~oUlL`*a3Mxq2o-bjK@>P&1p!yYi zO3sYh_FHrlRnB={(Z}?J_N5a_0TC$A$+T`Nk45#+*tdwgRI4tzl_1YJb?SkS6Kei7 z5zVcM8WYWo%7T~-f5t22$)Uj(nQKt59HFtMpHH5_+iTushk1JlHeKfmJW0JSAY(r` za{!OobWC#|#tie*I-a>c%X}~vDugCJ8|Ei3v@VRH&QC(m2Aa{jqmn6pmdiKZehirH1YzW+??Li_JnB#PdEquOA%2M_^ogL^uMm zZ&W=al8G8`p4nrt2i5A;T0zy9spp>05oESXGmUadPO`lXfeB!D_jucmm`nJD>_V_- zD%)2Bel>D!8xuYzF)}vrHahfxEEgh1Jl}`SdI=-=_E@zouN;yTmZihi9}CRELpZbN zq6S4Iw_~w!V867~TE0yss-YwS@!^$`;315lYaH!wPN*wYr-8`2x(psSY2t3ZF?Y!I zPb-Nf>^lPNbk1f{bYJbU9oOm=}Y(i}7j3Z6400hy#di)_@0RA6MO>+j5?Ki#nsEkA`1?OVZAvFn_@w zy8-3kv}K6qek8(Uqyo#aoaUyl%UUO==$CtdwnWU(r;hYoix;$aqj^^O4CZZ! 
z5jSJEE9res-FHb5L!d`Fyk8AJ`~Z98`eT% zk8q~wFUfTO!`>Oaccd6gw%-1p7K67n!W;9ej_cix8i(v9RfT;$QyyQ9YebyJ(?CM@ zoY0C4UNv7PkSUAUkA8ISQTn~%Sk7j2a4pyVBP(;N+hFxd^`CfdO{Gz+9dQ-iHzPx{ zQ^)fa+`}>M-;tWHLPhy&guNqo4w_tV@K|jBqVwBw!*xVs$(;Vh@fn^9(3^*i46dh+ z&$1HleMUJNT~Wt%34 z83j#L&<8=7;CeokC^)ErR|Rq|2={%V_yc_316gv@-&Ta!EWr%bi=9z$7#yC!|GGO$ z8V|*dr71bkl$@M==XlPjRO60vJo$jbC4^#tf4YA!fiJtrwwXN@YYUjXBUesEh51WB z%PoOB^u3ADZmA5~&5XNnC^PUa)N-ZS3Xa~?RZK&ZVSm32{5js(@52$pb4w#AHOX4D zH@H|dKI@bMVvVZN2k&D>yh-pzIo{4qRQCn$Zs)B9 zkM~ZD^T7;FtlGIFRRQngbk1VASPH>sv4i9~I3U|@aCh_5*-{OheB9}$#~ejfJlbUd zB)5F-JbqGyf=h1*>X+fRKj3=SiEw*hIa_Hn%qWaMf}vP`F!+nx?2aa)oStwwQXsoz zFNta{$T~Jcqs`IeAB~e-tQ@6sb9BIPFb|oL$ ziuFA%O*=skO|`S;A)Sea=%oZd{u#y`>Ohe(re;|{gfl4su{}$ zKBSHOIZ8OiOmU1uoVG`+u6FvAy6*RvC7#8&4Lbj6OEw*g4=&l|6DudL7}gwm6W3}b(yvu;5`)vG zZPqpg!$al)@_@^SG1VqptuD=?>G}3X-g0ehYl*efS(8VWs+=8A~ z>Wy~AkZu$nA1nxBIcWaLLC0tRdar@wo(UlEkBub{4hh);v0AsZ3Dc7T z;+$SaH$&7u@~YC_pGGAWP>dgNmjE=GJ+=;Q!h$yB3;o8uTYRhhLYoB@lrtD{h5YeCoidqLjY>4A|s2{{R$~HKat;?~gvG1iz9%=ueL{x%ZH+v70z5WcyRYjX zv7x-e5c;Id`G*;5GW`o!!V$gDU9(N? 
z8~imEV3%LY1*c_Xw=H4TZsYFR@%8SQ-4He#pBroHOqB&9d_`J5yqDMJY(OvGFLR2H z2}MFV4uR@nEWr+(=kG#y8hDLz519AFF6cNXs{BL{YT0pbLR& z#1cJ>olz>%Pq5~yH&i{JTNJ*RJ(%>CcJOt|9`Gg$*~mf&YE>8&jm}({st3UvEGlzi zux)@?4cg%IzCtI3t#plPW%=lc3y}hzhz`)sT(z<~JRC;U|E`f&?uFNp<;EBPaC4)4 zn9kG5hSH)cg-s^19*}Zk<68+gF~i{2W)zie)lL{W5jwG|uJSTf-}k3lH@L!KZ}>Wo z@ce)1k^X}h>aB_rs6+9`ORxt1t>dj8{&@eS#a+S?`Am`40%kVj;ykJ;?lW=g1%~~$ z6oLt54f4MdqgDM@ zq~q?8&R41i!Ov9GM#7e91q(WI2Ek{hhA&B#cZ}qEp^-KNlr>aVmA2-(;0bU(sncCf zgL-^F37GXV0x>EEYCqzhW*gIc(&{-(#ttF)b@`is8i1Ku$q59oANO@9| znB(s5kZ})b?_Sw$3B|=b-8_!)Z$FP*{Bp2F&9-eJ zh#Z(7*T_~X*8QuxD%OohEk!}MZLrM%wi*$- z{}iI)27rvbJCR;_|3jbt8@&enWq(Er9FcOfKU0OB)EmMZMEK3HVLE>-~M9~EUXSnw9XCm=D5_k|vZ*b%NkYqlw3PSk> zP-6oM6CKChX2y(x0@b3JW=Gz+=Tr&B#e2xiT&zI1<=YTa57YGdUHj{ONos}}bh0EO zh-cm15CNVIUa4QIhLQg?spT-*3D5^su%Y+T|GQdTbD**hfddN$7#E5%sXMIr_St&aLO*3(D@aBVWaM*98ccB$* z>4Sw(ISwRiU1Uc@okxh;b8kC#aRoa)qGPQkFishkKC^(cA11V7;e+g0?mHwNAN;fc{#i7D#AxNi1&FG=d;u4 z9ZcD2+{YK~z{H21$U#6&0r3!#vlKtV4Dz<0maE}pbwcT7d~gVJu_3kvrQbW6;ciB5 zCA*Qi+-D78_{_}J#~W{a_b<1?JgW@|_~1yfzL11+ zIf0Bb@`8Ia%nrmmHpUb=L;ldvAab7zX;D(PrFHyLuE~k< z81i0bs(PF>6f_7UOG?q*ptjHF2Wsxd-0=>X>HM)x%3}FqxstL-h^32Q%4`hcOWWDz zi`%3Sb3)zmjm8dPCAuT>GjRexIjR4A?{(k4fJ|MwqjG+A3Q%Ic`P=6H1j!(i{*0xx z$3I+RhY+Td%$Svuk3fxWqia8v478sRH~;E2cecArqVDiJzDs@8=I>;2Vw7c{mN%nD zd1=*zVA%#})gZs*SAPhVOEmwly76ySB!x(TS(9PU)J-y}6T9;_QG#DyUS2Af3w10W zXQa;?qH;E=$9;Sc9;4|b!guuWV|Byk$(v(zyl7k1^8R?3ZoB;aIyx?bY! 
zbP?-e@!jp6d3(5We@)fE_;{?K229i)LM(U&09?yNjQtVSlKykqyh=8R8K#>`nJ#uU z=fZN7z6Tw!k43G(afxaROrtN?ygNe0` zNh|)~!bwA?fQczVeFLM-4myhYGlB5iw{I)WHW`|2iGZs`%w{qFxHL)@R0DTf9=uQS zxO{CTl1E5@fWpPv_06LafdlQ85QI9ApqedJBweVPxp}LmW8}0Dx4~&?Y;UZQMOoa1 z`}_mfrJse9AcQ+nFi8Xkcl$WLsBF*e@qkI3P}0@ebgzig)D&(FCn%6Zq1%=cam{7} zk2EhdNT&6FKX+mz|3lr{du4Vob;oxP&-_#4q^5-R~r zB?Y^NxquI3#Yk9uiAgKN`}J&a>~zWc@Zm7y+CKbWx8#W`@!$g0nId+)EqTwp@4`KK z9l2p@)xOOM7fhBbYb94R8*FL6OmN{f9?B3MFVrGw!9uIOZ`o8$0RQI!{Ko`LNe^-F z&PKtD@>Tf?ci4;ESbY%6|Fsb=HMLeCxF|rNU(wf?d=MgR6V`gJKB$bq$>6r3-5O%K zVLcZS{o}pE6(JulaJEj^$^AP`{>Wm8RqNV!96#nbft({m+o567vE#Ka^nSPMk+SIe z)(TVtNibb8^&LUEX6sa^cvdP%jBlX_k9-hnqwKV8`Z9Y;O@X=gW+)VA`8W5fK zc57)iuw_}0(@VzHLtyO7r?aks7WrGC1&%#an*wwa4YZw|9fv}6Z-uh&Fn%tt~Q79ZU7elW!g%@o50)>Rc8Io{x zXf&~ViG}k*Poni5RN_-EJz>E{cSh}OTmn+=k70>HNq}lbKz1DX1+MzqF4U+%AlkYb zSlBe`aH2_ZXEX~lHA}59_*cEjY|L#Sr5ZoQA=>%aDh1~$R`=^YYUjhabEYzoh&=o&znccg z5sX75wx5n2E@>d&J6assxiTYR>kH`od;Tj;5I&`V)KE?=|LFPXW^%jxXPuH1Jvksa z7{qU(0yd%HqOz|Ok*V1@$7L(@{HJS7xB3g*_`VuEQ+&(gH8lX7G6RC$kA$QGQ5+LM zm^zveo1nP9QleYodq!%N!L`SvWuv*mm}_g4D`&I)6D5G|l9l>&hq`5qz=!`4ta#41 zCi`B{Dl?nUAgp@^{N!mMlpG z!WIV9ybI)B)##+H+n-h1b^}ceUJxPnVR#O^9vwmo4(z4+>5pOa3f2Fq$^UGU_`9s^ z@57i8V*Y4r?a#)z-jIfBUB@-(%R_cI?+~9|f@%4eeG`Hn; z7Pq(4Y|Bx1hiko(=&xq8wzFM~5KeKjeJVR!nvTSFxfNJ$Mj{|MuAi?F#>o=5c)lMS zpl2(t2W_VUyZ;D3BjDu9_8oQC;7H_gzi{tdLvx|QTJ*WyRJroWp2G_=U#hm}H(=hY zj8voS8wW0S%zY(ZtLJN;(61bl8TjcSFDbw3M)OGz`BqL&QaWDn&Bmn?8cd7fSXa7A zVw6yCdmG8+A{TKKG3zoKk1O6g7>^tD+yr+?4t7YsOwj1d)D~WH*d~iHs=^@SO>)E8 z7;>9tTbOqhO!f9`m|kBQ`@WOq1W6t%DiNyrR|Jd{U?~nFhgeXw9 zI|PGlQP{bk`FE~<=BV@pQ)V)U!j%3QUUmFUo!ixZj|uK&89wLhk(l6Q4ebOIDt}XY z?`Cb8hS6!x&KzR}d-@PUe>?2?pekFv$(@NHqeGhvUXfd=AoNdyBxy1J4Ltp~rFn>eGjV~LHz%rl!3Q50_ys3CP!97aWuwvv@Rd!z6HI1vX@YOWX3;lp3V* z=MI|PD`P_@(cJdeN_0P)c2|sf>Qame0m}De}5~`-&g6Z}(u{kFP(U z+wElp)0ec1sKnIMzn!eW5sNvUBNRP0w){;q;=Z1(HB;TZzcR9aoW)BdJ(NWaZU^d* zOiyXOo=xV};{71qPwL?1-+V58U*}=ZK&&w5(9Xyc4R-K#QZEVycV+Uc-#{huZCXt4 
z1tr&Q@y5&~#rjkraAPXn6`Bx2L*0sPtKa^9L4N`EL@@rx1*(OQ3XtXTnr(BPgn3p$tdj{*pS zbj26yc-<3HREDQHRN&gNjZQQloF`!EBA@yk$*MQ z6+nuLfiw6)HCDQ)Vno<~6BOV9ksggeW|rayW6J6>&J>m%)V&1yRcntOC#e(vukyGN z2&2w3Rmt1Hq-JV-Cq$6=yFRfY-GqzQq>y~ipZdnXOA7>Nxg^&)vwqjSNG}Mu8V5|< z8qE4jpd|VpFAS02kbpW41yzpjzB-|R>b%OLv%I;9&gaV~wK*}5pB;N?xi#MaKd$;S zpa7CQQ(nYbhG`m~OHkbR`Lj#V-OD}t_r%9a%(F9Wc8Ss_;-Ss*9yTz@u>>;T5&NkegCiwG0iT^Ry31^z4f^9; ztMl@V?i;madZ=!L+~tD~VEAJLe`eehDEK>lPI+auB;ySAx_Pj0^psKnMH5IXn(Pg*b-F{}h9ZwOD)_5+ z9cp?`E#Bt0m-PnTX@-=q6}9HX3%%Ix4r{mLZ&SKfAzugFTPdpUR`y9Ox(&k9Qk{7#<(}Q9LeSM}|tUNfRZyi2qNB^Ix@byGNk6pE20E6RE4Ut0~9e zmYccO;7*SA>(D5gfLBL@@uZKS6kjCDrjWwm>#<6`#}eNapSNJV+3s&-)7fX==aLh{g8`k$}g2C7rW*2LV;`i zwO$l*!P&z40R81SEfj)`(Dtg~^7^hfc_J=GbUAVg;~yeMXL?)Pn_gdGqjz{zSL4HW zrV}gV@7F!YOSRZsZnkm0GrrL)@Y&v0?x55r0H6=f1DfQ>e$L}ZGwATU38wgy6lEW2JTv-m^&pF=-AkT=eV_gw`=V ztNZrWs9&bOIOi@!XTU*`4NcV*K~~2h=|A5M3bN_qVM-1HLV<_ql9n)a@Mnz5 z_ORTqE_5He7M;B1%;gALp*wxuYE7Ccr?GhuBg?)~V_N5R=1Wm9jy+;eo>p-6|97Yc zs&QU@ey|!C&Gk3I6V^B)4p>dII|nW8+N_c38;t zjh5d3HV#}B2xl;^zrsTCLKTY#kRpWn7P_c>VnNfAI9+<+U^((1BrFx2`I~-(uiy$O zds%^@a0kpJ{|?*oj=Ta4n6q8bUW>^a`3=GA?*skPr@S{Y#VWuP*!6Ro#nPI$IDXIk zj`qh#h<@?bQ8b)`pWyM<3AlsB9mUiXqobzSplQQ(78bRJvl=#fHtEq7%WYlNKIcQHD~EfFMS4DKQr9jt{1P=(|G7Bi&8|Api0p&Fn2W{bx*o*ETgHDiXh3s)U%;8(GKx#Y!ZkUwcMN{sc#IXyV= zt{Wd4$xDzfm*+5@Piln2^G4i_xt1z+a8*Hw$-O3{nx$lghu9$uqQ0H&+*8!#|Bhoe zutE;f#I!Q@Xr|%3rOfW=VA~L97~wAPRKSapezUepIfwhV?ROwTyb1GzjI8V`{_;}H z8rbf~;QoEvxH>%R@kYUi>WXs3*D{*PW|7I_$A4S<{|!3Qu-wcicLQobvnWnh`G&57?F>*KTIzlocnhKTkOba&<+4^(?_!A5SUus8{q`US@ zWfT+Q@q9-N`wY8cd+MvM6@@9&cVAx(*RHo>=4mfH*JN6m>(!m9q7w*c?(xqLY=C9C z9v|bcY7}5?|J73XJJi%;*o#FZ2p^&C9_zZE%lcu=w@ir0ZQmEtv|%|q1#@onA8A)_ zq`zpHgc`3AOXVAo<0KiMU%8m(3glgp*Zx+$xw?!p|nde;HgnzLolbNJuq z%aMcHJ4CUwN8Y(E9sJ<&(*5WFL71&#V0fP?e_vAOIL+bnBPIMe6^HLw*%?f@CBz4&I63#eyQVixl)OY@PEE` z;4cMjm}7${%?BciVzQczzhw|H>-tXG{D!1$yHA7F0 zBfdtHs{Zk`!mG^(lYYm0?{6}n4%lBet-{NyBRqGJxSi>R`(tB4Z&OCtpKeWDrR0De zgCZQwL%W=D_Z>tN>@j<-PO*r`(>>tV+G5jOyJO?q_AWc-FeZ{KK#}{~qFkK}W`h8x 
zmYa*hH{xJ+hEawJYSA)q#0(K)>7p-W9<44($$Lexll#&3-g95FusvCn4A#Fxf0VRz z-UVg}KU_iH*0(k6LMzJbsv_o%&<|G8!u9&jd2qUje@A~1vgqhOj}?pMS}-%ZwKVpH zl+cbn`MZJ5nwcqZ1n~U!^*qD7@ENRu~k)oNIm1 zytw>7S(S)o#BB4 zR^^7W#KWAu{uxx|Ukl-_>+h9XO~{1a4X~4MlxDRVHxx`wIf}B!n@m>ZTw}U{n8yQc3)W2&iDV1u6K;CwB5Ev zql#_Yso1V0Gd3zVXKdTH?WE#VDzf z4y|(X$!GdKi!ql3YDm@~MC$O2i2%dC%mCe`c~m#E21w4}y+Sz+gd8^*pJan#mi-cj zp7&7?)e|+69>96G;hUfk35pg#&tOHqW~as0b?47Bp56OVPQb6Wr?;Nw&O@eg4@;)j zPIsc-in^=4il*&9(biX!kGTe`$>9CME$nkdepO<*ny2SPQqRtmV|}Y@{-L2*%P|am z3ZDgHx^Bg_A4flHFw4|KG zfwy$$_&bpvcx&qeJ~iGBXVvyL-N#-#ml1ikOdC%rI~UMNfBeM95|Ilq)CN)ZbE+PT z6X*WdOaAx6`v1WPy?1dd=K&$P+=;FYv&#p;6R#b~K#G>WQ}i=0%U3QoM5A6xzq17U zsq_;F0=jFHto)@DxZt3ix-vUO+^(8U@>F|X?fm*>)V-@zTjIIAIRjVuG9Sm5V2u|p z3?J;bv~M>WEVC>X3W_ywr1AVCL^*H>h^_&_-4?azg!&1m1V5&lA8v^z`WO=tvaqlKWZ#e7P~uaW zHr-$jOPC}r^TF@d0YYBtl;2z})59cubz)=}?n5c5n7Dn)rOfywWl2Z{V{9-7F#}uY zfI0#$GxooChi_amTCIwMdA!V1k>2CaLy%V~EEce03zT0uB&_Bz2|RN{o1`Ul=ukXJ zDX7TD2Aey9m?EcGxN>I>GCoL>)k>fN?R!|D9fQJ3j#yd+!^SU^M4RzxZl80__QsSK zISw?M?05khmy=?;Euv|sr_Wb(!5>mou_aan&ouuJA7V#~JYD)X<7*OnDy&O6KZaJB z#14{7f%otdzB))rhr~{kIPX@1sxpGSByGNegkv;YozK#qLG_Id4#*4Y4~R2wbjW~^ z(fTvTrkWkFSEH}Z7VoLrSX3x#KH^am(pWfE_-D7=FoS1|!-3rF`-N0#??`Cb{xa9} z9CW2J#a_Rd<%J~Gp7pFp3@H~)=#qqRe9VBBKi$VI<%hS4uiLzeVwRk+$17xa>OBYH z?r<69P2BBL)s5Rm7mwV&wL(Y?HRn^%U?TQ`r5UNhl7HczdB*kWR%L&*rP}tI{MJxl zQ%oPpt5>_*8qf{Lx&C@`c;9P;HX&O<{OSMYQ~%2nGa$hb_cBWq34X!_cbAED3#}8B z!Y0Vv-hFM2h4qrb@o+ebJ2PWWeL%>MgW7bQD>=ztVHxY)TKe0nodZ_$VGY~)!s^M$ zTI)!y)bB8v(!v^k2x=}dV$bn_T)w|SHD1yZb5AON8V<6Wr(e{;K+a(0LAyHoj`eq1eagtE3f_i`olw&4E-c_$-9-o#OF^`UAHBtyo-iZ!iQ{fFYBfK+&- zbt)cjyYN&MuK;VCI-UhRiKFg6kB3Q1c%Q)9e-qm4b*`7i1WsQ@9~AM z`2x(hQavNU0q&>fd|$v>0eI3jWq)vpz0{~)33};?`2RQD{KrX|@qtBv@a<(P{M>dD z^7~b>r%=p+pf&J&t%VFHlhd}hWM5YO`4x)t-g_f)#sc2{&e8Dzu!aTR5* zlUVhfVEk=rXgNJ4vfxa`>%HgH?Fnx?eMBwaI*7NuH)Aj4P!D|3H@uY46Vumf*?+wf z{*C$3Q)u{iab7MSny)9b@AKAjrxBfiNATiP<{NeJjA2^|I|zk5RV+Di<-wD>7MuVY zA6alqkJzBIz(ng_36!b+>k!iR$D!_EicT5z_|#wu`&jMvW0&Fq(@~*zdzRy@v}4k- 
zP;2W9`VPCGk&Xk)4J@ppqzRzEsehr#Wx%z9lO)#^wGE}soJKGxcGDYcY|&cWz<^ol z@w~5b00TI~9aH)4Wbvnj4GZs3FqSTdK&GfDjZD@&+j;u5$Oq#;QTsn){C5zyD9LeFHr?U*MOZO|20lp?IHl}$H4+u6ViA^9eXwApG zjGzOh6Vhec70S+f2?z}W1`Ra!_8&9q|K%0jU9=2|MBrw7Ip;Km#q@e+!Ss5B4Y|LN zIK5{ds+F!D`~dTQjjld~?~`sq>SJ})4xS`v+bzEDR@3v9c*4w+P+vb*Seidl_PvMs zH^udtMZ6Vr7&?Mnw!Q7Z6h-k5Nontd| z!TpX|XZ*eTmyti!ZH=+Cgv&(_Hp>Z5RP0tv1>=z{W{{fUY4!JvHT)2iCB6W5+s(j+ zwO@9a!3ZOQg3SI6Hga<6bpoeLxe=wY{YE1BghM|02pS0MVVdY^%SG+rv zRwg=?rLcm?R>6ezPYegq6JEFdVv4cb?goX&X4+1RNKBP5afgYB95d&z)zRG?qXOB< zmyy3dC`zS>H7*EkA9q5>Oh(x#?xA7=gPAZIaME`-kF=EfL(0leKwjrt?C_#GY`LwNAn0QP?UH(~19u_)vj^?=29;AW28|1p`?K3!iXD!t9Rkiix(;UdDR;jivRg=OG(`@6TK4`E|Wuo>X6ipz+C zy9Fct$oFQ$^`&KH`AxwTfSyGHR_gERFb+ajD8K(k)F$xQZ<=<|8ONa4l$KTG1>(k&G4}|cKJzmf(4tXn$o9z- z@dsi1dMl%{b~>Y$q5V%t;s1vaR1_rVUgrns*LN18Fj4lh8xn-Y)h=5gZy&~z*O41? zObG`%64tY#0Li+NC4{4UrH|pHM9s^HnMNzh<3x=0oz$_tWFudRqD}9Q0}Cac9Zb@$ zwp-gs&E#xGXdjh>yJ6zR=M|f$xX?@*4lf{?zed+L>S8^+68C(>eP>$|<|Nrt_H4=% z^^|gAvodPbV&-mVO`N>PTc_pv>gDn@zJ9ZrOp3}^qzdJuPyTLAbVf6$37ZPm)h>}7 z{65rUMGu?|?Mr(z;=Y_#b$h6&iPtkxC!2i_Nzz9Nbu5flSfkDJUg3FNq6t=8j@d1|h;>fg6dG4l=(xNh-a z*w+If%;-jwzDaw>2xE1Q(7L{m*^y3=dz2Oi=6pEwXR{|^hF2E0XXzY|+3&U%ja58& zxKNb!{&%|gZ)igbW|8H5S4F#(=}Pq(Ok~O^ewa_j?DfMI!tI1;N8b3m)14-@}~$ z`U-t~011g=>mp#Eo?d#UaybZ>yKpFup%hTgz^C$nZjeD7>VjI%G`b!Zn3F9{csAHI zk;kj^vZOG5nz+#=^a=Vpa8w2Q)Z=f+$U!Fja0Pv1#|LZi@i%a$0tcbyb0u*shO-6n zuGSu9vGej4^i#zQJls30Cz2(KL#%k9LXu;M_NHnFNtTQ?nT3jgj$il)Qc`076U%H{ ziT{ZCQ$Y)R)sq;j&q&zw3dd=*I$RAA9mT(g$*D#JG=8?su2^zGB8?Xa3 zRoBk0u5Zy!BNogB60@@RzfU^!QywiwcVau=4h?6Ygw*Qm6l&JZ5Z_o>{i>|%9$O;H zDToKA=@32y)H8iOc%pm2MXi(q94E{ptL%yi8sr9hTHSKEHxJtv2 zmXs8dlS6Fh>o}t2@{fvXF;4zpMH)L7l5QhF#X>}<_bVwM-?~ARy-U6wegpg9q>v(W znDk9Q31`jTFo5EpI4XznkCdebEP=O3(vlpq-x^^GPSm zSqmC1TW^KQ5WQP9$bq1?q_^)&-!<#gY9h5ly2@h*tnU*s-7Q`QuP5qNJ0tti_;!X{ z>cV$Vo`Z*hkdFdUxJNjl0CMV#rR z-CsGoy(BujZHH?B3AFvOsNe_mAVim)AC1nt^8p}A^yy>?$r!~Ixymu!F!fNjQFC=B z()H$ZV(n2^(%~!2G}I+9c>XuqgcEgUg`dW|b}Y97i;XDGOz^f^=S?fo1?;BnbzeMvQcR_>&6UBw1T$3=2C|0=8|t*>y>`+ 
zP7)a;d#j6==dKOD#+wiQlAj&N#{KnrvMSLppL6$XkVg12#h_>yJqp)>0aE9lG3fbJ zk7LyFVW(ts(X{{Yu5^$1P08wDU45YalW?RUGPoX+}KueJ`>B#ZWfw) zR?*Obsp~?b#^pX4uwV|bU}(Ewo3AD$s8dcM5-PmNat1h@b_6O5@D(dbmh zw12dez$nuc}rY&nig$kHN+te2QfAU*(N?>lYy> zwTpSdVB6B<`X&^z-&}6}VmrXtUpadV+29tw5N0&T;??YB>N`3&(I531}*kYwpU4%*@h1z-9|q{H>RPoNKP{2jl~$5R}BembQ3J$_X(#OiP)_V9V|UG!Vaq%jOg|Me{F{98z}So z-<@B@*wS5|kkYFDc-%WwYll7$39dxu1 zaXO?21fT#b__I-;#N{A4GT0yz@w`_p*QaNNYmxB|HbU5yN3Y(F#5ur5g9cJ>(msHDx3QU*$tA)Wn9szpKVv)YQqRAext+pOhLRFA2_Fn!-JpS z7R>ic5KA$@%FX2}{wj?AUu~2Bm{D{%h_aWlpa>*WFcjy`D4kfoD<0`5wF34Io2eo# zA29a6gnLOsfh^&S;m%5f*+X2f!Q4^^XnV32Aaya%Td*k;4qmX>A0053f!H z)V1@GzNZm8qTz+A(|AW`Q+Aw$*fG#ea^g&>Z!5gGRH_poA@Ry4A7~|r@uGMJ?`2(_ zGaUqTUgiFF-3w+2ce9(NxN4ei^}FPcjdh*Q@c6?Z^8IoRWerEZi=DveGR=Rj+|kab z2KaW}15f^XPprhCd1g8e3#dPeDb?$OOWnLBx2YK)oF7cTtT%_XP6C4{+&aQzKFMdapQ!h&wHcLAF|+E%Bl-w)Y#o^8@wfV| zPSwC;z6%}p(d*q${B;1;}g@ZB9 zW)%3|flh_71Psolt;gQezYfheV{ze>lc%nUiC;&VzeoOfk0a`cdBwvxWq3KT-*@Jt ztzi3auLzulKC*SSVauH2WdbAd=`lQE?1Cb^_cTfz6#pqlV&_3N?FK>IVW(wIrJosG z3BnJwbk=N&A=rWz271_%%R*J_T*d@($5@{N<8(E&;o+*_h6BYZp*CocGJ}Wtc&h>r zJs@ai=T!Ty3J{dLd_NV(OS)QZXdg_wf;X`?!$kGena^L$-5ybBn2|5vYw?$&NdscD z%|OItKLo!=S%upA>PBs;9T@aF-d-CDQ+OaYP6^+*8tpt)Xn3JH=JhUT#({VAsy7&Czu`z3JaxF^ctPW_qWZhF4yYTklPzR zki0&hQ`I})TiIXm2@F8qo__%-;#cNYKq0^hOsM_wJy}Ivzd@ydgjhC^dAFZQIGY zf3RISYcG|&2371Q$x*R$C9Dl3D{L@zF2TlB3mI0cZmv%@oS7q@u#BihyP^!h|ulLJm|FQW`WU%P3Z9#S^6H%C|Dsep5=_EZmG=B#tRqe17 z2abkBxu~n7pi6YzDlBzbOuNAlytlt{%FY#G$d!$=HaIc7;eC6kPn>x9XP25x( zDbxXw@iw~9FqAHFn44|eJar{_KI{?F75&?@f_z{T1-(v^gw{&&IiNe(?)O#LE-UEj zmVDcnr@;h~g6J&v%>E%(2<=DGL%%F-hOWkJh*x9wf&6{ea9iyA@C6FDgSDqQ;*$$M zfXG@c3MfMK%(tcZb?8N9Go(SjUd(5!hwER)x=5M+&(Zh1Zt@f0Gu384qY-FagZW@vCTBag0@C_tQc&Q@`>>6e##}+WGWhrqKins5| zMrXfr7oLKMIat&&H@X#v8bTITCPwLleAUUlHp+iZsS+o=gyeEwn%HpZ)Z641M(5o` zwoHjC=aOkqZdp`Vo&;t=DF^-akt0z-`EybCi;N(cR-7358%O0-Jd5S-b@fYF%V-?} zzV{~nirNAgKg>#TNmzL&TFPjpi;vrlpT&J#eQ-EGzGEc+BBkqE?Lxh| zB??YX$_tsDrgbBiZ`-`t=(B;62dyrzn+E=%J-iwQ*@fvirVvDpzr#Wi0*4qP3JwlM 
zMmo+%3JL!e8D$yMc#amImIRFR<=f7y>j#;NIY0Fol85^MsQSPp>jdnZ2pYT;ng1AT zo8eZaf^%2fv9`oCdg$$n(@}?|L&?EQ{qXY~+<L z7VDQ(kMp%oR%f7wpNZeOy54_j$tQKB`uo9#hv$k<4mKYl^4hf9#V8on6t2eKq(&r> zQ7eFpsGf8PFWhayd*dwfKC7A#xN<2FdbG^dE?`TwP>y>kC;;1$lNlM9{>ns20%}Vy zfznV#JJwUTg=x6vB0(w|w(me6>>K}F3lkT=lLh`&PrIGXc0##G7*ZyIds_Dk3E zdzeC*gh>!vQW^>m>qwCKu2YZ9I}6wUUYy@3p?AxwNnYRoc!~(EGpeM9S^}fq&W=Gd zCMYU=d-|JIuc;`-n)r5frNMueQh((|#VY-gS+g-M0-khZ*?x;zeggBd1wAFDjAgZrq=be53DsC_5~jY`Ed<}oqAjU*DqFi8RC);?J&qcAefRqPTCL~8qXZBq zHc5N+AuB5>erPwVs$lB9)<%${<|F)N-OXwu4-emZq2BdW^Sd?^k+d3R;p}Kzo}))R z+`B@@cTx2&Lz=&z54?z|fBKx0pX=slxD;(s$JZNR;C!{Bin|zl3I-x;-9}KqYjvXH zH5KL6YJd9u)cTm&UuQD`__Dl&IU?YA#?%bU4m;dEw1w#9aOm zJ8wO^oN&VWV=xqgq5kryYGGvGRA80m`>@v zm+M5((EVbKqkcOv23&aqo$l29C7}~4ZDeHsbr{-E9`k-;?}(QHPSyY4=)g{lqR+><>s$3*Rr7SP>Uf%y`{)GA5+W zyEce5Yy3$`ZW0m6b#qyNQc~!KxRerOhh>bD-?vC!9K+@Us62w+}tXvT}uMUs zjG~r=_?V*m{6lexOVKhX7twO)XFc-1(Y6HM1dLz~R}&*aXe=ILlr>Qpfwq*dCW@1c zMh(x`&YGE(B)KA+)5&#^IJG33p@Lv(O6jBMSS2V+cF!gC`_3s9con%nk;{0z_JMr- zW@LnxgmV6^N=o7aC(lPsdYYgr@)D2yE^o|$%EVQ&fJvt@Ad11uHStQL)5V3PGR?+H zYsM(EU3t;QI9>TUG(6%WJs6E=XH(_J4YT(Bnb+CqcahrM(q-q4g7%~%Y(|K=64~8A z1pkTFZ_Xubn+>s0pvnZqvZJ-BPGO~iR9beCohaICHjsaLV3Qa%OvK|WYsA>uU$Qa> zw+hN@eP*-pawt&q}^n!+Qno4fwX^Mt7 z6SZiIn-3TsG$As&o3I6mmpnov3VT~8SoW1T!;6LC+=vWUw(U#vIj7-HmTz?stejB( zB`VlzS!+e3pHMN;SCm?&H`||w*-iHouOJ2K4RJUO7L+ZBS1Ga2D&&_$K5GqnI(_fa>0%&WVAJzHkNH2O4I2Ey;Pit1~Gy)@;Kzx%z}s zYq77dIcEKM^=iLOD^1;utM;7R&Gi|7o#P6%9c=}kQLaly?@t)Hg?=l)e#){54c4=;17&*j;7Xq0pN+n8`{NK(=4+*9ouXLLwg@!S}(rh<0RX zM{8?}EKz(e&Z#k4T3`6??38=dLP;h)X`!T5eg>}hs$l0mJ%kH9bpR! 
zYQ}Dd&V9?TbF(|!$9M1&r@#^pOj1+3?0aYH_*0&-C|u-kFLTQwVIew~PObaPU^oKe z0r%6&<4l{M~jSt_K{YM|q?H7Tx6hhm)RQhQ|d|7wO3=p9duteteKK z-KUt{*u!qtA11-5Vw+84c4<0Go~-PW%|GWqsjD21QwISmC@6vr`+3N0FIv%6sFBp$ zTlH-&aYT;**X%0;B9!p^toRs4^FB+|8Sqm*l-98MrKCKenwFe8VtHFzrY2+8q>1{B zk6yJ>8FxdaIyoPNlvTSRUv2qHjRsW3DlI%=Z4RS6EXvr;1pUPAuxP)q*n$HPzKCO= zIvOHEKKjLdfPm9v(1Zz`Y!Q47w(iFDB>RvIA?k!&)bF zx)JTX|a{Cbkj}c1aM)L_%?F9}-0lRc8Z~=lYOQR&kL890YoWmlH6k z^V#nnN=ypfGXa3$=Q$pKX1A+GDCA=zKfA7nG_Nt|u!n__qPO~NE_I;zkX-oiMhC_z z$7QgmKD<|II+^0su73ghvWkC zR}dGo6ykgxalkh>oO=|Dh3ELCQqz3%S3FLGrA;=7eu@HVM?Ll9T~l14uC=3-7MY~t z%hi~PHP`&2a>$(t&3waZOiG|FAX0j#zv+%{L6zR8VO{oiw%xkCBCg?!!w)ZSBM zc2kr6u~;Y#U`VidZ?1%dix=bmtBm404W4)ldYZ8tG%}qAS z9$_8g`>uetPDU&}-jablWYs;^CjQ~Wd3f3tNPf1j8+vt@Cr<$wI11)8B$}-oKC{@? zukVw+)$i$H<_n;|^L?)FV|(_N6mttby^P)I_@1t)gF+qn%~$EsDk)r)y7o5?6?B5CXxj3=ag+)y$i!tAAV>;4b~6-V{3I(= z7Kf0iOYSX3n;S5H2k81Tv5`XLTZu|4=O!L~d|*{IabJwCUC+M$nK4^D4}-r#=2vK{oV$0xP-25yYIIpuj}2EP(B zCRZnu;;qhx79#8UK+HiZigGXiW+cd`M~z1oT}%w}QBLUupaRf&$VYl z5P#z}CsK7rXqdM-u)54x1?Y1$|A&*OA-eQK_ce=pggY!6kApP(Z`U9t`A(LA4l>{X z#w%=xzuuMi=KSMmxtc<$$p&x}^L_;zBRb{ylIPClud5u*zg8Ce_D1)RgB9Uy$wACO zjXZJWgM4D7OO{j_(D9DahhWl7T!Z~&9DkszkDf6E7}ehc2HGkx30rt=_ekrhBpcqY zvOn!l@ndh<6D-=qh{nl-BqXdJrDZCersT1$b+2O`w{kbxGWJjNN9ag6v0ZeU(Qc4= zqogf^a`F?w3lhEa&@$2wKDau@-6>f@rI+bxln5uO{l^orfjJAdupgJ_6pbFs%RM9d zB)}@J{8L`1f&*=J!<$%`mLtZD{8>?1UeZNdRhRJbbo4f~gD?OY)#>4 zSW*oM+PAjjnh?7Evl!J$?hWQF#}@JVJ?tCh0xB%f75Uk@xrT$WR3bHl3bmTWhl>rm z*@T}tTqQQ@Mc+_AZ}#d4cC*?F8#1b6Y2k2m7l8GOF{;)JCbx5{x6QhqZxszOH<7Ha z=2J3!@9r44o7>58=V5YGG@Yg&a?#zv(ARwzp! 
znZ{S}b1;is!Ry`rv$bRYa0OG~JVvdn+1>a|KCn+V!81>0?X<9r3(c zy<#O;A6IyfywVyOYc%I*l^R~FW0sKDMetY@F3GStMAsAn1;yN}@AP>a_h<4kH`0y| zz^Scey?xHVyKn={CmE&n{42$X-s*aE_ZzsgI(G<#L2aSH*5z>ruU5%Njv1SW z&6D`b%H_mrt@si!iGy`t|sll}H!M zrTaXO#tso_`f}7dus@?#)`JUGZ1Bx`W7Nej35n;0Mw*3HFK70&WU5;2Wa9mip4GCz z0`mj&)x9Oe#qBMfS4Ug_u$tkmnvRbeOMuQxG=8ZLTYciy*46xwW{M&t+~4!HU;JDj zUMN_zmT?>o%?yuQqOSk}I9*(MdzjTaZ}`TG*^d{o;jX(eNCLQta_h8Pu;7O2ob3B7 z4pIeMX`88hWuisX&zBAkf!`ePKB+)oo3WG?bMv*2SxV=>PY~z5Gl76jwA>G2x=OYa zHJy_8%ilY=FS^wo*C(vFvLl!*S-xGTxl6On8gz3FL$=!js~UJbzj6tEHy2h1Wx&nc z-@}!TpCu2YNZJzCtyjM#EfL6jS=Lt(mb!+Mo)`0$O=Z8!h@nb;?H(QyMN zE@m6+J=-S|TGP+@cpO7Ar+paqKvvb)Y0`1ktG(_vgaVf5YNqB6PT+S<;sn$g_?opc zZ~L^Zw@9*o#hEP7Y;kEk(kEibklrnH>aITjMi5D?-tzLY)!2&a6XB2^W%^OG@6 zEaAGmwe(#F_C9A=5WwtSFL_2-u1Am>-d6@Roqx8Z)Ol5lCCW4eW$Ml#CfkvyTK5dG ziu$0HOwBeD49|^@jn%iGiD;_Ig$G_aQ5)Pyctt#ZQjGf8j$oo#(v8iVYKsD-j=0Wa z?>&F5OMQ_B*Kyo-o{DnkzNHr&lF}KE-Y&PZ1CUdJpPttS@t5rgybT4rhE(V?56lt)-hDR86B{G9J_YSR7IHlaGzhi3eiT+^ z)_=aW${~>zXt|KQQ^Ry~S&vL=lN6qcddp^M)aROQ$~;p79XSaLkTW?v7taf_I6JjU zFjCNdt9&KKpI7cE56h)vP6ZV=aU6~ra#sS6o7(hkwc2#_^s{wGdp~?&66-n6V_n0# zt=P4--1AaEWc1jzkne2Y8zrYgndEfes)sX2&SHIas*;oTEd@J2f>0pOHAV$vSFJXB zV?~#Jq3K)}dsVCl*`4ThUuqlK>2sl19L{5FbkSM1s^>@aqoKQ>YnVyKm#m0hhHHP) zF^tZ4W;`>ZG`e}V4$}#Gr#|=w^3{JVZIxQ{CyBF&YO;HO2M&)k(od{0KzH-zzHQo1 z*k$F^sdzsAwq%!iiA|wK` zid9WXAZ`)KaqP`(@*Jyanc)6*TO4{E$-}IX*QzHTNrEzAPWN%&k@??U18YXk1wM%!v%|; zx8C4iuN8Es_;|g50y-XA9?hkfd?uhN%RFbW*v-_C?F9+bjYu0gq9|k~hR72>K`#ls zAu{03p!u^Auzs&bV92qH4&$_??|GIO^69|WEQ~H8)Af2*uaB8i``EQJ(@2kLyrHbY z`&;<}tA=zQlUIxFbi6JL=(?VBSm!l|=oJZwUoMP=uJ}HVHKfWPVubqK`Dw|`(jTRc zaH{s=oVNpt-TtyYmk5! 
z>1mrCBu7v7#p0hsR&?co;JIcHNSEM_~Jb-0l}qU7707%}J|Pka;^;~CYQ zC;)(_n`geqR>evQPm-VCkVzUR*5sgQ(S;^RZns$!2pZnn)Iv(3uW!G*l zi^Av%AM7n{*Y|UJyRG)`ameN}?ChhbJjPMWD#imuV+xcvG(#s{AECn9kv%%$CFE*- zr;jjOGaqimjX3f(r5np-oz$CwHC;YU9!S=$HZPB_=Gq_)yW`nDvf< z$?6NP*u`)2YLw(kBl?6LQ3SSuMj3m%oR@t+T|2_&*Gh;5@*&|i-LcPh^@v&9usAcmAlb;ChRi-t0=@(5aY8nv_QMhJw)6o#>45ycMY|HME`HENO)Bos!!b(h%331A>Wz7k|jV!Y@~Xs8}iGB?y7b6u8Ut z%hvOlIe>AdX9E`%vT(s~neXF%!+a*qd=7|bsXG3g%!XzSEHq<3Yw{5kEy|=<0=YKD zXzkWeqUwVgvn47`46-^yejPj!lQVw6rZT=G{zN~#ybKgft%9OdQ*8PE-I0PZ7w3MO z5r)zOkOzcyY`@|a)(9b#6fM4jw+HL81`6eQNQtx$xdJJD6=;aKR&hGAw1GkI_MXXJq}!LUaTHj3jjIZ&*1aH; z37Xwxa}YNsvFj%)>=cjtqxoGp5Eo7-@VoRu+v@s&uCnO33kV0p3s@@Sv)E;%&pg&= z5k0U(g}IbEt3{=Qjzvh8{q9QKa409u_wpWqEWXF5iN&l?+4rkdx$ECenCUx|xJzo; zu9EVQ|EdfNfKdg5eDQw4{xkF%g9%Eh!q9x+V`a4gzen`RI2cP1%iGFbn|c**1lueX2SU$_9o@ApGraKu%?ezg!UUEOI1NNzBWEN`j6um zM;YkRni+M*p2>IJE&~5x3O9R-tDZs;Ew1)v%P_0&k4JR9sFVX zx5k^jS;N_^WCoIQIO3a5VcBH-R0Z^=_ZybZ1&Yx4hq0lhKAFz0r1@V{r>iCUx#_J; ztTn89|KS3_kl^7!HbR~45Oc=gnLJ!)IpY!^TvYV&77{1q#wk9sKgMX@S?#G;gF;cB zui=B%d5klyCu^9OAtV@aLc-{Nm*^)ffzOAb1MtG${zcY-iYd8c+)HKb64t*a!ghRA zA*T$=*C)8V8J+l*diWPBT%eB%oN?XHd9Ac?wiTyK=`G9PJ-r5XU$bw3g}aQbTf8yB zxL{p3!T+8n6DS_e5C*?LfG!!L;%K(3Srg`?5C1KUy+F$z-Id7p!mHI|w_DVXCS*`W zTwqUd!+n;LBBU9_v*f}<3^l`(AR*_M0;u}|5bqkK3;e%lqIkE?HpK|$f7`wJFFYIxbFw*vts;5@ z>KGR!i{$>h!9Ys9DVddnCR>i+h%o9F}@0jmU^B?SSDXZu}YOSjs?V!VNG zY+?)!0Lj1(A}be{&IuQK%YPFw)HEiJ$2}W`y^WI_B13RH7>iFbGTgqpl;Q&{jes$T zb1_4fX6sDA95ibaO4f>|UZnAuL+iSKxSvT4i;rKeG1bcBDHFCXE!ZMxP@1!m-PO>i zv%OD+P!q`ukteAo5z~W1pGbWBZK2}J$yi6NU57$(z$VYc(og(!HasjyUH8{pccWut zJNri>%GL)#*`s#g^23a=xoLH~uDn_+8@g%5OjY&6WuePiE_LOnI>%BPWR)@_Lh)js zr4zrCG<=@!8rNwZ+*kp!+j1%#3V&pZ+@GSKZ)@5eTS)~!uiY=1ouI= zOg7csPpuYN+H7{00n72+!Ki1{3%;cki0wR22NO)#Ydx)0tQtti@SVs6Uv&fH?6?!G zC!=ho+#-NLLo7KfS!pBN5t-8uE?M}tobDd?)MC>+x{z1`dD}TGjak*tzc(pTFR1Yz zpSOpLEJKX4OVU|D?|go?9A8IZoF@)wg!4Yt77X#@9*+;wN@}t6j;POeAtYS3^&?wM z4pkj2+I6R&!rO;2mXiioOmqV_fCdy6kYC8)3bOv@+5Lc>l(_ 
z7xgZ%+d$QM6z#})^CvwwTV33L0C5%Q`bsjAtQxjT!j*cY&YxAN3@JIMlp^DhfHP~G>2Zs+8 zS)37>vZap>;tR%~)89!kM$-F=^awS?o{$oAlaj^7ann=4hq-rU-CN3u#$YUQ`P0d= zp;=4Wce9aV9Ar!~`c9I}_o;TXZ&G;D=6J@^_tcRDI78FlS#n~Bc8duAvV~NkBbt~Q zB8WD}v!nBJLt|3i`>gltFVEGoz`@4!?$Zq?*bJtob=dJoNJad9zWnC4yXi??%;<&a zc;MLKrl1}78w>7`QOrZGjF>s=R62~S-p*+37xu=DJS6NGb%veQUs|VCK-01jx4{DT z|6%Mcfa+Y5hG8th0|$4P1a}DT65QS0-QC^YEx{$YyF;+x?(XjJpJeyly}5V4s=sQe znp5ZDJTu)r?cFmErca9%xk(LszEf#VZ`-|Nf;^;vuy@uxn}IOk78FR_euvw+jCrp+ z=3x;3ZllYxl+Jv2%f+60RO9AqTeWMO-_~j^*5Mf%-{XyuA-r4<=3s0F_WV{-y4!NN z#spd1J@>IcXRWAE)QTm`{S4M(F`IgLNNUAbl0$CDW~7c=!_T2i9SYMz%)jHD1=ki- zi2Lob<=%74DghC?#bYeE(YfE8D3k!haFbnRfx}|HZ5N%><&9XS(JefxxiRVxibKi* zEsadf>W)v-S*$&j<)^aZwBZm@gmG$x)HuH|3vH*0I_o&WX01u{Cw&~06|kw*^eAnu z7IfP73mzPbpM@!Cdqz6u!&=pi`3VUL3ME?xJQSk~!GRsmK`%`^dD=5)A`FzQ|g^ za|Cz7&trZZw3E9cSadv-I$m)E`PS#Ea(_^o!g)cc6e=o+93rt@f5BJ>(-4!M(rw*d zfB{vHK$5S>mse*the^BQh-b8HgUp@Q2G*xo+fM}8`E2SQTYVSPLoqwr6a4r|*TBwW zlmY#CF#EhlJ^VWXF|29oBbL8aP-51nRPjpMuCY5}`A8f)V9_8HL)nMV-wXQTE%s8J zOla+fLtq>}u9HD2O8aBNM@e-`p1s1_Yqb?>X)o)YhDXK0Mdw1SsW-8re8WWoRJaG( z>p52Zr>^fx{VX-~eTVmCkcMMFgRzlh!YoigbcPbxU|@AR){kkA>@+=?+_nq{Zhece zZyjOVqF(tN$?42J%sJ{RhH0Nc$rJo}0hBPYc&&7N@s$z>mOT zhFE!u<`eR;jw|6eldT)^{ftfmohFd}*1HcYqO+yt+4BK&jA3j{PHNV20oFmUH>6hK zf()F4wqDretJ&2GTL)<18y zl6u70(~yc>@vG}KN>IIV3nshnjOS^GA1N5O^{TwZaMeP2ulWcU$L?Tcn=mO%N`uQ> zvD)s0v)F-U-F`#igv-QYYqQ|i?9jkhy8&+K;`laV+f@X6an;Wh>*)%>zhhe*Wc>(1 zeCmx=yw>hFw(_WZ=|8T6&zwpwnYb#RX=)89FYf=e@@&(kc(>WXc-n1$rxDm2-mkw{ zn{FdJu)BYC$vHvRL*!jtZo4`0UFS_3!}H=}*}U?)TokzB2h=`DpBq(S{A=C@Fa7g1 z^sHX;@s*(24G9?Jb`~;%QrSKuzwPTtlGaJ*Yn6;1(0nVQw+gZDxU$z>{Av>TnZ#-1 za&`LCWxoB%7S(Fmd$34SAXwu4+L*7PGrq=$fK7ZD+t5|5(BUe+5x8McLS)% z1v;CG`SfO+#ErLatNoeD@F=~Qt=)95t9$43z4jMB%C{!A@+j$U7Hv(W(ZWXRuMRy= zO{TnY)Cf`MKAc7Ht>n|WJt9doUK!2e2o<#Jl#_H@u7Op%pvZ0xSW%0hJq%0wIvrZu zf1)Z}E@@EK!693FV!_Lb3&0+L?+x4{-zdN?AR;3KSdT540WatwvL;ok{1GtfU|p-2 z9L@~4!+dmrAF$ET3i13%U2@5xo*Vx7?%cEpf1vW)(dqU<-@NEr?do0*RDu_@^6lG=>wH 
z_S*wQ(aJ9M;P?o4a6I}H-%u2PnI*8Gtj(Y}H&3+TN{R=wqyvg+U%n!m_(7dJZV1_b zoSK@7O-&7beR;ySr=BZQ^&cD*1y-a5p{e*LX}Ofr?8hKVAvk$WfFHE5>Q2?a6p>YbRb7(WpJ0J;XSEJ?mQZ)d*JQfc zuwGHdN;MW!qNqP*516e@x7HB!aZ!yiD71fPe{0<}7V*X5UA%FdSAMu-ZQwGiTrQk64qhi|i*i zTVtq#XDya}^DfN-2I~0FD#~NM3En+e0rvR}VrVPh1`-fcTTeu_f^nbhbqFsfvqisK zwLSeDtLE%W8=2OQVN!p`QmGy8uS)tIaCL~k9z83zGQmyw)9_)JS6mEj)^atpsD5QK z%1IZsq5$htIt#%LFH2n|`J(e#)GSJ!u3 z4^~ZnkLU*#v5txIBeGvUOvW^Og4b!jpP3B$us5+wH7XEW#fa&CdAz^UlCHl6?yd;U zQmz?%UFK137cSJn=Z|=*6}s(cE{nVf!^J^1UGMJriN#u?->W*SgzTt^cTtzE-4Kbr zP8?Bu5c*hSUrQ?{DDo5JWVHZoZB8b=!CMJxqNSzC;TW|jrDNfWm6(Y19?lKQw09tJS9iWQ0-^r3eWfMn%?M)g#1aYi>wi9)~gB>0ebS)b7uVA193J zrhSvRj`dG#)BmsIPtv}D@BUi>xo13_oX{a_jy00TX^N$V>5BJt^6A-LJ#TAOwz9!V^&m~D-zsf~Mak&xD{OGt% zb#p5!uKi^Jyb?l0rGik=(mKOA0DI>u(gUjyfmtoFI$(6f#NbU$P5X0|YxGCJ`~DU< zv!Nq@BqTPLPJuj6m|=L`Uw1Tm*tjQ?03vlvNlrCL;4MNM?eKCq#F-svw4)4KOS{@5 z>|GtctUFM|*OuvxXY^zD>y>J<0n^@O$#!q zv=xyEuMa_<3e+^yg}?%7lH>m(lR?6od1N*A*>g{6TXCYISt7-z{utu6HYjxkUUn2| zB4enPee7e0RLWJTIcBlA>Y|$lvj<3lXCt^WoE0e)<({OFK~#lZ+{1M2XE;1LDZI?7 zk#l0)%&E`W=~FE~)A`UmeIc4WGZc598^P&y>kre>C#}1CQfg>W(eHqc=Usldj-%>B z81|z>533}Wfz%fx%v<}6ib|lp-%2Pq95{Oe3V6Vmg7p2Q?Yv7Axc1qb9i2}ksVqIw zo;x_&(9}xB^=0Nu%h=wzW2q#yr(F9nFN2ng9!1uJ1u^%dC1pX>KGkBrR};fG_Fs7g zKvj-cUXYhtbL<0{OuKx-#tFIA0Isxm6kh{ZAVRj6XqhclcC2Jorefx=hv%TCH9-dJ zB8U^T6Trb8OTk5QE`Jq=fPwPxRR7#EEhgV5nFP#L;ja?Fwuir|K=XL-X<|tPVn8H( z*UK|k(7uu+pS|yriU4_;3q?Ug)&1~Uo|~+)0=#e%a#sY4q$V(OohYOT`B3IHLrJ>Q zI)I?MkS>dRgd{}8+b5vz5)RnLo`gV$C@5Q;g#2@uu%sjRs)q@YR(cCxK-1%c&5+hI zU29}#rsK9lp4ZnvPWBLns4{_%Rb^SFOweB^es2eli|Y?)YGKL|aga9EIeqnZ0N(nKjqu^Yhf`M=8#6PC z_iz|C)282Y7Qke7$+PCF>K{4mFPzQJt*HDyhB41{lXp zx#|e+G zp}bcMk8kq}@nKX zbv=f;4Us`UDYF>%uIBOkn7pr5^=}YKab+yIQwFUAvaIbv^UE9AKJ7#6WfXb84Y@QV zFyk$Bf~{O-G4qEjB{Lgi#A{4^ywgWA7b4!5Unf`+57g9rYMZizq6m>~0oc)^x(e63 zT;Ca~+R8#v`XdisLbz{}JPkwZM!vjXyvO2qyyn-6!wC$EgB=FzT$Aue7X~tN^tVUD zk{Ct+sP0YfULBiO-+|-kZZ0T#?T;R=LYugD=d@$5qTrLZ=Bs~)*O*BOefyy8f z)JND(vdM@Nq(X2o^7wSq!&+$pT7CGy?2=n<#m9^iJ89Ih0WBI;2j-jb?Rh@gPZ@|Z 
zPTo>JMZd~$f0waQBF2n&bl%rDUguKUEF}y~mNi54jN|*!SONm!8b01EK*GrQ_U`E-D@?VVgU+emp|4R}Ml#FYkvy#l4 zwf*s2)$rpFO<0%7|_ZAg_uTJfA1sP1>>tP%**vT8k6Cl zy)e$B32j)U91X9yu?iDV(TN_Eb!Nwt^?B!5!0d<*@%^$|Uh!{jrc8iksZHM{TcgHi7$ZU3QOr)diV7B)2=Vg+TdHons1{cu zvME8l0>EOdYiDMZ5|Wj>e(*gURT#I?TmWUSJX2kssitxpOJfbWqLg^nTr(Es=z@^zwt= z`mqpi)a2L)i0u?c#JRN>rixa0D41(hMb=}5 zu@4xp*=<)K5hcT`_pHqETAy;i(1JM$@>b~x@g0ZZjw~Lj%~q6}uV^|`oA)?cKA*HX zJ5d*ztD?U9D!LDC8qe_JJ?428Cnm~|ge3ZKdiulttX+NVrH##?J;NSBSqzLsMj(f| zL|t-EGIJl&LQxT<M zd6puDzP#`%wg?KMT#TaxL*`%lteUPaa-k_$S`-YbcuQ$H>wrCKr~()J*TGH#xZ4J4L$VNn&}y9)|@=n=Qe1M=VD;03mFbLWWq%KkRB1p@>HOFVBnjL(J*#%Dm8B4 zFyX^Lyu2ULgS6T%1x@V5i4es=i1N~Bi=A`u;^RFBMdTi zq=h>Bhr4NcdL?P8`iohgkwtKcHnSrN!M=VtPDo}YmDIXe&08EgVek7CrZstqf6+fe zySXCWA*47RZrOhtAy9xHw$gx2bwXWy)?wWd;oPnsnlH@(?QbVH%68=a0&1V_fv0bT z6U^0s{XTdhoGwDU{Siv~b&c|VH@FX@`lvmHjt=_Ba5k%XFQ*)5w#pA(GsuTq8|_c> z_5C20QxPr_%oHme3j_&o3t)b}ts+sr_*{u3Z!J%VQbtLOG*64=J=}iy^Ydz4NRN59 zPid)uFhp6!hY?3n!S)ycSYLw_6Gt)$*aZl!14rS(!@J7*tSXYOUD?g}wP@ zf|w|>qx9X1d^9)eTW!&Nk>gD`U{kPv>HD?0hSxquhD#Uhylr+Wo7=8JncE(W#KNmg z>-7TvLBUq9%=t}lX^8)(a8%BRDd4?Zf z)(qS2wEcMy(|H}{0T!~$&q*j19w*#UibVl165hXz9Ef{J_g5Ss9XRNNrJsPB1aFx+ zt)WuRhIOJdFL>5eGf28v3kfAcLn`%Sft5XN6&LC_5`>ft>nsz~8MVGm6H5yO#K*;v zn4iBd(^LM)t$YfyT$cLB|EY2R@A9(kKawa21e_} zf*ve|Vby%Xs>?x=-N^XiX-Wfy)9c-%ky~s+7UA9dZgYPB^2rq~TH#^SusKV3<)-)c z`0eUuQ9Y1%s{vL1c013a5}A#RMrWQ(ew&0U^2emp3g=%b5hvczRQ`ID|2xmg2beH+ zFm-gu#krQz6J^h(8}Pflyuy4w^zxZO`k-N2{aPYZRQY9roDKsn>>NGL^R0}%oR+fY z5dsVpfXpl_3f^a83RqAcZ`RHqVHWX;Wf3O7uI%&cI|B_~2^-Q6P7x#dOwaJ%GT4-n z7F?_CL+Qw{q^AUgxK0ylH#VzCeDL6`SI974klHXz()z&M>Ohq@H1~L&-=7AxZE9vj>TA=C;=E;>c^8+N3e3d5e`aw z`nX>GxD!WU{oN<3)h<{~{zgR+1sac&F z7O`_^O0wTc1AY`xj>)JhlIB(seHYs=b9-Mxo;7}ijL@a`<4y1L7Z#K}>oZOZ%BQE| zL9|F%Z>t7uEx->sdHIQakthA4XRR~7<7w`vjiUS4VIms_>#xZ?eP_m}Gz)&7o5L8L-W@M6)?Y?ydZ`#I z14me52WSL++(DdjgNMHTvFCrmf)}0W`vO?biO$}Q+XS#i<4TSj02s}a6@Q*O>Xret zKRZ%Wpe5%K$psBR+kK6c>OL>a6}2VDj^-L&U&_d{FG1F{8`oufvzA`ZEgslP0C(#D z*})UqQ+P_THT3F1nXJ1yg0NYA)1tHEnVcJq4y791WS#B{ThW2+ 
ztl>uSNCtU+i0r&sAo3Q@k@bZA{#Jr{*I>{5yEVo*=M_7J^I>G+n-lS=r&U%HI*Ow7 z3$Sn|s#^;(JJ?&xmdonR9xaMPM}+GB96H1G#J?a@S9feXk|;+qPaL-iQYYChLj&%= z@-T)Mow##tu4Nv4#|}u&YJxg7?|9u=ZoK5Id2Sx3dhu>YZb6<4uHAoVUco5cs)m}3 zLnU#D%T$qs^tiRe9t{Bi)G*Nl)U=f@pl&~8xxh5BzO>3t=4b#eNgADga1ZERG`!I5 zbv@Z`xz^a|^Dj*zBXqwcpG8s4T!2Y^HHsFn3X^UHDHmQkW{08=UP1 z_(I9w_*@`<5uM(X;&4prauvDj7{DbuP$GE|gpT#*tJj*H63 zu=x7z%f|||3*oYo3jQAlLe=B2s*Nt2sueDYhNK@aAW3jk`4i>kVv;RSs7)#*TY^iU zRkT(7zW0Q;YH`I-9qI&ak6W^5}nc#+S5E1)_=J~~kuoL&XYl7<6M)`EY zp?#XswMM;QclE@nqU~cklZa#XFPe9Zb|n6wKwhv>LLhre7YMZk;;oqi)L2;)V@~LQ zC<-jTkD<6HZg^tA1?F%d=77xOW--h)8bZ;5xs2HC{>Jrxl#2hGP!sWjeiv$-Pl3Q9 zpGe~ZDaK?Oi+BF7SpSQ$v={{PuCsn)P*#+;zR}`gH;MX;Z{lxj{rATS2C#H$5SF@V ze*L@J_9}O3#x=8*X%)#8^HGh_lc|YTwK*A2`vHq0%}tlkJ@~r1Bem%YX!N>XTcq(5 zS1k5fYvs4*T<*+o6_n-QYUm0x|L1Gc$-XbB`lMgFnH_8C-=woNY@kU-&P}%kiI}G^ z2JKoFS=k$l<1k*d7TYeBXl}BF?op?b)q0SK9`q*u+DeYfG$^U5;4m4oYNFa`; z6i)l%ZJF0aq!-HyOg9B)BAjJRRoa_PP7Rf;jVrAl-4W}%&9iUIg+`Em%PmWk~ZAJP61^uS`UILB`LYcP6g%J&F z`|Z;l+ulxc@b(ov=FnS9o`?93E-hmci4)|uhrRBOi%XG*OG$K60w?CLEp)<*{uYl# zJ=K4c1QwuR&pYn0yt^nsXyYdGhC}e8asw3^jzcvQbZ84jFz#%GNfn3E#m=E$B=*md z!|gjN1t#ass};}dOswPHy*O}M?^Tif$FuCpN8)T5#Z~C-AR~=rjSfXU>yoI5#Y)H>g@2X5cJ`=b@)~|R0jlA zZA>Q`Du|`0;M=+0S%=PC-Gmhjxo?z+Plt>{t1*B09{?ec@)6^`&;KPDGNCPGK__NK z9FE>`==y2SW#WU)N&VyaPP9WIl@>bD7)8OqxN>jMD1!OWKT z9Oe;0@aJM}jH7|v6m{!3xv@Xq9fy`pD>Q0UjP=IU$N3252v3qB{>hnrql9@ooaG>n zO{4>Ffq=011a?9LQbHyNfa*NC$dNu<6+VU)A%YqSu5^2M=&Gxm3xJ9QS2P#m#Vn?p|Qs(1Tnof6Yf~JRf=?0YJ|sCt$=rzvp7;2tHI$Gs4WeLnZkYqIsj}p`oe~oeY>Nxc`+WQw3 z_@S_$98I0VO$cv#>AxqR)9vw{JM{evP(6=b=Q=D5GB)6_|?Ob*SCZv-ajf zT3|7Sz66IG01E!7w|qY|Z~T)@!)rYJV{x?}V;_~gf=i8d-Ih9Q&ny43bJxzj+fYfG zI(Cv*U^wk(>_AlCH1?-nyaBwW3Te*S`l{W!v*HjF*w8AK>5{0V%`D33{tIC5IG9Pv zy6K2aorb6d8L_g;)P~$3qvaqrrk^P@e-{K3q0sYdZR{6FChWiXaBUiIx*JH4;KUT- z>gDhkD(x1uqrUj@^n?HfZ3dvX5Gs66TS2grN$eVsP(Orp$sq&LZD^-aqYG~5e$(BV zfKrAe&6n7a7FNWR<^9uW=$X~#x?<`)F=Oq#^KC;rz9E=gA#kP?=DZK`E!bHJB^*7E zIgPaXv$-IvfTR$!yw`{8Tx zs#w)+Ms`_$W7_}u=k`n>a~Uv#=8}VsC932P^O~b`GWO>WT?*%zNeej8 
zXvzUFb&*_3rm}=3J+fbocL+&`(nKJuEvM;+#aTjF4FvgPxkSasWAYaP-OuQFeP!x+ zeRGtsLLfit9@%Pa7Mbrh6_T;rAs~cRA_A*&i!cbQs7a}@4{RqVvc%tw#msNssO0t- zV#COy+7gI#mvZ$pnM>A-s#4%U_@9djyP1U@HxL(M|u4?a^) zuH$WO_5yVbbj0(5WhVQSC)W+ZDD;-1(N|U)T%J%u^HA())I7i6`IhBvxU&N#X7atr z*e0B|dS%pe|Z zdknPaCtHp4!Q>Q9SI$?bm&9||INRmGIo)VoF_()A_w!O~at)Us3SYyZJ{IVoY@S^# z*Vtbyy1yk&=#XEEL=4Cd-~^gJ9m8d_#8f{HAlU3JkK0b$isW6kG}x|`BXmyMBi?dd zLu9ZV$!r^|nwL>Mz|(SFpk#S(X(j}5eOy`ZrM7>K@*IE%yi;i8pvqhJG=CjE zCQMwHHzKV6-R}=|;6G9`jc9@lsDGo&OZ0M!EDMZ<()AJP=`^p`z!1>E)}0@~6lt-qYFYi<6wC(Kb$#NNI%iuq(i`sW;AAl_8W}jle)J zRS6%k;zl=KxnZbyy2@ws41QI)e@MSyxtKa%p_bl^3Xc|u+Shjk_ZvO0vu4jq(!tDn zS(;%xJq*6s6UiRf&j7(|ePlbXu7wN7YsD0Bzel7}^>wf0fJ|PyqloIbM$>TY{!m1t z8n_dfO0@quCW`~aJ^DvCf{^YpvmVvs*QXf6?kIu#jZ8`#`+fnoJ5lpy_>uO0RLJ89 z?^kxfo68{#+_?RETj6UM@ogLUUB~4+DISB?uW+Cg;;`q*>6Kka(whE(2TvP#3nSkh zx&|5#m-S+}TUI5NNTw(Mdzv>PlgvahyA%ByJt1fETXUDb0UL$NGW-U zqW-4-u;m?b@7%L>WixpZFPTo*RP3S)Ol`3}9l~fHSMlIDgC~B?^tcjeyz!Ga%u=gl zXSTl%IAGg}ab-LI$$HU!%t%i}iFbxg&2Mz}LNC^BM=jEdxXX2y65`#-c=Z}eOpd^K zC5hEhP9mEcSYz}C@9yyiB77$mDBlWI2bbXhoz-}1cg#==Ih+L7^BlalUh}S$-n`OQ z)Cr=>W+jjxSY8WLY3Hg(FJ1f2>Y(B#w0RZzJ6U=taMidM;P%)_1Diqi}Fz8*4*9Z1&7le{GcMy1b zO7iS1DZYi)sl(r|qJSQCvCR10{f7zsG0_P}Hjzi*y}4z~a<@}hk<={9EYTYhKVl@I zAKV8J|KRquTDTjheR`u-YgULeCP_Wl=I~HiDk0C+JZv@_ad8`o^~;Kb5*Yaj#zO{w zYB+|xrZ5)PSN5P8rvMHNqgtN$8wbO*;EK)5o;GGUL0N1?lc0{TeuFu}?sa`=(dpnJu4JG|e-#L*acx@|fE8MvM^~D4Lx;7R>g%S&s zmQ}U*P@CelPvKt9n)~HDDs3l8lM>(f&P9$|h2pT?P#vkMDWf<^&48>)$vd;VMZM9v z7VC=Q$MY4}6nayrpPEwQJ_%})-5W6&YT~oup>rCdW~BgRX-;a{i#&U=r|-{a9lMK{ zS&Xc#7if|1bGilwG#yx_R^2i9b8`S#Gs1-ikBKYyh)B8s7i7{MQoA4VLPdAkWvsfu zPT(U*V&4)~3-NXo<-clwBf<3Tz}RR!FT@PXq=BTM?6a5q!lnipWgt6SH%lF)(wfTA z1o?sMhCw2W9X#9P{{1QYogR*>E={}V3E8#uRT%D}rmFLg=KPDWzl#!J6gsj)#i{z; zgvKu$#)J!Wt8n)5^dif)?+XIOvguER#Au*{3skD@l&H(LKb)!Ew*v%59I3`&a!jeB-UEWNu$=2U^DDFz=rqRb<0R#bHQJ zgpk~}qZ89u&v%^Hcz5Wdm21aC@Yg(Vf-ozbqdn|Vj{5S)t5+0KSo;#lovAq7;P4?6 z@zz}LaLY;~y0$L5k8TEKf$qLNAC&8Z8h;$h^Z6ruT}f9_2oBUY9~n9m=GYeBoUmAc 
z;HIiGwI?lE_g9esIaL0gdSY<>pbrr3ZLrxdwfqz$CMp3VF$R~O?yHb?j&Sd{zoogE zT0}Yaacoe*+V%AbB%+s6mI9KhuHHv<>XK(hA{*oT!}GlOTz2+%>w4(mrSau3gHAt& z{67LbIb1+WgUli2EZ$4GSY{kfB<(lNl*JTEh)*}7Zf4Cnh2EFjpwv?Sw9%P{LqgWQ zmR4szK}inz8U|zVwa|JyvVcDghI}=HkL{_R{JhJG(PkAOf8OKhJy7OPMmU4p_lVlD zxiV1Msp%_Ub$L)q@0QGEyN5|^dvNj8h3)yy{frLrE&UEGit9-V$MxnVrM3quyk=gV z%vf>rQ<44ta9(UqD>~Msgj5WV9@4ApcpuyLvc0To)dzH3MtIeBsP@!5YXd7DRK~(% zAKR_FUan-xfhN|hD4h%T4Etk+-Y~SSo9MZ%O`eR<_66?kn=kQ`?E~@l+FiQw!ntb` zmXqXD(B2CS9{8~8U^yYl?)Y9vw;dPwY}X0m=bJ0Zx+#L(9S^;0He1Bm>jj0ck+N|h zg5na7CZQOBpBY~aQFhFIm=g(2m}&>}QrxHTzf*bRe8{TBeLh0lN`2bXyWeQY6{+5p zGcu+2RpXi6jn;Wdo_fBu_^p`!mj_I+gE``oswzT8U6o;T)h5f7+u>SjUiX=a(|#3hyQpGkYygwBEi#K3uXm+E zGm;0G;*qw5?(K2zn&HvQbS+W!6{GN21Q^eDsp`5dLC&QohJ2FFe3$J8Z)RJ#o-<|L zJFuzfvi4$6_jF&lE-9WgvDDsZKGszbO3a8~sNsabaOKA9@b|`H$=~Yja?j4W zUG`5~E{1WSX=z~f@9eGnw8uE$x#W7a<+D)VbQPqiY0IxbrnB{ieW(jZ@&1dAUmTbL ztbFBg@Nsu&Z9!c)y&ULtV#fjvovoW`noT!yA4W||Sf4s;@2mF@))9Sst!B+6xtd6Z zE1#O*bDOqVaKvU=FU9xjbc(|;Sb4m;-|Tqn+dgR!v2tSJM1sl4N(!?03~Y(-fg&RC zQKJFv)ofu&I)O2p7G6~qlOc+dfaoJ_Ij>CXw#H^)N@}XncJktr>;f$vkvmja za2`yOT%PRcx_OY+ODYNN|%25nJ-2f=*_Bw7xR z(BLRs*muA@2!SdtR}TPQGbRgAa-VJm&XL{ds4^JvvP$-sNqPG?$57c z)OZH3&kutmBR%U~{zuOqbuI)>e@itoiSuDE|B=p$pg$0m2ocsa`uehTc86^os_?Md z|68HWjO1N%0VxT>K+j-$R(J=XrubB>v#W3+9YsbTT%@d4xS8(uVmg18HdDHR3`(UA z(Lr_bdNfsvzTBL>N+!OHEK`VH+1R@J!n7jXmRB#zkvfHBrXh;1OII}y7F8^t`@^X% zoYgK217i-T#9dT5jEWT>Tzba-3 z+&;g4cu}wHM6vc-CC!lEd131Nn9JfSbmh3QmQxQq-0>GYb>Ys(Dr;5zdZ5?xuN$9d zfGDdBQa7Ucm#Y&uf;aEywAqxx@lPZ;2*jA=ApKWF?EmWpaAQ{0<1s4qEXiX(Lo5^T z?;-@DKwAs*04p)>MYP(-^``KV37UD+GmX2bv5=aIzRXK~iKU!%FE3?g()6pn9=7FN z42Dw2*lGSvXkWz~`;mu!%Q4GticYkqb);$K2v(byy8APmTWM_TT!XxI%7sIwQ)$5G z@{&l*c}Z82KUYrROT(A53oDzf0*}u3%c7?K9Z{xc3+&NN2p4zYg=o7G5)u`cN-{tp zy6JnzpiVZGe=A#A`?Ff^I`;d(hUb%BKmd2a*RP^1nw1wHEG8)=wwnD^^IwnyzYAk6 z@M~EG0Ovg#^>-!icqfm@pIRgGaCw#aHS_80TeRHpL*9kNW|Na_FajQpB( z&JyZ)JFl?U*em@4_Ixu0i#kZ{D1!LyYM31l#mLA-)Nx@Bp2s7oIjlDY#NwWtf-D6^ 
zmbcjEyZyv^Q>54Q)Kt?$R;03HF_*vZ`Y%8*0r-xhfml|J5ofvg1b^V`@bUy8(aZF8 zNTLBiSBO9g2E7~(UkabVs$+P8%)2c6VXT>LQSuL`#J~B=gz4|7T%|@I$Ia#xIxMt< z2pDt=mD9kE{pg=8C>MHuj1qTI6zjKh<3uoL6oR*I{MLtouzgo;)j*qR1#9#cl4)<3 zNXht((+dNZBxy|YS0cdap2*Q(z2wbIpfByiXPKIh0DD{DJ0uZJFOL2L(j`U zK^I=;md=DSGmpCb&6@n}tF@Hsd~ILc1-Iw@1>t=Nh>Xlm;WW|zwYT5TlT3U8y7bkd zqgMXV`hSNNfRQ=(k&-cedg@<>m2W*UFi3TxV)xZb>X*0u3Lk{#f5mOsKZ3&e{0gy^ zjR|zFiuZCm;f5WFMLiMu)C4v6%5Uop)11_G=J~gw z#WAeS3(aBd<_%M{4OT^w?v{G3!h2Rl*_y`qTQxJ7`uec!LXj0f9WJN284Gw%$&J*- zgbB(2c$)+9uV7GO_{agj#OnYMbUFYTP?jGT-lc5d&<^}yVq#(_ARHU^`9puDf)#&Wa@|KBK41TvA+Yg z0`|DWLyr9Q|=;g1nx!qMr5H82J8uHVCzZca4fh z11xP*exVs>!PBF&@jKco0jtqo$qRTYEH z<73J5&UtcjUY-MW#^e_RL$}zB?rDe`T{EiN(ITK>@0LL4i5HY<$hHNX%*`yagin(*F{=y=PXWR>of0QpAq z=U=O^EZ8uC6WReKdB6ecf+5YgP)&JIYyvh!AlV#b19V@8OV^=7?&tVp%i7P4D^7+Q zh9_antxU{NgVmbbgZdXr9KqOZ;h*nnPNz&FA<8cu%^znnQ98%WP@5TCa+`s{&r~fJQoD=|x`uHxXd+*;nbgef3go-v1Y!vy=HcA%f^biR91A zXhqJvevErOCIU-A$Vwo&7laUx5{$)+BA^;5qLZcVsx2{7XyW`hFB8SRa3FdSN;gkZ znR2b@O6_O8+6>u(dkfg5c0r04*-bMbPE+}6#a2K%xFEo?8>!sUUQB;nb!~D-so4Tz zEV+UWTLYP|v%$~S-9~S5XPZ+0#)RWKpNo-}O&Xb>G@eWmaMA8ocBER^9&r)(v9qr< zLi6HL5F2^md&^N(G0QAhxb%yYEwZHd3{B?cvzN!6*fHG;oX5q8TLgYqa1kd@d9pAW z%2HMXp*qsQoD+Oq)zj5KxVt4VX6s=so`w?+Jp45P6MImZYXX!fp=ZhqViFkbGrYFg z*bE}VPCT*a+`QzT5O0d-14<%2U9*n?b{E@A1TP9Ghw<@I(Svs>2Tg>%qClvbA)!WaI8&FU|WoZQzx&H_m=mAQ^1Wc4f^6-o6@QApfziYQim9Q$_8hZ@`54AMSJ z%9|QPTgjqSsQOlmWFhqAyMEq#g8XP+B+mmfig2F8N z*wfst$EdAlM8+@`XC5HxUN+`Yb_BkVs?5LiSFJ`g2Nvmj`@LgVpsm<1PV(IpOj2ag zs@Yo-_JwLDZ&O~8;NxZzxakl#U`g*|<%e8$lDurT*muN4YJs>#Q2e)G0WPmXGGuwN zQwRJdw9vjf^H2e^CZn!%sWTj-)t#zsc;nn2um#c_I3i;6C$EsWA&lN{A|AIjP?kij z)c2RnKT^DCUSCe+7+si<=!q020 zUikmV)>Qz-wJdEcxCDpbPH=a3myqD@F2Pwe!8Jf|4VnaZcXxMphs7QK<-Ygs3-^Cj zTU)1U&z{pW(=*fEUw=Jsm7P^&CR+#{?RbfN02l8YZ_iY?K3el{JaFGUr1dy-EnOQt z+~@#r2sl?)`*_!)U0XQ4u!aHX6;ZWl2=R7IGc-@1?5N+S6nC}VAAWtLIt2EftM0ukDr5k5-Em-8;|hqxydagnS4QEGNL(1B@EI7ilUEvA^px$ 
z*U%p+igGEIsZ@0$G2i5J8DDYF8kj2<>OEjG@b5u<-$rg@^_x+zEsgO3?_`DRZKOFxJmW2>K*mV-rp-=_W1FVc4d2bvp`g?uw-m2)yLFx<*;`Gh1m#_TqQDOrJ}v z)&g%sin3LsFNtgQG9-6N%eTdcDu-hVdCKo-8r5mx+nbb#jd##d*QlI zXlmZXZL@OkMn7l;RDZ7?-qwd?I!+?mP#krDOQrzfyXwQNi{B^57s%wlpV9ZQ>Zl*m z+CRSgD;Q>@0P=5IH3!IrgMxh|h*hHHAce#qSmBJmQUqf#iWg?R9ky2&5w2JTUb0Ib zm5_XYhqg!)Ra*NFTmB%7m9YixSKqs0T9X>(o7?jSU=W4LPihzsWTK$hVs1Dy9WLw{ z!YUMr9W!F3a(KH%06p^kqhRTTIh-qIv`x3TD+#@LyNNt*RLsC}XCXx{brEa`*evkL z|M&AxIn`2AZY(-Y|0zKqzn~`%-feHh#ru7SUPb3tMtFlOH+{EdBH$r^f?;Z~hbBC> z=NHhnxN!12$YCg;B07aMj`!Gi`GmsYa55Ws2t#FfBMs=FrHfTj@%z!U_Go6;w!DDC z_9(fTtG@(bW!{M;>O`=^>Gys@^LcxEf)eFLN(g8Wt*Qohe{PpoJo0;f0DyNBarVJh z_GxTv4ih(C%^f9j2=ikWv3O;?ul)$s`u?6U7_vvxahD)L4QXL6Jby;EC5>u*WukU# z?v~5Fn?W<+q0rU7XZ8_zyMwKN-{BJ2Qivbs4S~$%D0#?Zz&9>O>mC+XeHrlao+IkN z1NP_O2|=VNzWAeHP{Y7h6MF{@;i$kNeqF>iCAl+B>x+DLB(22kbk zD3(9}QuT?oZa-d#$=(v*j(I%%uw+1Yf6M!zSxVSe?tEj?XE$qF8th_v$acu>&WL+s z?0I&?P6`&;*8fItwc5RvZ000Hj-Y$(g=)nv3*xoQs{PQsD!#w#>B5O_DuJ`H9i+iz zI42)F0gSAKOFm!lPFSz6nXPqQ6LwGXyD%Swh0a$!sjPnCMB!Z5lWQa)utf=-#@e9Y zI|T~x;+>8Z6EWLrrFszq+Z7M&L;U3cvD&cT>AN~R7&|&0Xa+4fOsE1Q@xqQG_d{Bm zTX5QByUy*WpryXEhsF>0Jo8ue^F`~=(HN;%7$kA;>$TFT`9vAnHogVmo-)z^So?J& zY_Wg;uBs8kA0YJ8Bqg1#cM_EBP`V;!D*&Hn)H4$snQHM2%um-ORY+t0h)K`s^+1tT zE7P|(g|0F@kJ-|)s(zzJ-;ElCADrfjACzhwg$igG)LSnk60R_9ehD1^@o}PqxbrF+ z@VwqtH@A36x}_q1$%9?jy!<-l2HqXen%EJdP7Wqn`!PmdY*k(wbB*eYx&s`IY`hd& z_!B9eY&2amL`2q zyb7J~S*}uuo?xgB1@ZMffzaO^c&^^ftPJAEyq!{ea_6)aIEj*)f3}yynIIaN`9w@n zGtD8pUuc!~WK>=a#kJXv=P+jh(@xQ+_<^B6GLIQUMjAfxI@;$2 z`$RQ9;hGY#3G*Q%IP?S%ZLs(ZqSYDiP9A!5Jsi(*-Gtn_ZEWe@Y*tA+oF3U_Mfs0e z-rfo_uEK)gd%&5FbU8C7Zm}gF;K+cH zh<4lX3NIv3j45nmF=O*5+8myv()&`8XPb`{QL5QviFvvCj4JJK7^>SdRQ^UN!IU^Y z>z>6D$Kf#xfc8WerBz|rZoszqsAzY5@yA@`|Bg{0f)6|vBI!@U;(j*d12d6}J|{ma z@xRAn6w2|$ZI5xvDjbXwap;>+)3B&>E0gYC)K7AQQGG}I(Y(Cr_8mJd!w1|s>U484 ze$W{j-TQWWFjqY2yf!tEF~+`UUoh3RfPx`u&K(;sI62!n{7p-SuHEGuO zJU+^)*bHHCE9(g^WDggg@Mm&H_JcUx4}67maLJ|mnAgpB4+wlfRQthUujnj<81nbF 
zddS=K4{r*>9zLwGd|psoc|dt~!>)C@HZUddfEg4Wd()U4yHRI9X{z=+s@e4xe|Yzhv`Z7V!SgEglUadY+7j!0-2UzA8TNe zB=z*`IUOhIU@}^H->dLhf3;URkp+Og_>x!U-gb(^`&-dd2^-p$SDkk?@Zh*JX`-e_ z&F%rxm5&i~&u*iRpK|rP1`H|Zl+|G+|5l|J8-AP5R;^_2;XytosW-*)%x`t_@Z8UZ zumP7JLW|Yf?P2VKR+ikcuhN~thh=|v&IQR!oQjEri> zDQ>@r5wE@ag8ogp3}c4KIa$tAAad3bu(6lOV^5WOogc(xu)UY~LEE=QhetLhK@2#Q zPt;K{W@z7F7nZ)gVct*M*q1`W^c4{E^t9`?WI; znTB7>?IwjMNp`A$J{NiY-1lOjox`IUf!;fme=KbTzP?rNA7GgLDPK0DR#oEFdR-Z< zp#sv#z-lZEnQFc00v4T5Kt_OG%;%3J`I1D^`qAVhMHFavS`2_0_vn-q_!=@qpD~s( z;OAATlPIJonm*E$FLZN~#bsz=GKQ$mngt9!3sEbPYGc^g=}hRiNxBz%f>0f2<7MV; z&%hH#tc^_8;vRWyA{0BItY;O=7dZ55A>B~V3n~+mp|tQ|Ds)MFpWla=iE3oKTbFoW zj-ana+DZghD-@esuOzj<2k!r=3nV1F#R13zhe z;t+Vg@=;fb`w{|B=nnQHRqYn}X`eG`}gmAhpVsTD}9PJ`QGyWX^w!d*`$RC6uj?`I4}06 z1|E=$BT~-qYa~i6cuT6VRbm~1lrQ#+6B`8bEl@?Teu#l5)Dw%8TGu{efwEMD? zqv=}5zTlK{@o;+{osbaJ_7%nzj&caPHSpE^ zEDeC2ZK*jR+&$kO>XUuBU>`Lj0ZPky_;wd{8oU9`1T5Lg8ULIhXAghld#<`89`wAVNs(?s#l$cAOA|AVj`6 zJ*|muKY!V)5xf;v)XwfG3w8tFCb4RT5_|aijXd4D?-y?H-CY~i^;bU{D4GFrIz+Oi zguuN4j-3Rxs&Z2~w0m_oLYDM#Fu@Ws`x#XG?eo7MUJ+);( z96@b$+H_YKzpi%cq$aCs#Yd{Nnj|Y*Yr+NOdv!Y<7xw?>&5bTYUEHT$F8yvYTmM(; z+35@28z;_dhlWC|TCT&Cj1>6BNL^(;i19t%@**LuyLPa2xMk-AOr#W|?NYu1R7Wsu z&>43)8tQ|LtSoyP%x~Jq&kR3m`uf-bER$L22?vAL=j8|Q?ubjC+jO7jj~6D(yff}* zaQ!p;*m>C@^x6^B^hf)&r+nm{FViZOh zipN|aWGzTvJ371&^!K(CM?m{Fkgn^bwwfaNR^NcWwsK8?(&IsBeHZtSXf21`JN`XK z@d+w?)2T6yqKngFr(yfD)l~=sTI%Iv5}R9&VxUyOh*I)CYFgT_Dvi%<%p#A5fOU%| z?_k#wl+Q4pzg)~ed$BYwCqos29mi|v^Qmb~2fQSP{At%{3b#HT&xaQOD=#-K{a?~- zpSle~{{~?~I9y8n_lBm7t?xu4+b3HM+h5Auz7^arisIYX~OrPtK#wRZP+0eA?*u(>~y|bF#hB6N1%hk-QU`EHcH*| z!_f8l?67@WwF&! 
zWssuQ^&0q)kYzx@+W;_nO!a<@D1cA7Uk>_;g(o|IX zESfd;NKWspNtifZ^Se~h%nKg(wYd2kUcBN~RSF?+`-1hIO&veqrjuMR>%L@7+DO3&rgJ{G%{Lrb83jhnPj+9iaVUy%RMjesvRl|{W!i)>Yc`h7 zBZrJIoi zXPAz-w34=TX|RbkVmA;SBGq&Uy-?{|!p3qV!VE7ZpCm<7o9ZyRb7phMba)!eh|TeL zU^0b3N9TFqe6?iS6LNVRYGqG3*I&E;-#Ys53n3U#q6widfjK=}08}-N-*j*p#&h*P z*~aFdTDlYh=5u?jpqOF>N-FsILjL0?Zu|$5_ma8r1qugZ@eL780*I70stp3WQ)EMr zr1VvwZVhz8a}qVu^@$N{r06OG$Zy;fc%>f z=t*f6?|B+Q`5L2BaFumny(m+~Ph=gJ>@vQDz7bh#NUtEJ*?B0#U}*?}rzvkML6~7h z&@6Cw7KZrqBQovy+>fdplP|-<7uCcR4og{@3}f^Z&~a;Q;sE*`FUt zC;+j}%t1^mc;9F#C>n^X!#Q-&qGeu;ojI{5rmP(8{wp{pEzPD8JcUK4r&_QTJ*qpK zm(pheb;P!znH83GsI1I%sJu6^ENwR#Y8QN#J%d&a zM0XzLgRr3it6#Fs$)$2Y}Oc(1|@c}0r6vN>CA}~P})v-V+gW$pXIN%xwH%U3cCA!-Z})n{cOceZ`*(O3S@D&lM7PwU)N zx=yOf%*8I+3+DqguLoksCSg1ML^3cxQIftkMwc!#oShN0|K>xk=b0G(5uD-K# zRUbqlVD55v3^ECnEGj6tG*MEPl@)zV@H1SQFhrK z26f~S1uK8GZSAVj4Low4f0IB^ldqv-B98l8Q~?e}0ljU)k`U=r_N3k$JxiXLrg)RM zb$8tGy6Cv1FXN;dE6JqfW>=jYj~<~nFdyULO17-l&T@@bGyQw)-)*6F{d zpF3P7C^sA4%TK5k*3d({#k)y*-@xp~=CIbF1DXs>RrS+!u!X!AmsXZEU%2Yk?F1TP zP(+Ou@mv32Arj#?>SX;~R;oy9f{A|E%u#$4F;}zyq29p+D|=_%1ZAFPd&9B`dAa| zMsN=KANA$%n-H$%pY=fyGA%WVrs{4o1VsU`H>0>aT~yMr!C_&2wIi};31ukmRLAtl zQZl;>Q6)skd6Z3g`R;5Bxjlp`{R^2ZRPj@fmHMZ5OxG**J1+RHcX8^Pnx9xV*GB|e z9k{cBYf-X=^-^FZOJ3*_XKB~19p-2R- z&RHkZUJUI<)+-Ao^Blc2tDfhXrxxO`KqAu1F6hkI(p-L$e!O+P5hZBY7ek?5ZLP61 zbGF4kGqC0P%dP$KY=BV$bd)QNxdo2BgTpB$6f-UU3>LhNB25#=jz{H47t&DdJft1J zJ>*p)3sMuxGmOCV1wVdGHhPQT8ay3Z6eoR|InF2AjGm!{q3$uC2Ykhr2V{ZA$F^Pt zNbj^~G#CC|@^J1m3D0tKQ~}Rp-G|3J6^9W7q8~g8%~_2QfYb{rzSG&~we08SAlB=> zue2kHekBP-Rhs?}ZhL-K4)as4tKpJ*U7*K?J5SC6q!A)H&r|lRAK>!k6%;sdy*|xa74GU8!XqHauYhU_r7*2KA54=_QROFPsT+|kT+%@Q z*3%~>^xP9qy|FC#fVaa3gkwnLesilKjl=GYRUK}u6L7SBuZd(QT#zkQXkX=`5v zBR&{LzvN?WtV!U5uJaYpvA$5Uc%+{O*=-9KoOgZCm8gA8#q~RLwtCGb`Qr0gMD0P} zwSBkpJ+XYfi5mXb0>VaUD>YEf9XiQ7DQLr*pV4(as~u6cJ?TbwDI_r4xoZuQE?xI4 zo)Ou3N1^Eu4O;#XuIP%A2Rnf$ua>0IKG@)MD1;Xa$4TwQmZIql_TT({li#j4l3pI9 z?Rn0v1vVL_twp7b!m)s)#R_SKhXgate8F=(P;)yXu??qI*cFWK9kd6dv7UjPoSxeN 
z-B0KWE}J=V+mEHTK8Su@(J2T=7B~@LDXs9~<`X#d4ntc-)B}M4)LQ%yl%UVG#p(d7 zv5)X&t?U=I&X#pdgwtEKm8@EpU<$oq&W!~)$=_k#9oHWm_iNL_Z}K0Vo;xN75hsdo z?Kp(Pa&OJ(u*8((?nonr6yig_p^+A4St+ZJSO!+RAd&_z&TYs&=%PGvlVa&CZ8CkYepReJPB(% zbc58po%!Unjg4&D2RiiM`ivrjT2kg_sx1x)3BVA_|AjEQ#~VXnwwr& z*^W{GI|yb?sOnIb`trR)h$31x`A!75_p}BYp4AS6HrfxDHIvU4ueA&p&dEKP8+c8Rl<4zukze>6LS8t3%y5P!F4*hII1$r9;vQnqHQBaLNTjZ@)Bp6SUa5lMW8L4x;cgF zFAKGjs9#Y5tFkg!QNm3h7zO)~NYjG{1l@>?T)D-{k+ew0mez?D0&j4nJpWObV|E|v z=KfCA2Z{}}-svBpnxlh{V(owIoe(2TUKDM*EiW&(glbKu*bgc$a_yr1G}_Mh=C0gF2bFHf_Bs)pDw6}gjNoYNd zqk0!gJ~v~(zL&7x&05k!dw&vuj=TWbmnV#2Lb27`+b?!nPfGXw76peDze)Td_Ivxy zT7zDAl|oPew=`NbVqCI2QHvigAga5Ru#Z~vjz8UPo#NLKS@jI0k;hCkx6`x0+zcar zwMabK8tj*;t((&@SBE+2e3=eW`|iO$si{3NCnRz5v7SKq?{{7NjI_9dK2MZScL{(^ z)4S&iS3+M0f~mXboTX1EI`IYC8@dzt`CKGvY%;JY5Pm`;eHmbkLcZiey1RTlO>>ce6Y+TC%ladq;l;G6rAwK3YmKfu+j7838n@&2;7Rc(fwUyya$0(( zxeZZ+WJxj2g@kauH?7PLGWl~}IdbH^!9Q+*Iok$DT6m@Ivg}w23>@q!(@miZdNBkc zCF%a_a;Lw%oVjb#fO3B%ApI5Gf|O{bEJjohI{Yg!KkzNMWYT`VuhWtMCRj>x^5AiE zGWbuC^kPS|`~}K=nS+b4vQS@iN;zL!uq+k7%jO!0%uqwd;B$m|UXv;wREQ;lkdx|p z$(=EI5Io2RPX#Y@uWrwu{8=arU67@WtW5u@eF z*wZ3AFZ`3bTaHPB#h`(mxb-N#JD%lG|4E#=CeF`zuJGAa$}Y6F=%^=JHQA(`S{$oD z{Y%;Fb;hWoj!9X8uVyW$xT#!qogv(FL49GSVNn44bG75v@sPfoahny<bnckM{$(L>{T zy?wwmtQJt>b>Y~Go6!Ye&@`k(DJaC9!Xl%O8c-RzJ96MQMt)874Mgx;ezN7(?(X1v z39Du@Lh<;fv)CT<)i6 zA2E#kC-ns9Ls^&|hD8R`c;GP+bnFh`Xd)!_ZauU)IpR$Vj~K31`#KVJ(kmtF(|r^A{=r znQirq(Fb9r@m4eZcVe)6m_m1savk*Pj}7GP+$zYp@n&jCL(GD2#)$FqJh&Nm4>q-z zfhhXjZUi+ITS3)~c)XbBlo1kII!ITcE^Z6YPY`-ftGCo6xB>363O&1|OP=K6lw+3) zT(?lE<@z)q!uR3WIIFXyIIh$^$E`Q>ct2YLtXMt8RmyHAW0~^Q&*(b{+z$2xje6kJ z)(Gi2CzLgGX1eR~qQ!LKOh((~?$8Ncy=~L{-bppzi{Y+ciz*ev z==~U9lauvg_LVswsK(x^y`S?;`;fQA?ikeeiK5k7uuELA3;7w9< zU8bS{NbZ0rtsKYi?6X!hd8V$@yS8Ed+lx1y$@VaWg- zl#D3M{>$S1`#Vz$;%diw$=_kF+$53R3JYZC0@5l+7Z*@42Mzs}|M zVGo^8(vOQpch0(2Ul66I3o?(u)kACi1$S3IA{Jhxx1cL(pCB$8dNwYvH%L4-SB+9& ziKoxOHw+BD$!~IX7;8XvRL^zlL`;NyRUX67K#ceBnD@VnNhhY*3Co8D{te 
zBPlyOJ7sK1bMx{cyme=p?MHf>>`R_Ywfx8ZCWXf-K$?uTYs1lzdXaBjuYf^XsJ=rt z+z}~9^)PBrPHwd;H`ApAc^GVZo=zbS)klYT9HjkDT}g!u_vW2dI7C^f){B^ zNdKT)RT`|0!~t5F2hxA=6K}IeqcVt*W!Nxrc^YR8@5dSsKfQSuoCyVG~ zO2nWT<^U4@g4Hh@3#=$O-(r$xc8`*HYHEth4VZ~~{ZVS1kC&{fu6cEy#ioQ&hgKE87?i_{28H_dbT&uH9*fLyiY^@t{KnXRK0?W zT|KFP$nNLk#pG__YX~A!53cuw`CrPBgT`eEJ{l_P0XlUnUp~cjn)!fM3G~)9mO>3* zmwkJ^$5C%$xZd+K2k=E|hgdA!rj0kRXJhMIQlVQrSboGn5T}W06)I8_tP<7gO3%fm zx^J_>MAEUkeSPdkf5AwxRgN*<_NI|^BI`bQF%Dy)C!XyUrUD8!BY^V)iWqIo|010A zAozKsgB5`pkMtlAM`oKrcrNRBMjA_iKf}*;Mg4D@Pfwt)^_B->)N!7eOx+MZZ(!2| z`)hgsh4}pS9qtD47!RKK&DeEM{(~*Y8|m5L0CFZdI43GMl!-Ma>sO(4u3}jHzVB%p z@L({Y>Uc~saQF*|t` zyFIBOaLFh#ksurgLK1im#cCiqnE&;alH*u|et`iXC4v5R_0{MZ^sh%@5aQsYl!Jn% z1R;3?`DX1EuJd<`TFc0Vz=7)HxhK)$tFSC!l32u%ui+=?C@|Jr=qP>w$O*ym1mP1f z&$vuqHUhv{!J-nV`W*g5NAQE7+9hkDR~>@Ch=M3(IDVk&h>@*9!z4s_5D$Lqv-Y`a zuR3TK1!55F5GC{i#NTDo24QXZ#fp&*ui%TnB)v!$N@R8M7z%w!9OxkeQTTdRShs?q zLxet{`DEdcUmU)c8ClU+#TR8H80+PB`j^{}vbS1a1!(?W5J4AUnP9z&;BXh%;KvY) zdgOaSua*B_w`LN73^>i<{V3r=ub zh{YALy(rMejf0vA#uZM8B}y<46%%uV-Zkni68>j$rM?*YYRnSZ-KisxoN4@JboMg9 zB3qtefiR&@M$(G^Q-nte(DZ5fk&}(8=nLmd_CzKG_UC<~31DW2h)ZT2y`bhr^1oTK1$Bp#~_2D5ct4q>f1z~X?UgVmCl_}laON}@yZ2)R3t{c zb>cLCzxwoA#%BItUaipSn^ z8g(O*rGvmj-U%weFLQOhQ?k)YYkrT|WE+)Qzy-x|q@|{rr*AP}p{0P@V~!v&eO?Uj zdM&G=?0Pe+#wOKl>@jw?DB~$Omx@U8gPGCE?yiutGy8OrRuEN&Ct7}nH&19-Sd50G#3JrxnuhXrz#vSIq$KTxCzR-Z;Qx!RXE8NBOKj(1Zd%w-)o+IP!qva;Ns;Vlk zqsj?T98JZCJw-;(gS;{RxBnI@V1nrk%g!v6V20Vtyskm<+KW5{_)P%iG>*6!M=($L z3#S?%2lT@5a*AVTOT+LqUJ0sTwLtUWV#7*d6TfWQW&WJm)qJGhNcEr&5q-tTkd4^@ zY4g-{bD^Ls056@sf+J7+(b5k;PhebeGK0RxtmorU_R(O(vg`RROLcL*l+49{*YH^0AFz7xN_rFA-ol_u|aXI4*N|ot@fq1Qt)7< z+o^XBGiu@L+Y{+ik)mtUDekG!!N$N~)BVu=LdE3VlXF&aQ}xD71-;{pHGq7bxE) z&_ZkFSF1uzI)i=Mw6(Nk@zPjRwm|{ZLJfAzkN>%TgRb@U;XvS*-`3DFXh;Jw{B+@b zRiVbxybt#Gl{7VZa-iGvr7~M!F|aX(XWm#_SsgUUCiu=>cU%8eVB^2tWiJOO76Tt? 
zLFDvTUYh84p%N&e6tCO~FLnebc6R4rIQb|o_-MBgEBKMdz|ze0aC$?*#F_2EWWG-C5YPU$^-=xhl&IiR?V*Z; z;)(!T*W^U){bbnyS6)Us#C~4Hgjj}TFRY%e&#{%-`EG8*vKIlnl>HBvVNbO z`?}sMvyo{pHo}ltm{$wpx7Jh9Rt80tSM`}Q8z)tg?_O0@2ATQT;E(Q) z>W{j!dd%Y}FT&!}LU{FqRSMSzAOi>-BZH|mU1~pPpVx_D=ga3%B@odwJCiDa(i21? z7F=gP*=ky?eEyLY*=(EwGtoV{mts>Qarc?%^!k{mE^S1 zykQ9e)p2?7!fxO6-W;e5S!*G*=611bzqXf@_UYw^%;&Hh?DnU$6yUrSt&+|P>Gg0P zj&eq|&8BB?hSc*?*N?4u*U9|%a&J{~_||@v3{Fi=S#CNH z3R!l@!hF^Bi16UWbNtk^bD*jjt)s-F`G~DI-#+<&HUpCgz{~j zXa4=MZ|#zYB>*7{h=7UMkhWH0F1YPfkli5*?Kv@QEm|H@Pg(BKI`thQp z)I zTpEQ>aYIknpq9Oeiaji_;iJD9BzJ6bd;n+{smSU4V@Z=bF!0nkj;oAdi{abfVt{_ho(!tZGa3?r=AgSJ+zl6-=CS|LF z_t;}YQmSW!yR8K+k4(k4$B^bBus$^LbKXXGpF;#W2z9U(0n>?}P(A<_ss|)`NL4M- z=RWn%n`@ZKlD!;keFU(2D^gy0(y!}uT_T@W->dHI^ez@Rd!i&n6tUw;;c&Sc6|eeG zsG)5&T>9%22akjm?VCKg#jeyTg)Zr#tJr7~c7Dm>6W@^yPx4gnv}zYV9ka{ntr4#! zlWdzQDXXpE&h6qFlf3zET#F4`%Y!2guURXT*`)!So#Po@Eu#J5`FB143}64pklYA< z+4Wr>RHe$=)q8`rXdZs*Zw0Gr=6(4-vxCme=o=Y{T&E}^BLmMzBZRv}OS&GMHZuDr zLPyD;y+m=^1l^DE`=;>cpXeix+t6|?+sM`eIN#~GlICfXy!+aa`R)%6m>=iujI7eW zhoMO6k?27$Ur!XxeqN!c{%%nWK4clYns#|i9yUCcaCaSyvmM?w_l9F`Es&hb-2!x&iB_6Ar_5W=4kM%zQtv8gJ*% z-PVq12Kz?ZSfd1jb-Kh81zb5aU7A2eyK?#RVHIU>hqFB8k^n4u-#wHX0C<+)qT0tv zbAnfAR4Wh|TQ1&(nlXr3$bcz6-;-GXw&}@93bWtF&ClZF6&HaE{h1Kj{rkx6vTFy} zyPX|YDmIrNZ@87%wb&m{M&EKv=+<*)usq+T{qlM5#Yw#*amCsjt=vO%;r3` z2vK~e7!P4GrGTLWx0^QA&CRHeUsOXPmRd;D#d=Zf2ORoppOEJlq
    ~~@GPDR zF3W8?1*hZv{D~ZP$n!3RT^&*M9q-k&q3s@R3+LlQZ?^!z&JL7JzLvL++l-xwF;4#T zDP@*h2X5PQYDx>Z-50mA_T3g#S!@eI3MOH+Hn|cO-)#$y?t<*d=ZANWJM^7z5_BSibag^FG=dnqQvB)d1VR)uSh4l$t}UDb z=YiJ;%TZwhZnUZ=1)K+y^gc(peDy52+5F|zD_>hW61*>oU5IAGob6itTbjwY`sm&E z+8R!`^qw#q#71S}-?u70ZCa<^Hmz)A1{)+h%zElm9oMfn)gIjow(b1Z+zdPEdUH~zT#mF*>UuQ6<*){k>i*PqGq*yB=0lng zqEvGnK?RI_BMS^No5}q3>zyJ$P?E3ab4?$Mb@7Vd{qr4B<<5w>{y@sjxz0+LU4|xj zi}C4{jW;?G?=9lPN~~RG{T*2PV^4|w@vjPC=h-E!Ot)USar-AbFWdw@Z|3HUfRT;l z>$IR}DI)eNEnqt>UC+4oMz}7n z1w-MxCrI__XMFZg=BX6*`9A3W)F6ax0K4^5f?jBe-lgpvIoXKZiTJ&+wbnV*m8ln)&;2}s zI$r~b-iWjd>pnI;9T?+~zn(8x2g#}D#__r3)XwwqafeuoV^Qkt&Dh6}AsJ0o&?t^F8cm{{1@z?0Nma| zbC8}%6W(*}k%ZiEY}@UJQllfsn+x`u)rys8+UC!Xn7!B4tKXA09J(%7y0-E&{9Wl% z{g3SU!t@q_;@YRT8LJD=k0f%`QeA#gUa&2w4rkTRHUwBcZzrtzmy~idGr@M!0Sr%< zB|aqd^h2<5JWW~wEnQ*Q*&x(n_Sd5jKIFuJP`JO>-(K-w^WFgzQ7R=0LWz5c+wIc( zH6!?*iK|rPvfcO3O{vz8(=l^%Pf%~sxhC6e#abjcxE^-P3c4fDcF z0h*$LnLS%Be)yi1(ES0`*lnbXq@X>eREFj|h|_NbO~HEp*uI`UgB;|M{5Dr43> zj9p9LPz3@q>H|ny_eXc@PBzc1Mkx9Fnrz+9vBq{d_28Paf@b8jHJ3u228ieQ&f0qh zen=zu3GttYVskm~)u~mI{ep6Ra0?8!I8S+eD8l&k%0_nzgU$=ZHzR_r?iy{9RO|$> z#ylRwcMSxnwq=%cifqjG$(pUJ?yZkfD>k$Qzk=#lVxl^ShFj5tocsvj@AlP;y$KT= zaUZ9!GoF@cPig|OB?c78uneV=@spHH)YIUtA>5MJ0| z6dstmgHEw%Ik8So%$Cs*7aw>^Oj8nm z`tbN^mn14%3As^xYyyE`0juN??&_sgl;JDrox?_;;+ z?}Kg2$3nhQJO9YkTgN`RSI77{lyWqthhq%32*%s~U|_ zJ%g6yy^=MA%8*nlcAB#sQ&Mrpt{=y(R`|&rXOuzRnT%8TRpY6Z+IB-Jj;Ku-^elz# zsn*>Gn9;J2mVz#+^8(agi~&NfWpQExbtqH-lQfZxV8=+=2r_Cz1Kln$&x>s$gT7p% z_Lk>crl*tY5&g?$i3ea9QO_oe@&y~%hEQ6stiOa}=9>}03t8=}zLWL5lc#qrwOb#d zT$=+WtPFMrKgx)WVwFM4}K zx@iHO6

    7Dj^GBMl~Z`)ObRQ^2h(t^%X#MWLwuj2!Q~>-GaNjyK|91aCdii4FnDD z9xS-KySrT6-QD?b=Dqpen|bqB-J)pfHg&rD^p>^PUi<3U(clLv@LB|JCgS2u6Yj*t zYnLh}AudiABoOO*SX98^ace1)!ZHZ$rat~hd+d}y&5P-!7PGwCk1Qj z<%gAOw>>OgqR;syH@J&`K*<-L0nT%AX|L(rSCqt_jd=4^zkxRB?ID?_6-QE)x?p6K zuYXOWdlD(OofsvWR{|&inhjPlSB&h$AJ_XLb;cMvgtH)(cvkIdh01*R^#1D?)NJvg zYJi)$N@JfXgtLAvV=7a6N->0jA+9?mXFy+~oJdpE!PR;7PVq%2d^4l*B+0#odF5q% z!TJ72-9b0x*oEQ-baX<7a#vJYE(ey~k@R4*Jlgw#7o3W_Lns-7bd!4y#W>OHIIWOr zMI)2VR*&=T@wPA*p97DE(_Dg%9)FhOPq@A|1<)}?lzp<>GVp*}0Zww9xyk!lq8Yua zwYY$BYOL=$#>R<5xSLwk6C~3~{c`Exnb7`($;@dqS5Yr`M6W>_A~kWSLWJ#&O-Nxe ziiV3N;(f|Nyh|?9ZV4^=YLcB59F9Y0yQ8U$CS$nO_*upZROPGqjFuit6k}!Fggj&6 z^?wgu1+(tj@!`ZAkN=Q-jHLXHXo(~HnbU(O`)lxc6OA?A!ZtiryR3_=>7}uGvCOqS zfA)UuvZhJlf&zb_4%ayj?UU>6C3yQ-*0MqN+`#ev%x7K{Gx11xc8qpGEj3V4RwIN3 z4b5o!$E^yx!StgOW+o?6+l=GSluJhzsRuDif<(epBZ)>i{fTh)Ou7#8V6ydtjc7Ol zGh~NCEw@k^zj#$9sj!M|uS`g~2TbqlEg4IG3nc9N_HX4cr+R5DK23(7q;9hYf>v#* zwcTzRj}apJBw_Qr=QV>DQ{D-jiWu(Xreull+iT*6Jr(JY-c0I3u3&t?Yzu+!->3(swtJ&Y@w^3qw3tM{72qAsmxxLMNk%d`VHC-fHZ`%{%6;Bc zP=}34wyvJ<$JbuN+n--_k~l1ipcBa6R~ASwb^CG$Bz!}QKjaTlr&`bE{AB58$UXoS$x?j)}w7prqp8_<_r$K&Ck*)#t ztohI@$>s3N^G0+%X)J)GzD8z*`-zI?md6ig%?+DstPOv>wUq#?u`#Lnnko(16O~=N z1v7W7;S+sXx#+JFhk4B>%++6JOp4lo669DK>JVe3N0RsQS1TiTzLAgdRjazK=!5S5 z_#`xCyFhkZUe$BbOVbg;LVQ8q*%m=FBA z;`0q9#YJNPuETorpU3RzBEpb-H`PPg`h+8y3=+^~(9X{Bcc zT<-W6^4fw~bsEOS$LsE0_A_kN3|}lgC2n|O&J)1QyTEBs}~aCB<4 zXGq*$f!X_>Q|y-_zlV!ecSeePM`DECR$n%UO4T+Awar=CNj?wF4oaV#fL+3euTRdo zv}vJJmtO`y%ZQnHp!~WWc3fH(x=1EJA=v^rxACghVgLl`rG|&1=@iXldH9eJTMHAz z+w$jRlXo?~37kM_w%dYJ@!ydz+i&Ha5A@-W!aH%u)3RAv87FSEbbSfOQ;b9%oSHUk z9Arx8SWpiJfS7G+`78xHaeC@Ira;A?@Z2tz-LH#ehK3l~_Lr_%l^>7hZA8_9m@?@c zpI-aSkI^oe+JE9cFo`a@DP1H{AD9%ug*sXzzj=sM1tp~G_~UR?9{BV zHUnkj&Sh_ZDgQ;?|2Q0VFdSu+eJ1imgDtly|9#uFWq5QQWtFh46M#8%csM$pa=cSb z{=zoOMPimZlNjwv>Gpak?ow6_=Q3IxzZ1(O&F?qEXE*rD>}jRi1fkAmEfSP7+$9fN zl(&A%A$q`7Qoj8)&gaqr^*0Or&DZ+5-V(?(lEMlb6cp6@`q13i2z+jfPEMAL`Tbpn zV`$u2R9!SNTm`C&hT5^C8{MU$`O5myfbS^%#Qgoi=@vS@EBvoi3|B(oze;{Ic2%L2 
z{#-rC$Ck`skx630+al*~`l)6j2g%i*UoMHRE6`IO3NyXE6T(ZVMlf-lkh&c~n<&%h z8=`7tKrWe|A>1prY1n+#<4aJCE2S!6^go*;mu42X(+Tu1X;fZ_nw0lUL z`IZOkA&FDGz0%aBYPzBa`8Ge>{!m6neo`uM@J9lg@_G+xE~z>aLQoKS0p#>Y5!bjQ z0r`1I)V-7x@-67^6YSnYGT~7n+JXn<6l{cQI1Ic6(9?SZW1X_W#-+kHCWoQ|04b^N zpJomW3ilLu70~f|+gb9Wvfp!y%X`M13KQ^U3y&DBMLpug<=)UEr&v?}n_mFR=5R@W z*AEK4O*n>LfoZ?F3{bJZfz(IQY3~h|y^sl1ZbXG<5yUIk*vga9J)@x`6~>!uSjKt^ zET5PDZi;jteD9Ztw?2Q#JR%NG9JWGwt$;E`rkxTWai!>kIxcpVr}(yKqKd|+uhH_e zwZcQBSpp`(AsSOkmX6a1H*1rGVJCyNW(9)M8WsihMMUazCrgnQbPaG%1?i!8V`UwS z?JL2vl+3nAa<3tEU)b{{&Jv(piwX_T^%a6A)+=< zr&hJ7bf%D+^)vNgMmc)239>aLv+QqXi64D9AnfY z^ZUhe&hR`dB+4*eU{ZSZoFxBcX2kg1lJf3q5~tUKyT{ym4kNc;3S;97a2LB7X10bi zuIQ%7qyWIWnN#bXKLx{-|Ll%BbMX-aF4yV|;_RSVp?Y)$ba!+PEH(2}jX zU^9bFXQ>=M8R8S(L(0RouS7<3irct~y3{9}W6IW6KDQ<4D_Z^Yjc2)5KZ{LAqOEhC zg71|P?337{rB$Bm(s^+dRa(soM^DZ_9gHh;v>~`fvB>Q6A1b|weB_7{*)5Vq=r zD@l%rjmlpU^wKL7=wVU9J6EPwGM^T$P<&Q2n(v+aAGLe=qx#>i46<``@hRbXWXKrp@0V4C>fB6zlQAi|7v{9r14hX8v=&8*( zL>65N@TzAdxtz{4g5gpYg9$b8IMJi2{1(lf=#^vgqXJ+9i-1zn%zx)3cj{p=9~<>r zGVQ8#FAV^Qf&>@pEP?)&2K^~ADrnSh|9yloc@x$eW|4jHv=S0K#!gC`lz#HW<5@1&hq@}#=$4@V~IIw=o#hsEVN zIXMuy;!$jwR%lR9_PB$4tMMJc#Ik>}9shIf6XFoUPHbNYf(0FZ$J!i35MIb&CoF?Q zLsHYg!`b3d$90f@C*MR4naj|Re;&*D zg)(>3Dk{WI^WR*c-~Sa3JQdOwlUezX=5#yBta*s6NbPoAbLBYW=OXArps&Z>Y42D% zHwZs91_RPipb3wBDqcyW1=Ob}0JT(CcC7i$4T2Kx?MtV-wQSB->Mq&5E) z_iI#*9CzAkuZ3#b>f2jwFVKlMSUSdD4PH3pbFqP?nP1Z%LE}cFM8nXtD=H8vnM$>K z`Mqu)vQp+v3@!hjZNVxN24{X8v?i`$|1_kgl@v*T5*r}S^ve+*Lo&!hcwl>vaPafmD^%j$e38ikFh z%{5(qv0EP&$m=Uh9q}!4CY;AY%?if9nxl?N!Yux)KX4+yXsPuONCDVl5PEkMZt@RH z z`&|>r4~iy>_H;H-?DYvjmh}Pv;uc=Kcyc!RfgCakR1DkGUuGBhbt5w* z`&4PHftXTu{dg|AAPnuC#G@g4M{jTvwAOW3&~T4)SC1Ur@hl{XkBZ>9EceGrf8l#> zLNA!LQ$4P@AC+QqJJWzlX_T+McAKDppkn}33TXz5f)l|dg}ILOm8D((8!v|89Qch|)FK0ghqwCVS>BY2+&fKuv#nAiu4^L0MLTM*@j0Tc2*-iD#DTJEoR?YB+#CI_9J z`rxvgY$DX#UQ@#|GdrsX;CSGw7f929RV@i99v1JV5Y=zSifk5`&8=CwV3quGt0KJzn!?jUhwhg~I%go6R`?a5N` zw{Px9Z!ro=sv%Vhxgb@V2_+vyyh{xH7vmop(ceeXcu20wd5KR0_&&eeh4TmR9FhHR 
zNKc{z&|&;~a2D9K(XVEb9)#ez{?2%`{o6OFWv6+qg(jzBAdG9=a|R}~JqjW_zc$GF zg~LH~YlB%LU)c1f+H5wy@%Yb{w?ODfeMG0_g*DOgWTYCBmT$EnUeri2G`GcU6g83< zPC`Qh`&z0uHaBpegfKx7)!9Gz%3v_|=oWEp*KoQ2t;gC!#Y6h%a8v`jh89#W$;2VJORG%^DzD1C%ma6^$j*=ISQGRO_{iuq42bVQC~p*RPA0s@ zwl|r+p+S|2l?i7L4~CUZUom<#W}5Wg%Jo_3=xo$dJ$C<^20PB7tl=v-#<3y>RJ8^*8kPQ&`_=9n%_)56kSBr~}&%)>!O zzkkQvhE0cQoYJqnP-~mBw=uH4wj`~=(O581^qP6R0>-+EuX=cdq5v8M;$GGW;X*Hl6pU<|a!$ znSiU)V?$fUOdA!Q$;*a1bL1B|DXp2Y**U&4(`e7nIaZe08CoY+%lTYoa#j|6it1*3 z@5~W##APz++WRJ!i(4Jd4D4=@h7~j#Hl3EROn6%wEpuN~9aPdO4|iXGIA5GtH4gw^ zY-saKXK+wnEX5-4-G@gW8E(`It!gUq8Bew_xzIl;E{$m!w^NAdM<_X^TQ>4GFCON- zjF@WfuHGV<*ifBlXr^POPA{k@Il#fX05m;ZzK+xmk6nfpC8j3v5#53U$rUS^WW3zc z31|0)i~z;YXJt)+j6F9+CzkE*b>d=TP<*c!Q3jpjb?jNZTDR)*O=Jz3;|drRGE49v$D(1)~dDhV+6DS9C#1@FA$H<$JU0YCT8U) zz{mQ#TU;Idw}7uLS}mgghx}1N!S8V`g%cNf@p_qu*X(8Z(nD?0{_3-)fv}~TW9LcOf**T9RsyiVi7EC#`DvECu2%| z3&|cTa&$|s+}s)aoPP92l=-DMX5;2FqfTAUb7pcT_3f^2i&{j!Wo(m=kVYUo4Z!3D z_QKI2*DQ#?(UO|tR5EGiIM+=W5ztDp;FmhyINZCI)=!&%}xJw7GNFI`YmJfdoAD_(9qENc_zE($lp zV2q>;Lm)Yvz+Qsb!<|+R-8@6UN@{k~)UoDcG+t3|D0OdezL_qib`kAajqJ@>;U%Fu zI#FHG67}l4DcY$+@R+MwUe=^hs}iBl?Jp|G2n~Y8Afl(ISI<9sYg`hgcKyR4^)veE zQG@-uzq{42*#Pzvh!@tvGn)W<WbOLJ70nq zy8bTq8jgBrTY7HNMi{b*5eE>xUR2YlIHCvGjY6FytTjNvzhvWSSUpUnbSLcZ$rg~# z%*~0pkaAlRWB)B=7lB#WhNhiZi@jbwx=FEUUn3xXJZSQ@ekw%K{v)lklhRfF=F@~BhxQvqiptpvjLUhZ5 zS;mQKsa_t@Qg*vfRA8xT*F8a*F zizs@29x&(n?AZ0RE%ei@eL%D>?r*v|sX4Y~fs#(VVy1K2eLuxiRChAM#R6l$GppPZriJ+AT-Ft4!@kvW zm4FRCacePtIDuuM60GH201h|6d7UQ{oOVrAGdv$;q@T34q2d*C}8CXvXCD@qe-H)s`23Mk0*zZMY7TOIJ{qVcR=ftNe)YP*dwHEm1d~^qPg|DvY<8_GSWhn#$Z7wRTZ;i z7HDL$0T-Uh1&g1I-C}-)f4tBJn-_FoT2v;aL4mhp6m&jBMxuOdVNodRBpenJvNHg< zBU}JR{_^sWUz?%Z)T5H>3m4PNza~)F=1M`(_tApA43kDOFY0f5VU-vj2^>!izfOZd z-8Kvj$N0{KD*%YU#*q>1EhM`4ERq0R31C#sc0Z7zi&)Ki;}({RQ>2CS0ACt%Sk??L zS?5%8%PLVsNl6F7Ww1^yEqdDCISmYq30KO*vjt!0PG`MqqlrV?75vt}g^9e5K+um! 
zr}Z~5^p6)8nw;V1djaY~yttyaVWX$p0#F0qDC>vH{hr1oUJs_rtLjPMa!5rx-T{?% z1X5*rE%?(HW<)s0P}@CWGfD8>v1H@G%GqM}O_+;p5Ag9cURbPTwP?!DU)DhiPV1iy zGZ9hI61X>YM6#;w;-r)}B3GU!CuTw(54n_V*1}6?!D(XB^QkVC_Mqypk$y!TYwkEo50jeH-0hw70iUgv1T-)VaDT z2BgKs#XrPU^=;Omm=MiQ-c2aDfEBB>L`EJ<7E+9h&Ysq~T6w|B>ezoABGo|3{&c+1 z09DL>!f1g(78J11WI&FcKiYXc#58ld7KB#ptUp9qp!c{>L`;Lf(1t|Kek>*{N{YTa zmXPFk+w`JZy9s|kWRflNUO;#tT)xbegs6k2O+-ocuf00|PE{EeAivdsnQoMLUIBO< z*St*43j15!szIe(rMLuiB1+)a@ffw5?M5UzB+f12%p3{#t8*7|vc(|N*EwPw*Kj-) zx@22DmKk>4m0w>?6PzHKNxQB4j)7#b^n*X^ow9_TKcW{53=O+9MH;pOSDjd+$v4Fj~297{9(vP`fFhF zOn)s##uv}&#U6jrWbuKTYGq1i2f~(DT-y4V%jxJGFmUZ$WZJMW$g}ym{ zrwn=bp?G5Ew0k>whshauExZXp%bu3~SO%;{v0y(DdM*5(#BA^}X7SVJNxY50(Jnj% zo|b=%ztFbfiBPHG^Lx^Oi7;g)JWEEb=o0|IG``a4dy-lyPL|_OUFAkYc5E*eXMpgN zrR(YMdrXJk@|Udk&ny~~3zDVMVt@c=uDtsdkXorb2_ciF8o9aYX=wDlBQedg>aZ%b zzv~@;igQ~;0`HN?h?KZkhUE<5jduA5&Z=l}NlE;C8Jyt8tKIc@5D4$}ben7^uA&_q zF1Uitm$n_x3PIddSQ?%d8`C8*S5&vFoj;clN33XlBpO$IrsP66GozGUHXBfBASD`N z*Y-B z%$bAk58-MRT0LBY0@u4}IjYZvp9@p{l8SnWzColH@>Qs;PAe!=O$UfJUj5vYT8rt# zFc-J7qGlpZr6(rFE3IXUQ0gx=sM5ho{Uz)WmQIo z;Z)F);b@0$mkH)4azfOP-^UHB8$&`Za-|$9tZjI)k8q+@_PbwKJfahH0*R3JWBhBO zKfliqV4ed_jPNG{BeT#*t~y3ANhuMTj)p{xk*sqQ?Ug{2RDP1TW3I2k1;&hmpw*A5 z@3wx-R1iGjyrYPXko(B=wHdzf*#uJ3A;Gy*0Gg6ga-q(fm8}#$|UdFHFq5fTM6}q0;H$&{Xx@^-C4nvc|^}%EQtF-%n ze9r|Sx_@;1KnIH`B$X$ZOEsQ2G)8-3pO>stuRgd8!^SWbD$t23q*FB zFwv*Fnh!q_)SAZ)m)#WPsivJ=O8dNl@f^gMJ)LuiAN%4z(|?_tw3zG@B*`-z{Dy}n zA??g`t27Ts{_taCx`daL=pB4fXi+`V1~aYjCYqn!O9s~rUWu5-tYBcQqCC?tmZ=_O zrqOVM(Jyd;L4MbXvC;m%_cKKkIU9rb=@D|FBsj<(DH9bgA6j0qU8^tU6*$oWPEQj= zSm7TOVQXz?|QG>;1Pa{1dbL*I|H^t^}-aLikD`@n$GUMY1Zte<`E%OqOQ#~asSFxe#NrpSm5uPKW@C+`+d@zp))xyOT{&n^f zMaQOM>f%xDMARLv*j91zv&t4QTD}$P9&~N3{#3(56LFewdgogoC-|t zGZLZi4n_8=O~9+vSq2=_&`jrtr607zXjV{wgHtb)73w4X6_xxGB8S8F)2oEaLK{%p z{s`rSpfh@S)j;8<`9h^)oMgXRQBlEJZM z^|uEj19^oXl+$h&71pCvK+oLK!M@lV@H=a*VEComVW|L0OkV47Af-z6VGQ$?QIk#C zNS{^o%@x;=1eMSjT=6XF(E+h!%bH1`d>Fh1Xlm258K0Ji^t;=$av|{!S=PA-0hZKB zu>v*UF(RE4M<2Z6*E2SFrDfP#7`E~BlFKpJA=6>@!5 
z?2q=UQFJR9;{it*f+eD>`N||Ej%IK*=2az|`C&RAb@)@;rri9;I4yT_nY7S3<#t2# z(Do*5pb{}29&LoF>D78op7akY#e{$=3p)*qVrZM_h49tg5bKO z9lz=N?<(#HAsoOds%B*I6!fo3`t}Z~A@SUmo26pMBg2BK3o|pU22;zV1H&*tmzOXSPp|#bLq(+X{5eNPejeJ4cfwz8?2r3N@NmoFNH>kiwM=i zTNtOB;cQr<)eMvlS*dT2(sdm_z{xrLTQ=y@N0ly+s{$4s(s1ggc% zsstV`26X*I;X5eyJSIU!GvQ(*Qb$iz(J!MHcZNxT6IQ#Di@FXM8cAaoSAArlVv_CV znhGt*fKf%NGohaQTchE>^WTmRWK3m{Y?@AIRv9=&l~R?A7yO`f)m=-BDEZRe5M8}@ zGE8dVw;S} zqa)s+HquC+Y)As;2AzIRa`51)iuED zFbjK5rttsVU?-t_4h;u_qD7g|RQ#Dr*+`D2Ne`@BDb1?7%MI)EetxF?n*T~ff#vP} zvmjr8ZdsAR*^<)JzBDJnxVK0bGwz_TR1~?kbo4RI#^mJ} z$3D7$N$a$76*_VJTw)_yJGPwXQW;;GD=tdC%PccM8YMATn(Jn+9SsMM`#Yd`RHJ3Q z%(;%BUq=PCg$vGaLEHh;;E!V3pBC{yfJ8?JA|^V7Zm?xpHkK!C6|8lE;ln|Vl4Nmg z4bNb{G$0Yrf_WV<7zdygirJYbO!fswf)TyG%Ck7c?`qP{GF)}4ami|I#3GEoadcTn zMM6L!K*}Cb7Ml^G-UaI4H5PEyR^K40OnPhy$JJ%j<~F!$;%T&(<%A5YMg=dD-ntcP zwV15BNOw%?HisNlaM9YkV)d*B=wxc&ks3DY_dxV98C|qhrseO-A*?wilqNFlRM(91mjvL5M zPGI)lz-Y)4v9Xa;46G*Ep0_J%cFLWFSJQ~UiAz(ZguoQSv*8^e?d5-UAwbKx_V1`b z1Y@NJ=-`*j+{J5IAk59={Hg@U!tzG_3||zBk!!h0IWRQD)O=HlZF|NKQb&etO?6BS8lnC0dxwn}#a35;=>ANml70{;llvN0I@Ou!uwepP(RH;i<|vY%-go@BRKb z%f$Q0{B>j{>S^(S^1?&s*gbUqq~9yOL8X9YmF~5zpia1jj?bWcd?WPUq-sj08&Mqx zdjd=5TmPP*x_XMGA#>VWV`v3dX!x0_%f>;siCE_M>S>p%tOypk$4Y(RbfFOCm^u>% zWdNXj1-ovQNv5_vIK%el<`-hBdUG>si^TevYiDs#}e z%R5`GI~v^kJP(xN_2A2~CE6rF?3iOM-wa;6|5(-tVr09%_PWk{ly{NyJy1U1jn;1* z>?}n*6)s>RHQ;7b+l?z|6geL&K_77;#{(DXq{Ss1p1$BL%`F12QpnFb$!#9-o!RPe zRP-L!E#}hoeljk7A~L~i!*5=7ha+e`CF7~qFK<*Cm5&-N=Lw0@yi?=3yd=A_p+GHK zsNZn&JVeRt9%{uiU4Hz?cULL5(HkMFk-l5}LYnH$s@#tulkn){Aw&TkaipM4ph<*!C%PuI>By z+9mwYkp@iZy$E(LdybN$h3|#ChjIqrF^F!ohx3TkO-=#LWlYfnJ+f!bxQ9k68U-y+ zbzca~mMbjYdq2DcUmVbTPqBQgVKsPJqDC6R3s9TxBU0OBG;r25AE!bhBGHNVekwjQ zSoU|Yom*CrU;}s5eb_=+cWH0JA!U~Bo>C+up1nrt+ub|sr$6n$$?aRzOj>mutI+-WUhcZ17??v&uo15+}5Md zNPI3iQy15ue$FN_ zG|CTK$rehmS|YRBh_5R;t)o2;j*X6=oK;SzX}CNpY(2IZ7?Wgwab#28s*9Bq9>Z(z zOV?>bH`CvGHK9b#6Z^h-d2qkUvR<2>DnDz7FQrIIUS-6;y0{wRS*hA?gYhfXZ&FR5 zFc*ECzYz{&?O8N@R7U8q#6dyP0Wlf)i5Pm?>Ft4U>~Utx)q4q>28%v|NL-@o30ouJ 
zJkJ!_5t??i^!#T&F(6+9$4E{p)-(D3y49(HZZLRcKt*a;$l$B8pwEXzu7H7o5yZ+V zB=u%XOB6GBK>r3d4R;%a0A=Za)qjEJ*RlRUJsL?PY(<$Z-6`FaAMN-IM%s;A$aqX zf5l95pA&7^-0U4tMv{SSys_bnAXj1E^3Mya*oyGk%4p{eju|Rabw7BryIp2fz1VKO zfAJb8`Vk++Oy2;3kq&}pe-R&@Xxvd{tSy9G(KW;6b@Dcx(YMD|=H=L*T;2CZr9|@^ zW79R>d!mMRm0!TZ;pDjmPc6sl|FxJFwYHuj;GK2Hemnw`_-br9327Vv_1}^bC z%R_cRu$%yW#b4_nRQQ}BzL0K<4c>Y@1M3;Cj-?qaWC&MXZ!;VA+AtK{rZ}EoPBRVM z>(WLOLhQ#WrQQPiR%We7A zLtv!25PfNtG(`o!^lRt!PnI4R)w{%BzFOkf^^pF!OQ-4Y(!NI&J~XOJmh}*K`M)rW zE(H;JQ4ZF_T%+AF2^EcZu(s^Qo5mm=yPnffIQy6XSYSt;Xa9*1iSOWLA zX4{HUdaiter`uD(iUtTP+Vq7Mw|CAevqbxIQmh`bXRU6S{13Q|_pEiw1PrfrHVc8R zYATzt^UMBI@$DTTbpypVqZ{>>5G)Qm&&Ea@RKfF?c~8irySbF7{3i9|5EzZlCFhGQrGLc@y(w{r(I)*Y}0(G`yE$nUKn(5 zJ4}#77N6>7RW|n6wS(on4jedacFk1R(jxTN1U+{R&U)|Oo^vGXKDqCq=q;cB@52HS zgVQ^*jKMxV?qh*MCt|+^$x<%|+4e$l-Yt5FKYak ztMrB|(sjPULytTCWM=CApaR+sCycbnJJ6g%)vIb8`Iwy`dM)y?P^iRuNCe)8wIpUT z1bSNRuwc3ujogl?q#dv>zS}2lJTtk{%Oy_s4vectCBkd6i~g22R5`APXt z(qs&#kjH1W0q9`dWPV%UIuxM!vgJSgf)q)ahbrR*$R&MpxQ9SNq8N1CCvb`%CNBOndlS=_s-D$d{i|>2WUP%9%e7XH)OJ zH&n!*Et8S!ROZYK(s*4X2kAQH8ErY4Go7E<`1FVPxThy3hCSGqSzV;UanQmBlT2G@ z-jI5~lWodWVZt1Z|M?_}Og_tF&jA)jMa_7XgUiz{16iBmwNU%ap2X}9k)O`+2?*

    aBVq?vgC{bS%)a4F z{93er*ts7g#aZ6n8DjP23ufmSTg?-xvPxjHA@QchX$#(b$bEr+0edcHb$>?4ueue} z&~Rq`F_ixakwIAt3`k-vxW_;jkvQF#o|S-n?7cQnC8Eh)oTi#9!6=t|2b)l;3aoV@ z;$w1M3hp4lL3u)2)Z0Bji9Pk0)oM>hI^)~3C3&&@a18OZn)uH4VJT-?f!Kr*BbXukUh@7`PW9oe~^*3QbC%`E`?q>5Hro5ZcYnuGfjA-MW21bof0 zBrI)SP)*I75T*PVQ`i$&E1-s{ne@_K9<7h7+DAQdGYH^a&js>XU5OW7gt3p$c%y@u zOCPaXLb`+jfAIK7>s{I&&))J`E7M5SUfKu6sI>?Wd>>35H^hZXFK#uZIH~zhV@$cY zvA#S71&N!4jj9v9FE}r^O(nq&ggNUJavsVq@=P?Vur?uI2+Lvf$0}=nrcu8rLr4k0 zP=J$WLB{!lL@57tP!R{I1g0tV$W@0-feg(w*q@?>v9aF17F3YIWoXVD9uv#^Cejld z72K6MS*M!0Gk8G>M39#;vJu!zV%pU9QMhNezK<81tKG+{L>0NK46I)ku*BugKoylp4B}+P=ONU*ZYj3A->ct<&3u5vSG!}f1WpXyLenid zp`AW=&R-0+ziRZ9`!Lf1H$|8e9w-3;xiFuv%F`6p%=IC1WPT0wQbpwr#>);ZFT&a- z0=VyNXd~AATdNY>mHzP}d7x+F!1hI$zb|iy2w~}ojDzpxQ8$P1 z65lb4PrBTHvTQjHQeC6C5#`5K%(+Z^LC#BgW7}>(1exb`H5L7SLJwNy^fjgM*4sD0BA#WFnwltg7w0Z*>zN`;GZ*c@P5dq zo!{lRPunB*wpyEx$Uc$Mt%O7;XBx&ihE~Nmc@*_LePQ3I8~224zd60zz~OHt5SL5q z$$s}__vF$Rs$uO;%6D@T>?;7PZMSvAIao&XK&nD+6DdcN|yU(i|W zAwjP~T}U{n^5t$8$G@rkf+Xn`uEP1zscw;T_-n|X>`_U4!;kkBJAz*C6Ksk)AeOUp z>DsdqR7JK%O*{=Uxb33|eZ<)mc$2f+le0DvTj!IJF&S-t-)ISSKy%i$T9VxE z(8;|UKZy`+fE)|Qu3A$Z(yJXPgTx{oRG_vvx@}L|{3SKx?{Cx}ua_zmq`4T_$VI;s zRlE>UG{+7zC+GKz3%hxfku>R-nVGmsIkMORTj1uVNQ%++8f8VTxn z92lK@XX<$ddkZE=gq~D&3Fv3T;~JyJ{j4dJF2vWR^(2L|%UjuwTK;Y{@Cxl&^BZ19 zEwzL`;^H>epeoh%20X(QqSH>aAL%<@g@1g%Yan@^qt8AXsVxqUus`naQ$*tPe&h^I zBKOFZMADNlEA1eM5c<3>{zLk6_Xf03HewW^NG{{=VZ?ZNc$S%HoVz&FIcg;p%$a8< zxutV^ZzbE2zv8nT4DAUFXyFw1G@Z?%=)x@Fe!Pz;73nPt<43r zSVYz?$&V;hwuIkdCZ}baDhseE2cwya8y6kGkQiSUntWULMI(S!gUiHS_xu7q%brtP3nN-@)N;H+T2R2in=Yos6NyL+ z2H?HpPs`YBDXeFmnI{$?99y-Z8*K!Pe%4eOZ+stKL4j>zua8O|jJ1Xdot>A5XZO)Z zam!%f3z=1jVvU0}yjdiB{K#jicD?T$+p3BvTd1%BxYWkSX&!*j9Uc*OW|fpR(i3!A z=ftb>Q8~M6&T{VA|LtJppCCyZL|$EdXm+)&6rb+$Tz;FIAi*qSNCBj`Pxw3%jP#Zw z{NJHQjH@LIrcq&xDFkE`AxAp)U_T>&mp>0niF)3m*<)r#c@E|ZDb=XQCQxQkMUP5` zh=ESlCLk^>!LFns%a2us27jW%DbT7YaZ}_-VwA^fd~|N@QOZy zELj~G&2`97*rNpg_ef9M#Qqu3o|geSIjpnYF+zy?fzHYFiFwS)^u zF>^^+M%Y&USQ<5nPRbBtI};6`kHmm 
zsM->c@dT5!Szo7;b~as4$0Q5LI0C2Tkdj{Mz&5XHZ;{8@@R-VZ1@tDMrf-Zc@iYFjJrWQ|&+7GqT_IEH1C5>e%J&^~Dqjq*l7&$B}2oFnwdNo|eWU3%5`VGlHVRqdg>Q z{Towz4RZ|Si%H*;?H-;>=b9U5h6@l1vcW~o4=3Zh@y6-a?N@9uIdoBhg^W}tYTePZ zvuo$s44Yam@oOrA(iE-Qrl8wWV--IAhp#u4`$lvVCFQr!zku{jr*M4~?KrJp{?W8J z{=RTa8yogvF9M%uJ7SQut{k)Ep&(~i4*vV{@Wf%$#$uJv5*Ow8H_P=Um6bsr;7|;V zjDcv>i<3*!7PnBS%d?PRSD4`qKe{J$37A9e-#P@q1D3Fnuep zS_wC5=h41zq|Ywr=vc0+_Kyk)WSUj9#JfSv(1p`FV(VDKq|bLriWieTFxboPuO?bY zS01041YYw_SI%g|q?JQnuhm`M3?6z>{w>P0t%Ecd0z^uCc~B& z5unsAH@r6@`$Fdd57n^Esvd<-20+xab-gF)?ZD*XZcZA>vewpSihTARv2E{YizUwI zSA^5G_k_C zR0Q$hEl=*b{skG!oODi5j*NsbF<5fkPd6Ekd1&^$XCQj&fOR@IwxdO(;E z^Q8gSsxuqi|BP<`@WuZ*7&C!jB?MEq6G;Gfadx(sJR^C0dIE@K%?$Q$NnUWGts5v$ zNRUWX(sq(|5~0dE9GpQJzgZ@uoylH_kV8E(q>N-6?O=<*5_34}Lj-gSn?||FvpI+~ zYwDxQ@m(WUUC7qFujX*yhK}K+M~V1WnG)@?#8$?*sby^r9mQaNu|2>N8_MLsnET>C z^0`Ux2qZcLy!X(aCrh@yb+u$DM+22Fu9jznGK^ss ze!@mm+M17P#g*2gSN^~eS=Dh_!q4S>M8h@5?1Q{?C(YI?s+~YwBR>nqAJfo%$Phy& zHt20bMUF8RiuChijkxJzyvfY_hPmwa0R-WzZ^Sm1S-Q`bvvs6QzHmY6?Gm!*Y-dM(@%lDd$p=DtOo7tXA^-tsHtWQgE$Dt(;2C~a`;abZ#O%!gn@rKybJKFa$fK}%N z%^e55dfM1i(#0wyb@(TkM@J$JU!c9#Hl1`f6?`-9f0sf3FcdmkC2Y9N^h%JXLuzVj zh5N*wy}fA|*znPk^ICm;Gx{(_F7OHYJIH=b)bD1K;J^q(d@_F=cE_p6Wc$(j zWN1>U=X7;V&WqVuu>s0yvqfGiQh!gF>(Cps0~!(Z%~H4~>(E&KF?CRv&6m>SUvNGC z#H;31Ea^#-b6NR~1`8$AZ0If5OdtXMtXU<&2|y%un@0S5WGQ84?S#18Wv)v#Ux_3J2^rAK7>Mcz0+4y^c6> zsKYEwkI|`}t?)R{ba&RR4-Q;Vp>VD=QEv@5K0x(Is1I|7pPin`T#F~qD()PYJO3f>)DZC2F3sXD)L``=7{l=9#-kN1L#M@ZG{mZ zX0*UAc~hgZ`dUR0v!cjPEI7kR%>dwXn919l*S2P761e~_sl{Xq9XCXa2+uC{7SdWb z4EWA$ZWCf?S4;*Udr(oDx9o(aET4dn*ZkgUw)tJ^bXhJ#l4$lb?=>ht+E5&ChPX?; z4Njm;3(?o!x2^Uk61;aGTl|2-Q#dl$nTG^yWOjDkb?HHRSP|7dn`5snOyAA+6(WzP@64AkSpGh_g7RvNM7ZIgy7j~2xb zWos{}@p^AATqd_nR&mnWad)Ou)$bi2OWN9&g<3I7@TK2!RR?3$lcOxUu6S>ahvHEo zwtV}GrHMGheV~OIwC?xudl66Hj;xe>uz!G}OiSEuKp=S?!3{{6WU1U&Mvr$k8B#?n z?xrP}?GE@~&p{|GZMriPAyyn>m?Ob%VnRVl()G_8fJQnPet~WGVN$QGUInMJJSc78 zd;hE`@RizP&ZqvL61n+ym*xPP0<$Ou8ur+Wb|SmRIBz`4ej{gVjXA6?|186B1-Z;D 
z>HvT2KBbTk#-^5(O>PZkhOa674T;?jmBTO7+ZiN@T}t&rqpXC@Un2r2=?B&Qq5~II z?Jj58NN?wsc*iSHNe-)k_wOwDco0SZQE0dU_hQqN@2T z4KYdTpMnE}SS1bzM1DE8M)F0xpIun^qjGUHil~CoLai1Jw~3SZ z4n&TBo%P(ViZ58Lp3(p^cneE2n1oN%*}utOJ#|0V=fpL@V`2%2wMd-TR@isvIucAL z77XeGyJ2;OGOZ8G5Wn7(8F!IP%ZZ3H;&D7SbcjrN$7omWOZFI zCjh>(+WB%dyBQA&MA}CuF*NQEixyto*R#O=e$X4k8k28qxCdJtvQ6@G7$#RDmLuO; z$0NRh%f{J}KzU(Rx~dDL{d#(%O1fPVLxw98NC2U6Cn-umnQeP_f1^5dlkKSf7t|rB zieRAz6G!Gr0muE}13{J|){jroWIrg-WH!N=*K&E^_>X1~Zr%_lk7kwzM%$NMEsC*< z1?$;038JABceU^GzG~pn*M5|tOd6Pe&k5K5IO1c-R*l45I@8#qw2=d%m6C7{MA>!fJ*6=zycDd?5p!*aJV+pnnDH0Z+`s z7N&o_UzH7daFr%s4!REF%^=0^!%sv*%CXidJs6h0=95#PP0|UF^*_kg81R1E*Qb!_ z>lR~_dzm2W+vz`%X^Hxs;H#oSCmGNVz5P|Oej6OELWW4>`!KBbT^#nwN)5Q{+jQyY zo=ZOLXNw|)7aVeT<0uT!ZQ=QMa2QJSdvem+M*mb$@KSS7`)eZps?3utrH1R4^vBN% zyQrOc-GC^?g^>RW$!=Yo>>+}PuDM4}eS+pYAxYhb8g}F3_m^Pa@3A>TyQrQ~($vxU z{j@H%Vl1(}atvWs36)yp0lnKEX@Y_;!$)o~ePVjy5ek{BXIX3?{Q7PVyRIXMl73P` z&=TmcvpDE_&<=YBOI?TwQbZCcmSm1k%s4QHG%|=Ik9{$kK>6h^@<o35K~&VUM%utlD!d%y3FYTO}NZ2pa%KV$s4e$7Bt zn^uh$Y0o$*+E1jpWN5gI`S=iX_qAkscFyb8Uj7O6C;Z07X(KnMm{5gR1mAc2_K%r& z_cTE;JFMA-k`o%|HJ)kj+O|#%TkkQ}bmkS!3Ql^xaB8f-q3pJ4%MR&F{W%6?x*$&$ z!6eIY3_Dtb3C; z=TZfctIS3sO!~=Cf}gCX&&Fq(GIhXV&H=VVshn5Q>{P^?(M4z9M27prGQLMLLT+7@ zm}E9?L!IGR4Z_^HlgGi05%Mt5Hf&Ew{bfz8O0>@lGBD4Zk>)dXSdxALgxJzDWM3(- zdT!gBC1+R;YLy{mP{Um>c_jo*82%q7`d=90gKXy4m(>s>WcOQ4b3hD!_hZhOwQFMm zRqde;O>X=RJ)Ug1xBWv5iFZ;vg9oO6Pb66+FG1)LzQ0oaq=Z5-2_)@yCq|Ck0ih7C zilj_yKtr^GeS+~iRvG^cA32G;FzsqBDX9W@kz{@I9)k%r_iO2>U^?lYH#frK$)u?x;zp%JLfubH#AQ6Tq^#+=!-H`@Jn50Rg*^P z)^@o{u=(wa=l%&2{UrEDrblm;qcIkyaHERV(MpwhkB>${{FVsQ%EM!R(yU681?8HP zfz5#9mA1c0X@O&-3+5t_Fw9PD%Q^F?IZ$wrt z^^eQqmE7+dXcd?kF#yA02e?F z*@$*oS6JC$^1F5M?)aU4Zz^GVEmyq_MWWwruANoco!_%T!TH+c1c}twPC?o zOW+tuZ&`!oR*U@kiRi{EqG;oswJHgvY$ms$ROV5aYed@hz?^*kd4}7An2j{Wd`p+}7 z&z+UYyMh17a{pDDmg}HQ>L6qy<9wr8`$UB$Ih&fC%s%cXCPqu=z{Ys2WtP}lQNd2T z52}<37_u$NtkfavroUJx_4XMU&D!duQo{Lupl;iT=j((-_}$p;-q9E3O|(d8u!+a@ zsgN{?xP1q3%5mtvMYdMhc2CY-627jk=fu3+XpJX)8F&Ou1b7P&Hq2}ej|qEix0JP? 
z&G=w=0xpED%t5QY`d)8s=Iu#@&p``k=)UJ_G(Xn_5{cW}9!Hp={5GBND^Ok+mpY^M zFRbDlMXbHrfa|jQ$BPp&sU@pigKwf9*?;khe-z|QvTz@$VPrN_n0BmfY`$K`RGBCT zw2%n6C_P}ai(`G~^@N$Hjoo;4A$Oe{3Tp zVtyB7^b0rV6AMayeiu@wvL^f)17vhkp&+ycAw6k^Y>B_?0{LNS93xmTz?Ea=hfq== zYejc29ha7k>;i6fFAn&I;kV@?rL~%45>jIpzNi!ZFYb|pBVk`AgC~-xoA0#_bJspZ4`b%aT37eSHOR>xXX!tAt^5X`Y*guuv60UT1(iQ`WK~C>i z?$cPis}@%y#f4_Xw3Ww6<=5X3bW5Ho4_`>`YuY31Q6)lCXr$f|m~pQd>hS~_D|pvq zCh-HSnGnCyC{Rasl7f+{WAbq?siW3laW0%!vm z=-IP^R0Q={X5y*aa1N%O^*d6~H=?_wDlDatuvbnIhiJ=hmTh4V^g+i{MYG2}cgvP; z4~&8yQ2f&6#!7RZz?K`YAKFh#0axh?4pGM$@@}NezIRaN=G{SgWVYzH8&Bo75C|Qo z9x$wlU)})&6N3QfrvwTU39R>XLS##1@t@33L>^Pa0hTTL6I;}$&*aQ9nJy@wYWF7P z_S0VbafY%}C3hKn8Hl@VbzOENOZN4KU(Za=8i;pfgt7F1+ni~Ac5Z~j7`aQj-Vv#- zRy4H+&$OHX?1yNdUTe?vyqS$i)C&T5U#K-df`mas++H||SG60_0@KBv#?=)$)j ztn?_O1KJ0f1EV>{u^d*1L(!-A1l_wy$goq?dX$tT|wk;vb z@4>aN0e30!)WymCFMnJ+r=)>2qDZI>u#d#Pb2;lBe9_a-q7d9Fz|-M)v9XgxK-}%G zUG1*|`l;y}@i_$QRQLk&k}}K@Qm6 z?0Z*LMelOh0)QgvJnRF+iQu1K?VT>CW(1?3K^fbgF};pA$V9@B$t$LySAYj5(&lGE zorW{Lhru$PfaeFub7u|DBcjhqexj%Y{7y3StoBM^UCT*W{qvV5yW1eoQ^vBT@6Qiq zW$TEx_l(UK@~lBj4qq7r#sPIfr zt2w#43Sd_BbQ2qUBiXHXBR99gNJuJ^zV6TGM4tMPSAl-Bcth8l zU-{EkGmx>g0hN)^Tsb@@rp-ySQozfK^|-1_p_hZifMgs}{dS5QY8mR;4c*yHXmxBtT9a?a^$f4zn+ zU)o>{uXjH9#x!5;Ay(#(1e)hasn-c=id%3G_fuJ!57l=|HbKZ>wrQHv5>kbe6>opa zVj4f?FaFOj{~P~gsv-Y`(O^`R*=aaL5yT$Cd>z<~dGu?kSssJJI^q*R({+oN8V z1`6@3-<2RF1o=heYp0AmfBuV){i_7{Osg~jm#MpG4w|x6wtPu3Vj?-w86>0pz(3#Q zKf|isR|E?SlyY$m>1r_cH`9$Lx@YOHKNvh?L)qxE!%;cSu2p{4{QXy7US<2OSY|## zy9q;}7_FjF_}+}8yRp+zd;ag*8Z6d7=djxp@hKL#ol(g4yYO#8`tW5#4q#d$xT{Y$IV5aNwG@MLTup`gfQyB+z8p>gm0|6&sam7)IrgSLJ&u|l$@P{t0)l%P&26iKoE&-Vz^-R4a?t!%Cr#97E~^!c z$IXB8*uSeae|O}`0GQp3Hl~O8t+sDKdx~>uoXikEN#sps3b?#*jyqP!!7Itvq-4b1 z16l7k`2{LsI@cB{4E!;F^yKw?CQi>YjfCzRI)zd`Y4ZI>tMX%IQD8(BGh-mdMaB7W zfkQ&C=TUTMU+>qE*c-rexLA4ER?nsSvqrf~r&PHA#>vgCzP``2WXXS2{f}&PTleJ& zouaL6`-#zdFLST|&o`{2gkZ5T6GLY%K`otdFGH0P+Pk5Q>wQZf^$xnhsz_9bhsoCW zsbj<28-GV-6^HVd$#XjzVC3rRREk33Ia`rvj)&774qdPwT@&byDEUssBL}+e4>^!3 
zZbi|xY#f{a44HSpRJw!UChk&S5$EuRORE`qHHx#l>N8?!ofu1jJ6jY<`tFX@jf|zl zzKu%M5m%(aCZ)E3v3) zAt5eUDE(~0)oi~Zhk3n?_&G4tl`<52iz?|(_KBh_Kg20uZGXO%mybC8lxs*6NR@~I zCMN3{csYJ^*@d3lxdtC|%-o#9<=u{%x2C=PzN+d5ug4+2*IB&ALID|@a3?&A^hKU<}#d8P{y)YE!ywO53?0k`h~G7?jxu;;kIH^?7xiE&-=JNS$b8) zXzff4;7S+;#+P+yzu<{S(>Bg#N6Us$7d-x%9{6Wx(tiT;=Nnk105((=iq8X5GQe>! zc%A9guTJ`LnBM0o^IQ7W-!_H)ubiDv9UuTq7_r6IY+~=r;tUMc_!!yGp>z=C3t+Fd z-P!2TsWMF5xI;1lvDZEdu`plAk0vrPr49~OV^MGxdZy3tvH|lnG|?YnoY!D_ks?<^ z3tp8E5E*xy#K~UZ=h0h1F4l2B;yD&=r@uEgv<(XZs!&?5w;j@5{bqqkpprRts8+LH zb53n14+yNzYLY1=I(SkV{RRkg;ZY?k)2? zzScRGr%`GCa?_H;f40Uz4G3HplGVo@`qe#kXO@}{`WEZs0Qp_E+q5~MAbNKS$ZME& ztY96DH7kIA5RUNV^xNoqvns_4@)j9CYO0hujQr9pQSS-0t%y4(!{s0|A?p zFQALvsr2OI&HNXf*X|b&6&do>_Rv*5;sc^hZ&Thnn3M%|gQYt?8WsEQelHIkkkA8_ zLo;~#Q{?r1bcAzeiST!O4vX+UHivT-44BR>L>6Yfv5@S7D!ldq>=!))gB( zHJwW90^+cfUwd7NH6|y&>b#5fZ=Ca58eH_AV^|M1i>K~(*ZCIy^ymjf_78cJb-al&CDyDil~?iC@xmqPPIa}W&b2>CAS1t>97gKg zj)=b+Hato%6c6ol_qG}dPAInhO)?c zWjT9y#!3dJ!kt5T22Za#e&0CBF66uiSP1d24IL#e&`y0xyRs$Op z-(88Y&=lEQ-7t&wT2Qfb;CY*ES+%qY$ETM2JIN)FUWF7oY#KY{&tj!xTPcBe%aksg7kNrCtW*E)J|GH^Y$HlzURsR zV|4A`nBo6U*i3Txr(nQ<(s=!SP`z@nv0oa1D+oEyRE5Dz3~$(E$}D?O(Za(G;!pr4 z1GaSdTt=CpPdO1x_a9yliz5nlQKUphAcu3nhJb&3JtvTx_@YY{$yn;mvaUnQnAiTZ zbvz4fFKvseO2D!fQPQ5U%yisxq6gLDS9?Dt*8J%>9g(DyKd7%1*`WxfWAYob_ zcaO@tl9QT3$ty#kWcf=d#JQ_;)$S#R7Tk+BxMMXo*tOPLs*QDbopcI^$9;`&4l3Ne zQx`K-tuA9)rgQ!`p5;_9(0uLpm5d%J8Jr|;9M!2mGZ_EX77+S?P0xz^mQuvOxG)PA z7S%W>y!s-nU!p^?yF(B{aA77c`kr3aU)Ge}C{Fy9g8^CM9ncs3W+H3`iJhIh^YN17 zlRU^?kVP)X_FE#w{XJiF5MM@B*~%FvvJ_@!6fgI%H=3in9GYS#xoDoJ9!HXPbzH3% zZlH*ehbNEHgQH|9!Xxoaa7}70!I@(3mA$?Q>jqBfug~X)2cXix~Z$^#AD7$%GauM;qBuAg6v#a+_;7Z)v>gWL}A>xtP2@pmM0N@=9T!38}(qOp5F3djt0%^L!9H* z4FyW!QGWYZ>_pS4uyh78=NL2>yzFZojS-0ksHPxoz8UI7x1jyv@- znuERwbqGKCzqJ4WIWXv?eBU2qFq4#>KC+;=Tt66d5m|37N_~_;5TUfuge@QRDz@1& z%uOAi92PZZ*~l}fI|=Pe)^&wzHOHQIF3CmZ@vrFSsydoB)V z?)1iU4Cy)OpTvCMQ|B#N6}u*nL&?RwuWJ3Z1X56?X@BX}$k>AS_r-YTG|Hqn!s?4w zN`FCNMYQK6_51Iu7?9_kWzPz5w7=@a#9~+(=@B#YSNihq9BHmJaTwq>4P4wL0j$#J 
zB0d>^&ogOhj?@!ukWjDg6;&s5SjEdp>NG0`xdaQW^T1vQL7(T9lH~NcsHXk>JfOZA8P347v9E+MyWt$C83kJ>xlj z(Ptd*`^^D&NGW6E%iZG*OzPJHaRaF@r$rqe?92~ET~#XTH>6XJN$ZZfY%6y+>loqR ztay@5ySh>)k#=P~21j4b7|YL~QlfB|=>*32DXuC#D+@m0k1b94?{(yCdJzXkQTm{& zo-qniV^w`N$@t9oh=bX(xMG+2jC}KLm~!;fh$L#TV=Sxt&pm8LqM1wi-Oayp0e_UD z{uhydAW$q#)9e{KZqp=b${|HgY+o)FjrkT|0Jk#6p=J~ARYj7!h-91*4mi;$?UJ(- z+nZO`Ox9wa9(`it4S8pOCo#4r8v5IWd7&7-Ix~Uk63vJpjeh8vTc^g%LRevP0UMv7 zUxh#1JbqUF_wO^~@O^d$5aH^Yy37XCca1Y89j%lvXyC@4*a$0N6_TLJ1+mOAU!~$9p~YW z>?0-a{&hT3Z{<1uEC^R@U#$P<%Krbik#^!>)O&&ZRn1}VU=-iJSt|`D>)q0>O9^k! z+1QZ8K;wiB@fvRRZBF-6KE!^nwEv)k`_O;9gm3hGz|uzSjMH1Jc$1EY>i)HMOGO80 z8qXh}sO+Wo$%*Hys_nWR`6ip(&KJ5@eS^dJtt1itBK zow3P+Sk9Stm!8O=p(lE8qt)efCz z^j3yxWMzN`P<&@V3M;ptG$e4C?fB{TG3mOb*O7H4M2^A#+t>S&qA*Sij&{;Ww;ivc zJkA{_NfkUKjG`j5Y^*bD=6c0U6W)A`T}B!l!<=ypQ&cQIK6ds2OEL-c$-;4`i}*s|^uLTR&bjTd^2tiD<4-=I%CZhzHxX{P;R6mwJ^eJ?|rF%PqJJU5>o z-x(!VhX$-C5al8wX`6ans$=NncqWo9v37&Y>vK47@1Hu-VT{k@|A-i#EplY z-Jc7weQjL1o4|!xnk6^na;x9_TAM`z%NUbA0z4qi(qXz{&rs~*l9K|z0Bm&0aYsB+TQ4AWNnI|{`;jXd8EM0L+S z)#?YT6V3&%j?%}E>Jg_*jK1G1MSi5veq6k%XsAIP4`G_(F1sdZ$uZ*!HS6aE4{=O5 zCwR&R$#{Ozvx7V1>{eT1_`k^i-~P%=;-7*E)7P-F!W-BoP1<%53uYsD)q}x!-f� z@`GL3@AEK6Aez$u-#INg)9c-Gn4zNZiPPN|Q&MJs_GmPkGs-0KZ^ViCSCR9x3@ zy#nM7wBBvstR}4Z5S2C=!BZM+O|iD_dDSV#`zS^jG0#mHe^{Faf@}l`ntxBv$J5Zz zp1W$K1@E2k@8Bj#o(i0|Klj`f2?G~y9x-OFxcNNQevc+t_$)aVrUf4rNd0}TT)q5b zQy|=KpcJg^>Fn&hySL{rj?mcS|J%9|Ay6efO(`IXim<~ViomIB+s!c4_wAb@xt_Sn zx{XEM;eAcB?Fm%#25GN*@H1hV+d_%kaYWr}ldBghk=jg_?a3oF6}^p=N9PvtVWX}! 
zI91Pw`a-hmv-?WK>QSW8yOmxi7l)~CZg*NLl*q#$eENlV$K_>LHVa4iJqD_yQ(^KD zUFG?S-(hGOjW@ohbg^=6s}eIg)ce>ZQ^D_3<;hx#RLncxRw|Dd?Nld`N>03mqTBFqust%*}Wj=EAZ`Pqy)2)?06Y0<|$MjBl#Lv~p>^ zp7+MF@4zi&t(lV{Eooo<^;?zNP7Pr*J;s}+ufA_H<%1>eq3p6^s6r1~V(s$=*@mTS z_t^l%3t!#ZoKrNzJvjNMp|u42$XXY=-sX#@UFwPGp;ZZmf)|0!Uz;V|;Gae^D1QU&{?H&m_EA@oh5GlW_xnz-zf!)=2 zI)LtlKnr(JQ6k{S>bs2%J)7X9Lp$u1^1Zo*rb>yrZM`m)*xv)MPb>N&GV+{=ONn8u z+zt1g%{-sMS3C9Cc|nAr`*p`skc1MAIh9~JBjlCGLAC$wU)?o zfoQG5@cV0;;-LSwx79_APn=n~4<|mFn)&}2wotM}*w#O(8$OnNs&MwD3Jun20%0Ya zSm(A*ZvWE%T9-G$r;l7ewZqw5Kya+waMSc0@y^x_)?>QtL(A*%U$Jz8=#(e9PB(f1 zJm=67p&ueFz#t)Tmv-yr2jaA(kzPTkOT*@C^e0s83OlQLG_V9l3ZnSQFerMSazQWq=nXC;_w}jwf?|$V67~s-b zAsex=M&?5RzWbecns*{R*e_Vn=nGLi%V>MF4A3_9m1a43ymdVbw)J5zeGKHuylOb< zVP=!KmvcL<+*R>JCqUsk_& zW4C9y><@aHdk$XwLDuVBcF%jACYKZ<$@rp*K4$wr0@cf6mJ!P7o{i{2e7ghr>S1kN zeR8fUrQ0d&>fOQo*y8H$`DAgrilC$@EEfO|y7Gw9y`Z*=c=9B8y6(DKLAxAuHDi@) zjf>9ak8Jm589JnHeAi709jbdkv3cs|IbC*r#4`DKMor`kRP@^5gLZ((dg6KXZ4qje ziUXP3rJnZ$phr9{2=-4!Y;$!4kUptX=pzV@$Eq9piOlC*@tMf7Y&OWdZY6Zh^$BY| z4FV6i6F$vGU*k(vvkN@NAe^VVt3ZEtT2Qy22xDP9dc(UKR@n;wzAPt3?{RP@>!cra zY6~{+IjhyNvrW8tU>*1gdkV3JI4D><@Y|cEhQ`Kg6oHCO%D=Z}c*rGF;#8 z>w+i;j3bp@RvtiY-4`p*3$tT{;?Es9QP7sLVdaNQPe8Br%Du49)2dGh4v&Wenu1Oi zQo|-2r~TshndbeTm*J~BjV_so^AoOhU!glXh2OW(Xr!~eA8i4yD-V)__V;b;1K8oc zsr=z5J{S6D$1}QNuaxmF+;eF`>)Eb8FlFAYluxFPqjWkDxgXd)?CvS)ANji)l}HCR zxfpYC*!bMq)8ubRy3y%be0FNdRd#tw|1CL@%FhK8^i?~XM%x5$gU25$^*c|4TZ13#>>#bRHg0so(%q?2*RY%R&lBit zLuk$SVI)Nuv24QK_Dio{r2DS97q7dTFnrdF+J~0I!s=XPDerMlaX=?NeMo1XWs8Vp zoR?qsq@Om7gAB$G@NIXI?e@?R<+LfK$ULTP`dA*`x8D4C?u^xcX196xp)Y18g{OB) zBvgNKZTo(XKb(Z863>~Iwr49#pD66atg6!v@+uG39 z50{Obc5Hq!K0o4YeXynRO+Y>UXrs1n=i2;!a6Hduw)6|=w>r?6RkXkGzK`!Iy;gJK zyrF5wfH-3-aRGeaoU&kXG_{|5vbS%xFr+zqH)#9;Je{wVI{Ah1Sa@naZ*+dv*jWIx z;N|Eos&u|sqo=p{e7kRYDh_(u0#AeWfNi(G)n9~gn0IxR%{Y#LZLLDC8)0P6%nUBJ z?XSz4yTzf>k11iLT}>IPAjs25pWXS(9Al3D6UsCXpYL?KrNC+B5WyGd*2%^cqyeGD zntj&G)IIdBN+SF6L3sJs;)d1U?AQ;>mAf!#dy@_9cb;{U1A#U48N-PSAELJVd_cSJ 
z_+^*x&5Fi^M%>oy(!m>Y5&Ln2ROc_3OM(IjU5JiY9K5j-9X-W{J={8dJ}b~Y#k1c# zM{Iy0d>^Qzy)Dz)*%~p*{!E8&VEvo0LO+YEOQacBfAp zh}~Vj)$zKLCF>dTee`zoUa?=i%cwKZbtHL(E(m!6ndM-vz3OBGZ>1I6a)x6ZN%mx3J}Lw75uGI) zpZ**?8x}6~p@OI{#R`F(F)T02m_)LksX=26L{510>g^L>>r;s?{z}6;p%$X?Wlt1g zD2S|+wfXAitT&YXVPK?8gy;s(>o5bHS$>9hdWWai*@7j(!gKaxx`p(AFg@CLzhX*DISaP*@bgoG47WIs~nb6nNUN4%>M{our<%cgo;YxJkJlV{Ab#$yYX zx%f&r+lLbL)q-a`&eDasjczKL$&5q3-H7$yx@A2J`c6bftoOQBx<@9L&Tw0T_1lTQ zZ*CsV-#tB%wT5UfKkx^n5=QrPuM5@b?K2fHyD8695|k|u2*G%dK8(~UUw*GY58Bz- zFCy(LDqWH|8*6ANWhS!kB5TGRnMhm(z9~`2^K!hk?aNo+2xLs53*n#QHh_?Q>#bpZ z{xTV8Ev}Cb@!irscUp|3g!IR* z06E<1i~7f0!SNtt@`y2J&y((_895_3c(EM84Hb7qm=eYXOcaM?mavB&EyIyrr{exC zx_2D@K@e_y0kguEOKDo{B)MTbpO50km{%Qz1WZViHSMZ{_DB=39wBv{`ar9p=S<}H0#lhxZ{96Z*1CptV?XkViuXT zvBl^;K2D*~J#oTT5#(rdgpT#crp5ij|IQk2)5!hL%Zbl#h{Ad@YS^90|A1%h4HV^@ zn0`C^l9M2;pU!Yq%H^a8M)t+qeY(UYdfBFCmZozhpP>&i!FA2`kBF4))u*Z@Zn6U( ze${x;ji%p00?Lv~>Y9UxH$H1uoGFoCEtHEiY^QDccy(Z;q)?kxFe|hys`6?e?nlRb=hg<|V)Av@HM zNhjT`5{uY{4XKS`tf&cANO3O1wc=XX9v#}lmKLSVKE?N0^iJnjj!ZA)R5w8wj}&Kw z^;aycYez|oB6dn07aAZtDe8F<3p0N_|KiLvSZKa>LRVYhrlJzmBtNP+(~5C#A-w2( zd?Di-Kl8;J(y?N=#rro;&a00+84g3&xy+SwOSRT_wub41AckyTMhoZ>3O&cJfDj>* zW4%rFewsIF1L&P(wTPqC*41Xmimr`QN`1u+!oLM(Q~2`AaIc99_j%8LRXTNBi*13M zi^7Wz6MZizJxm23GUW&=%rp?V&`;%1khb%pStd`8!6yE^biX%Wbi<#=4VG6mAL%$j zKNnNLbK|?NKY>c|uk+Z@zYHBY8Cv=JSrgUvrx8m{kO{9#l zw>GVO-%(jRsD5NPTr$pZu(3Wm!+}@BS=c6YExofS-7a#xAAc4w5>}bd|I^>z9;oml ztkX`9>Mo|LN;A)@rXGxz^Z54aDw(z0-x^8p_R)-Ea9M;GTIr5>EZgqVQb@xgM2 zLKc(#BfHWyQrt>GRUXtbem^aE9Spz_6%%XG6iq8I^`|iYZd|}gAg2)CH8U9y1u^da z7&?umteA~*1X7k(qS)E$5sQxgPO)S0MZ`5%z2RuDEVm3AV{am;3RHHd&>Hl1bGObn zl=zrTchWc7A)BfiAh)N~NPBidrGxoA$`_R^ZQ$e(WMocBxS#&!CY|g7E0Hq0@X&Av z&4c@f&Ujj~&}(|fXF8tvs75H`9<#`;`b0TCUu7zJmZ`gT)9opC!a<{7!*Mok`sP7Z z+wFjOqltvcjpbdd!a`Z+vd}jD&h#{?BD;Y^g>|X@&abjM0cJfjo0~!hit0)>pP z@mHE7a4KwD|2P8HLzF zrl#xnu?j|3X^_D0-qi7}`^c6lYqRCtG#-8k0`X{44$`b?|BMNITHYvoWP zedriC#O&D)v&`{~HnZe{WK+%Prnl(mBq@>085Ap&_v^FXW2F%oKg)eyLzCbOwaM+2 
z`WTf>or66=KOG$b$~NFtZo)*_@L!#u!>-RKooU{}q?cErJml`D-IP#WIU z;7~`;xs3^x-sCOF4@$y(UqvkL;}!-@#tkMllXh}&_^z2Ofw7qp{#kJ=*`)sSEV5sa z3F4;dg_)fTHs6Q&i^GR+fAC+pk|IynpAmHM8EgKmQncDhqa*-F4vuB&c~`+o-H9c^ zrfJTwW=*j_qEsJ~*Y+m-t=ySTjE`HxrR&zOFLH!2*~ss{gR+7{9l&NkzN!1A9`#A=W&FBkDi1v8RpAIguq>lM%$KpTy#ATBH?UoFc@r*OC zgTt@0G#g{}4J(>7M~y}1b;Z~9>>T)@3B3#SN%vuEk`jBpwK=pt)c<=Np#2GEXbP0< zf{a3lQbxWM&EX|Qi29K$t|H6`!THE|`dA!d<_5%*C( zKb{kM7M+D^0p^ojZUM!3V9a(wz6?JzZtNR32u>JlKY!=_F$ylF`AMZt^~v%Ar4*LR zFQJtJ%BRAF1XdE{0npPp(wlzEkv(8-07kAD&7yqM*J5;$9Cf zPJUPMBAS+_qVLLVkwQ&3@5e5d<=~POG^6Lja^}DlH|v}}ze%bJ_Xy4ZqeBa7W0ey6 zJ}QX?NK#8m-A}ag;J*9Va^T4jAa!upb+qwc7^Qz%J|5Xz3o59xb)p0XTzRZcg+;@@ zDOXYHia0zbEK9eHh(PhP9?z5Ca)qgUH1t#ikGfd1G`;kvw;|P)jT`gqo&GA-1PP|NONs-&S?a!uZMM$YaRWngfOcyawSs ztw)W^r#PUbD(uoSQDYQJ+3SofLz-v4`N>EWZ=hK`b%4PKT+u=^4eJs zUpnDW9plhJPN66vGNJc{U3A6}i)0n5i6+@@>r@GD%5RYo$da!;`a7xof6^blq0)bq zpPNgs^g2Lv!tIsX#&(XUGWsqb5V8(Cd*V9+`ZEaEGB}Y=52)iXO+#c9B2XzR3?93} zKVZd@WtG>6Yac?EnNEukO?T-LrM8Y#DT{#B`96uS=SZ5yW@bJ}T9>aZJ6ji4ymFs- zaq@*RN%#&(bqeTPV&q-1YZmNkqlSe%U$*9 ztJ4^CTItQh$<7 zv#X*_4t29D2y3Ub@gL=WbbLSA_#u?zcXndrPK8Hyt%}4<4ddFiG+`AbfcFnfy|@1T zXRbV*OUaq1WokfVcardx6e)*ncC-g2#md19b#whI>5xd9xQ-2zj<@vt$&WA?i&}Ww zQ7{ptgDuOWms)MvY1tCvUuJG7`zPBt)2XL%GxwTQ2do5&cZ;%qf7mZm_@3ZxSXL64 z`iWDJb;dkL7?km%SO=4sI^m@-nqXSIQz>U*v+R)8sJUR?@ETw{i98Sk^8WBi zC3RNrn`zlOBA#es=^#G8YqiC(-5SFP74_lvwHY@}!MkI^$d&b9#)l5shWENCo zdo%S%JuF;sJ1BfnM-b>HAE{tH**kQpVb&CC5$Qj?@z!@@*8zovC29D z(QOJ(PLoKeuF|6`sYs!`d#1e)r$M;zdY%-U?1Ex|l1cj_d5faJWjzWPioZh5K2k_# zee~7+#@#}JNHk;WeM0KK95NDeAZM6WI0}mkg;HI8A}K@nJq%(vH7+f`vc=I92W;IW z%glx9ItlxzFe>$=f?d?aWQrkukoiZkttE5FXSYxPh!x~v znkb=Ezke$g#%ZJ5g^zxt8a>n0YsBs5L?9RwtQ66+tV%qd=>djJstRxTm=v}e zj@d^iB~|bq_KpB+6WS*#bi<68_Q_!;586a_IYFWn-`~=MEut|Wtq|tRP@iL>zsUBn zevy*U`N<3-IUJ7B)1TJQ%pg?=j>|{<(99`b#>5^{+h%G`Pb@8bz$qQ%oc;(!iz9PN zDTyqbpluDcBKgf?FSPqItkEkOu^$me;W9yvj)U`iAttfe&u^tzaB3?weu8%_`Rd>E zZNI@77foD3+dPgxzhp+g9eVTiF~;bAIdv?^K&N=gSv&tx7h+`)faTEY!bFP!9*OtA 
zg=;o)X5bu9-eG!6V(LE}ZS}`4HSPrXL;O%>d3U5|R>W1bHq)82{eQErKf56>83hPy z@19n9+h(F`>p}@?S44kSbt5`9p{<;!|93IcnbrQ$*`@PZThh+&n(x?mqpovKuU} zX8qcV%7-6GNMfe{#Mw_4+FHCH*s;o=Qb*HJ&0N5uHbR43n=N6SDj(f2z3JbG!tPpa zM0ND3nOw@$c|@tdO1qO*jm%Uhmn4vS-+$E0}c_P7zQQi zGY8v&{b?ni^CANx90aXyq4e5hh4f5q+H>joB{HQZz#xvCC-V?jO{B41I0h?47a|Ul z;g8*v&QpcT%KY#=$D}Nfk~SCF+$TMbt?nWh3np=u`QRB#=W4|rzLbJxO=QZ`W$>lZb6(stGY~81r-x$z?0J+rJibb{D)1RDRp8Li>NWbk`ayDfZeO=I zL+(IZ&r7diVBg~neZ%_J<}dXKTjRSqN7j9ouMjh@^Xe{#k1R^$(reuw>+7nLL2GOA z?=|MpxYOt?-hadByykf#9qOTYIaLOwbc0W8@mRn;Wh6D;OLw~yTNn63K$0t_2r+of z^H;~xz~b`Af0y82-<=eZ)W1Sm_2<}cnivM|&sk9RSvQNMYMYB#CeHxiIZmku7`o#{ zZg3xbeP&gS;@cNB>AtND6C{W8c0=kLkaTWy^mwLFb(#}X>zQp2iz`Ppu&a-C zTgaw6>#DWBwW4OBjLIC2Sr-V8g`bAyzMGD<=pKe_-4{^|>47&VnB<)>0_TU{aU_FU z`VP!L<<&yPnD=S0dA}p|-$Nwdx$kH%XYW0{m<*JLc@Q`{L^%!es)yY{2( zYS#5ydlFqfgN3g>jA#CO$ePvq#*JH?eIIHPtMunwheC%$COR%(^!NN8Y!vSfUpjTW zou0-4m7PZQ?*8u=>rd_N8?S68S-W$fZF>%5BS{Ts8gTWVFS%y~*Yx!9ynRqtm6nv? znEl#m{8ODsyk73zq}r?z2Lhh&9Eab%+b};g3)-TD$125zneeY*Dw?w?L5v9ek4qmG z=GYL5o3`0xgft!m3k#W74oL4fbs;dwFx5ldx3=i>Qw`!2w}?;+P{WnBX+x5>b>&*> zmTwbEF{*U7xVf;QbqeL?!g*9!Y}h$1-|1W0CkNt7qtZv} zkImt9riIe&48#O+rM9E6-iSt0?Cm&p(smXFqOZ#ht)8z+Zrp_`T6$_n2;rWXQLKRp zQh1?fdQQLR1EOVnK(ucAZ>0l7F9A>Bk+jnwz8*pGH`+G1zvktjWqV}vPNwicG{jTX zjm?HVoSIEI8hGWu?mEBNd+L;CYf`3b5YgLCX-xf4AMm`W2>$g}Wa4i_2f{6S6m$S@ z^baWMI(8;ZfV!u zX4YyS1p`QWE?mKdQOgrm86h>kmCYnuk7loEJs0zDTp!5xT3uHIk$qN%(-H~!Ag_`V z-Vko%|HQtO)e#WJ10t7EM{q7=QM{*9+!#m~2nCtZx7XAedSdpryZ}#t ziHQexNP(J_m4mc_`*;;r>+i}G$*qedLUIb(`et}R4~LQ6b$WzW6en9>!E%0{WyfT- z)SSMuY(oWw8LDIexL<&~r7;hX)w+FK5s0Xd14BvBTLau4#(~S+HzA31 zmW4^%tclG>QUgJcyon-wQJeI-CWt5Q#E!>!@u^ig!rJW#+JV~=rQ%*CxAfaWpvt}P zIX-eO<@lTQj#C2RdT$=>e${L7Lq9G_<3RfRyvI4GJ0VmF#)^|gZsn9QMut=$(Y{Jd zP^|l|nbge~bqFNK9DVw0pntz|`;qirp=ZBF{-7Fx=eik}ZVRp`gbyBFaHqIAFYnEG7 zGx^32@bsP=AIZw3)-+s(#B8Krvx= z5qGH|rb4BDq+w@g4;&jCi~Y%RBMG-C98__S0dq_qMojT!Z 
zW_9uYz(LZw5@Q;EOc4z*w20s}(JK<#-2Y_B(k}W#s2iC51L65O>cOv3Fnqo!i~OMa-&YW5jnUR6VfwX{r19<>d71ng#h6k&S8mNqA;A``chE+OPA$A4it%iqu=u zjQJ_Rs1QEl3mo+QYt}c14N)xxKmWA^$y4WGaV6#Ok5mgH0KI_0O@VbR|n3 z;QLRYcmVpErw06A30>Ubw)?PP#E9Mn%GyiM{i9#yy8oD?OT@Q1Voy`hB^qm9_wGSz!T?t;{?q(-G&tuOjlK%MinOlidQqe(<#_M?|^PxJw_&&8e*7~+a})y zV^vw{|6D(Y8*qTHP_2vHwM{>(e;2e6HCu+dI9r;x*G8B>3mQG9Fc~c?L42xMD<@kn z{j2Jah3r=vS|gf=L48Xc#Ic4_)_7P!TD;JHj)ogEMqLE|Y3#H!dc!cGmd^53rMHhT zTh(W}`ufCE5mP7jrgbGRj;_U;BE-zG*)RM&JeoO4YBZz&(>q4=YP^3BeaD?XN94vB@U9mXRWqSyZFakjKMJ~KygFQ zyrdQqAo?D-q94@R!eeR2!vzaV(L2Z&8pg+4N;a3BSV)u9mmGcn4=?}b^gG*;)W1V% zYo|(k9HG7)(Pzd-{s2D<2;!ET&3UFq+mg-aps^uNs63URNj}$c!w+&7H?*hZ5FthcqZGE^%jmTIe5R$e z{qE%k)q~Bf@Luq6c(*7`)B1nS z|Nmd8fBCq4II(gX`ri!^GUb?geApA_(er^?YNv$C&+(2enN$iF>^<8;2ATo*L8C1` z0u?A#Gx(kC2ONStv821DRaGWyi;Pu`Xo}mW*3taTGBeCvw;u;0! zx4q+rLN%bBms{TbtRCmwjA5FhEN)eIy@@Ui<8^{)y}1b5aeb}%R7YyMV1x3x}+e062(Q`Ks1e{C1(($zZz>>h0@{RwwufT`6vZB940BpL;y zF@VVOZti+I|G-4UfJs2_h+SADFz^*C=5Xx0Tysxbr@dL3?q6BK z5_+9%7?s6Qg5u8Dzi1JwtFzxaRe~Zt6#N?C10;0pe2oo)`k0{-72}}TuS%UjVcNt&WEll$tf#x~2ZA=k9?t^slJAw<04jzzFEB_8I(UDH*fGq>%z?H~A^! 
zO|uE0w0xnIjtck=xO=qU{h7hY~kE#J$L8#jnaFCW8EiO#dKB8iClH{cF>f3Rg>8-dZ~JS zXV}Nmi1&I480Fhv7cFfK8cnbQ)jcWe!ry5s{>!=s$y2}Qjg<$L{CpGramZhzymhh3 z_`vM)shSMHv(#?`uGmj7Pc&-mwQkcKMm?Xg={xz&ufaENoysyb<#6Bo!<%}JZ#Pni zsu{(ZFZI-vozlwL85*{$i;yTof)ZO+PnJMnl{h*(v@$X~W*%qY&nd~SuH$+MO`pIT zZ{7w<^k(WbNwU~B2Vo4!-B{naNniYe%ypP_UYvnD45=Al@r3Ss$jq=2`%!XrwNH|I z-anT#9`$DxnDxJvC zk2LW+-2-0NZ7Q9&XG{6T71j&|6|D3c^k1C*tdux+TEf+-C!S92435pWp6zGUdAfGk zo_*Zn5PF`RNG+L6%}q@mn=Eh@k%4xytn=71PBEY$$rNMP=z{#2BKa&1SIrH5XEsu& zN$NoR(I|%C_=iLB$=^NFTU;p+6on0?_|-K#0QUv?p0w-6#TL8vZVl11bJC^n@qR_; zH?r#}+ZW85awBXwNqhx`7$@N#eGmMuuIC7r{mc;)p)?s=UnY8LD&qKpsQIsE2|vgK zE$A^%AL7Y|>yien*8GGWR$E|0$NYhyF-xbDA7)mz*0O2~q+t)f90sNRr!5=7XSWEG z3W_eLeQtF;*soFV8IQhzt|UU321ZV&X`fbhsM&rk5AC-+bh7)`0o1GO5tq5*SG~*{ zHoPh-_bZzIi^9tMrLev#H?~mtt(v;q(AwPtN)iRBmbT@J@v&weKID<*%$xS&9*J%hLA5cTB(HDkE&aJqtWm)qRw2MS~12vo{586a5Zjq72#ckOd-A^zjKLc6~ zU%Z`flh?fPw)ijNpId?o{}Yq?>%D99kkpf*MrG?h=H(l!1x%sLE<#)CY;6hb7wHgn4;}@p`cJ`k;5}29!!Yi%vk=?Vu zYuSyXp_B~HawQxP#uvPSr31|uA8BNiGnm(KWGO8TqjBMQ!M$m72hP`{l&IG^GV*!^ z9^louRqhg0y~lwmMy-@x035U^a6`;+AG+1&u2LZwwvYwiHMo@pgbyN&R+A>p+ZLL5 z#Cs{LAZ)nk4zxaViB-qjZEX3Ktr7KxYOGY`vu}=rK|(zZk|&^0RQvhTRWs+p70LlK zb3YPSeRSi*tr{YW{DPYPQ0!U?;*j$HB9|Q`FP#cbfpLPxUf4I8{Jz#lMwWLGD@6g8 z#JU)C=>ww6>NwPk%&h(_2ifJtnQn`^5VON$pxmC3V56V>1Q11zBw5*)>ZwT~;c>}N zz}hoYg-r>z6bP75Db_aQ-+4-_`LR#6wAnH_$QRH0K5+XyhC3fYOcW-+I6YSJ<^!yK z-B@i`T^`%@a^P4ci<*T; z<$QPn3Bd+mgFk$005_U5<4j`JCsS?p_FA85c8L8*k`TBG$?kdzR%@)3sjS1#fEa-H z9K}yKn>rNK7^xVQif@0%95Q~Cd^Sd(up^2sdyD=57JT)F(Y_hC+^%ALdc@Ro0x#(5 zdW76Ar~}1y?cAjenDTVn`+B|J6JXdp&1uzA+-09(9zog1R8(w(TdJw=3(@eyl|kzl zyyLHz*AV&nBqv+f1s%LLyULQhq9oaO&Gr<7c?a2&;AQ&ZkrI`)X&&U$qNhx}Y;#?# zrNrL0VcE3i746z>ZbDa_rg<_bDBeGEQsU~K4Fj^e5)5z&3{f6{ILmO~^8sF--R)|Y z(jt67f1T^@FQFF;#^3`hYDH=trrp)I;vetP-JR`jruQ!>9M^*4Hv>@Ox`o2@M1`Y# z+E*1DI2L0Xy6GGg+Tre0IKhlas|0cKwP>zw^bQp>prB#q2bX9F_51}PMZulJ=2x){ z|H=I<;j|P9I=o(n7m2O_J?J(325s%a^ATjL;1WdI{gU$wa^yc~Yqg;rmO1%%W|h}j zIyUhPQ<> 
zdonc~jhVK*aE*>5FO;L7|G?cMdWYZtH9wRsO|P9$gkDhD&+ipVBJiapay)}fjg!a4 zH@(ra4TeXSF zQ`qPZ@Y-y1|-LKO!%5)K$dc@zCzYv zrduREEiq2l-=JhpuYZoEc;$5*y!mym6?T8m5B&3+2)L`p2GT<4sBNo4?mEGVrWyvT z;PfOd%Ahp-7H~KYLufghe=GEL#luPA3;u}@;qL>wPA-4`Vm=>35Cvbc4A$MB`P|!B zHi93+KzG)0IJ(fNHKMdE`d;B%AL=oW!_C|3QpEYgH}PPE&y`U)WwF}$K}K<%)U3@r zx#0;&W0!LeSaq|sRl{9~X(=CBp4NE@`S^XlheXHa_cbN{wkWyXM&z`1&`hqFWABGz zW^Cl--T`vNTGdA@-U=mMoI$`KUyAV3F&uFU$DaO;kQF~;_*5sDr#)8C3A3B={ga<#ER zw_m%>t!txYx1}nOFvn{BE^9F`rcOp4UHtGJsVZ|-kUe=wx!aj*(_xolsr4z`a^!qQ zlE2f#m&I@%OLCY&BhzDI$~H`vwiU{V1>4=4mxX&8OO%6C!JRSAtHC}P$2sG-wgw6y zyM)f!&MUE;oJ3`-SJ5v0585ag?kX)%gj33QAx3$BPLv(@x2=`fJKm1S1Fk>91Fbs# zp5^>uZMGh+!Oyu69uCgP%{l2z($UoZ*vUaS|3?Gf-8BzQ(7cgADEQt~barvn91+KX z8hgnXVAAKsE(PgoJnt_fA}v5()W(zcHNak5t8g0F%!1s59mS?7$cJT%RsVca$t*C& zpRizOAMCbR&ZZC8CtgwVU*c@!8LB2eL$7Ax~LCJc*(e_fVI9P53r+^yh;Oy{QEu?NoBqp{YM_&z#>~z@>Zy+*3 zTp>mk5Ua9lOP!L=NO!qHD_MnmZDB#{JUsXrIlHt>Jmy59lXM3xhd0zehtbTM+vQLL!dr* zCe(|#!=V)qtqPWKcjA%M=p<=sYPq{4+ZIK)joxG3>7C68Ej_8KLg8qwyB3!~hk*Ej z7y)oCSkO-sE=Xf|*Sl%s7a=%yZ{th?UXi)H*eg~W`5iA$e|PJQkRP1my#}%buLo?r zilUP0R9@~E8x5Y4&H!+>_m_I>rwf`;nk&-6jTZMAGkG9Zyz0R*TjX-3Pd3$jKN0ZS zJ)^EoAS{L6@YP6p3Tlm)|Du#SVW35ycf_Eg0TP{U{WNJn)>_RGL`@fdfudY{mQL#b z8{I!ZK$l%WD2l?no}gQeBYdahw+7VBG|mT$>AVmq-le1R(C4avmSDkX9#@y5M|c^aD~O21;uSfb_o~q#pSel zqIk?RIcMh|j?FQ@IOGN%@NGcWTE5JEx;Qr~^%VhYOFMbGqPN&*4={1vKu9l^mB$2m z_vXlEwtMCWE>3-q&Tq-GQqzb>z{1X?GZY4ce!O%Sr(5fhzx$bAb`I4bSCRIm74g(V z^#ib$fe_ph6HgPASCegA`0gBk7Z_^$4tjLXtn{~XNEdB0=-W85T#)Um(^gM4oML)` zmo4S&zQf`wtGF-Dcl+E9>pvrCkpu7Sat3G~my|t>N?y|POLFdvQPIl8<{F(xM3Miw zK_JNxlST~Ok;U}7hR+7>PZ4x>UdC=RSpf5z?pB7hx9_#smBr889xfG-bBK$yM`FdP zu5B0zcFP3pRxGkrDnjYIfC64{hni|Mi4w9)$$0Ro*owJLwp{TJ2i72InF*`Y3eakR z2jubdmG!*zalubS{Ug_gpdBf(4sDYdP`Xmpw3i90PJO=^~ zDYelHL1)IKfwi-v;TmB#6ol!RorJB!A%@3dSF;Qw-z|}o(y1MS*FQAfb14Oygujv; zN=L3lZ!Fel9dH!v%b#F2AO3K8L0&V4=;>}ZJd9NziwXA7S9tCct%7)|YhC|Yr9tasv; zaTn0nopD}Xwx{@jUWhsOA^ym}IZ(gz&5}NnHL?dYI>d#z!96lCuBt-yyo8Fo@s^wy 
z)L8c>pIJN&2QdKuTrhOntD%ZM!rp=6s$!>YXNYecx7?_^uZ((v!1s?>3QdjkNiMh) ztwqoj{(o3ctP7S5>h?U^cjiaVyUn5@ZFhcL4yC%iHBaUk{!j2e8w-OM6q`WmZd)Vb z*9R#Ik-L=ZC)=_<<$y<9XV+)u&5Lmwhpir>U(yC%fA*;%Ym!`EU_FQ8r7xIwaUEfG zAhw6|i2?mBcHT%{aMiG*|Jug#gg@L4ZRgE?%0*sJc7KWs5P!oR(gx-W42y*BbmR;7 z#c-q6)^NkZSogrjcm^ZcZOnyk`N{GQi9Wtdr0E8)8HH19b3vBr;z7q-qq(uJbu)zX z=0RA}K|ByNgXB_@e}Z;sGQBjR?i5cQw%PM{_4;=c#Yd~@UsOK9o+*U(3hbg2%-E{U z(UUVf3DhW-#aPNDq!Sm`_LcR0&@nb&VpvZL95rjgRAQ?FxoZ;1UwC*Ks9y)y2Q`9f zs>pHp*1HVe*@R0ob*3XOf4d5|53nV6|C1|8u@nbFEUA5#4X2&6 zP3~)r8lOGtwC@0Ox9LmrGLnUxN9+1=1f*ZU8f?Eoru5`{_QP4ohjRdp4%_RGqNl(` zO^GM+>vcbO{geMjYXFMB@jDG!){_**YCWw9h6e4)9b2~8eBPu3neO$7->4Hp{4ugd z*-b;|t@!U7w0o)yUP!q)A}sZj`*&*n!Ve|xwCrSSZsMOSO}0*+67DDpElsFthdCj? zsV&c!W|T}tbV46D&xwIgeKOTQQu=J0dAA8R{V*TtU#66%Kn+G@y(MpL^>9ioXSs$x zwIZ5iA;o2yW}r4Rc~q6kr6rCxq1z`&RHXECaHuY^e(m)pJcM>qN zj!EniL>n)4<|uKdvdCIv5Qox0* z!7EDkvXVN%7&xi#kgxw3U-A*}R>=h2r?8vI5@Tf zV>-s&eSfxgagozQ(etY4$|o>-UrsBU!4gI~RNuK$?`yc$Rc0G%YH&80x^jr>zsDB0 zQ@te`+9B$UZA4l)Lee8z)0G!#-iCy54Csp1vjuUz0+D&2_mS40i#VL*B*LVHp4Ty( z{Z0taUQeLVoyY4GCC{w(J)Ncj*m;E0`pv~g8@n+6C$6wwz(JjXXtwy>m#sg2o&oNO zwg{?M^&a=P0tgm8gi#lZylbTe(f!L>WsR|GXOx+^WF=6yFCkg2MZ%#c*QEZ>H9qHA z0UQyY&pQlHF9pykAuSF=iey-w2h$F2t^oh!Y4O!o5LBQPN;al;V6j=@%4Htu7TlYW;atK8iWVsiHc+_n$yUmP9 zM9*;Z(Z5yqhVq~J4HYEyLELR`rM0ZIDPt!qxDfqRQNd?U1f?a)nUoC|bXQ=NJGnyI z3PTJk#Yq1;I&Qt1{@pQDqVL%bdWIbHtVL8u4r4^`9+lYG-LB$FE)Utn+c9%HJ^X>a z!&#lCh^+@zQSu#KAVpUVnd7qF#FSb3MGzQbA@HLs=I#ln>o{si5d;8>TdplGyy3Gk zEi@>w*MpB2vc=B1^NZt?yG$ed=LpEwhq}lLxc&Ry%Pr5x_QOEFrky)WEl*MDv zF#j7NHbi}1n`kY$p~`Z_{~&b2rFo^2XC#i{%E2xoWwBi$O=~-u=2m4{Xwaq86|qzp zA=VWJuZD#W(8u3*?7ylci&DzzCD)vNH!Z?HNePr+2wzM=mxy zh-b$Q;=!j1dCdZ;CBBKzFOdOn#DJ=&a5!Z1W~E*3ne_%5FPKFu6xw{9oG4=h?qg4T z7ia#bd+HDfkRsB<$rbCsG!L-I9&apnz#|?^aK|%C%pVEpL9TAgbV_b8#KG(NhhLSA?OtDdQL=-xsR` zX*c4BWkRk+#m2i~uVG8N2L*wD;=p#8f9)839NXONaaM1IzCKM7a;`5A0OZl62z*0W zx}THDQ9*x(=0CloJ$UM8NKiyGu(Df|*kRL_uK2D&K;^`bSV0iIrG)KXt--Jvs6UJSnq;|gp)^sf}kJ$mm9YQ{O3#f7To7P 
z%&Lw(GK~`KIdsuZ@}I_a{9$z(u|ZK{D`eTC5u~&Iqcd?u*b`XnxAJ{MH-mNJaEulo zpQk?ea~+vQ#V=k>(}fQDM{G61jBU3~LJ&|mPFnlP%!1CaUL1hu%~V3uBP1B+#dC^)XWg0 zn!>5<0Q~LTcTP|e$YJ+N6FO8-zFBDUC`bNhGcv#+!TqvigcI)hclFTz%bzyBfRD)& z0?U&_2lJ`bXqQ*_sLIT_mS3Q`?BW+pW{k7ChsNYEzE_Q9jD9NC z0rU@QZ6)PO{h3Eyt+NUHY-sS$RH!<4P!#50nPOvEUp#w6RF>|+d7 z%@g2F+CqiG{NUf$1>br#Q$gr$!l2UCwaFobcXPjf9-=a*9#dQmW&OlmcI(Fr;LnV( z*u{!%V;I^0V0q`EUCSxL8o^f)aL?QYO05b4JZQ%^O~ZH8UT)$$UajyqzmYp}UlB%@ zH^=0JfXBziYqtt9U(V@fEYA%bk(g=BCItZz_lpk6M3MvgUpIp-Sa$)RZtD%?pQ*4V zh50f!qgd7zTP!2y-OP&V@hkSzLsz^nNq#GhEEHIkZSMSB4-e3!HT_wi0ppOL{p@@S zh?l2LyR`E5tCim*h>F_&bC|Szy+eKdb^Sz$p2Nic{q`GN8ue+-yrS7ROx$ zhO^u#9tF^ErgMMf|2R~c?w%lNoEcJSRQSz5#pi;ohf;DLS55lzLU%UYvP>Mxd_Kx6 zbFqF1FQiQA6vd&}u6O>HMb^!6U6wKyoNYkJX^KB12o)$dkV4WcR&7cqKtCP29x>8*$mfj6Jiz?Tq z2BXP(0|~_Tar3!Z%cJq>_n=vc28gWC0VhR$Hw4pB@l5{vyQr0eza{qD_;6>4${ zh|0~mu(AplMh(#PBhPxC7I;GKIMgG$9=*?^S6f9XSJTb_d@}lDJ!0TszhqpE5CIJC zI92_U$#D@uMMQ?)mf`69%zt%Tuz~Bj+w`qg`j8KgN6C4-u@U!FY9+k?iRwXiGK3>Y z(8w2x?WIPk6Y>dt>o(NdXIZ)x3z2A6kkvGp0KS!xHwO^u=sRM2zAd@`j!0ZjQjr}k z`Djs8bTvHu)fvWMZ5J|#L+u3-Bin3iA$~pUsu`$i$reu+1RE~Ni68iSculNIFoT$0NDa16jv*mkB_|lqMa>CDf34HjEuTkHuZ#cjXt; zo_;uS>mCR|yXB?^*t84l&g%);^VXZ3h2mD&MmxBd1;?~OnBdVo zr0;Gcw57+JIN-KO=jk#5zA@pj*4ME5p$Nk^PK|Me9RA<7VL=*5nK#{SHcB7<3Z#1< zK70;>*i9auC#{(_U10F`3xJqZ+#$BR4*?&%Rjso&v#l%e&cyfl%hct)q>dDWY(TN5i zbda<=OWG25csdT9sEbZ0Q6&^(rH{XJ2)K5bEI62C7RIH%UXFHg-4T-_$t&4P+d+MK zsH1$^ozFaz7mv*;-G>PPKm0~|+L6pSpY8V1_cVTIIaJT`JP(;*L*EYX;S8t ze{1a}64nFs!ZB@MfnP`3{#tO^KqLujk{9~LaboZLhr zTfmAt+`RbCQKZtnn=eF-b#VzqQAvwJw~oeE_ys)9sI^;X&?>VhmKpEpu=)6Vt!MVD zN4z`0&x3LbeztVvt4869dhK}RMisxoamW^Ll1bQ-{zb}m98 z+G{xC+iYz@wr+p#e~Xv#K(bTaPur;?%Q?eEM_i9dFyF8Ts(g-i;9Y$`Z9yi>us$`z zF-Ll%-5k`Rq3{{^_u`tHmKj1jxl2FG5cBYdU9aaw+@;$IkdY5VR9g6Z#koq9=0?Nv zFEfl^+&HHEO41zQk6IZb4U{;i%12!WP%QwMMm^!8H$clQ5D};053OdgE4ZzRf-d0+G*V z+lwlFReku_8vii`?*|S|jS~WJ>CnaeS5;e@F5-Jcc3XYLFQ*?trx~Ep#39ko>h4)4 zJ=iC&!OpyW-M$27b;+;TL@<|#1OVjP2I8P zwnwP}UHPA`tdbsC&s0~TS1d?SYcN;X2Hz?uH++`enW3FX 
ze?~&q2kbS)-KhBwa!)8-+U0AY@m4~9GWCVw(MGIX8y|OLG@rdU7ymV%efnf8XBURz zVXvF<92onvc(1|ZvnZ%N7|RFMMJ*u@Rb*= z_n0a*J$vB5sF19gS7!gVQVVU7Rj_|nYpN3N-|YDRrNIS#qIFV)7rV+%%V^Njemk+C z4+Dt9iG=HFsju+O&#v(OWs=Ehex$DQkxA>TPXZ2P_ZrbQ|NKjTX0%zFi0gt`tuL45 zakqw=Lis1SILl-Qa*t$(_V@`)+eeP9hEE)QsWNJXgm$r_I3%aj=s1ZAty~>D>||fq zRvNf7$|wC?<(;dUL7mfIq!kJ?%{Fdu$4)jg|4i~vR}b_Zunet*@&sEpj99c|MtS5D z(&17o@-?63n)8OY`F9wfH&@1bsLzKcPDfP7@7q}_c_&+b@5Ht}-ObT$3YXA1m#fU0wB*YMg1nC0GpE;WerAMOkz>d)H8@ zWoF|FZS`1@6xCkx>?dl;@)j}f{&Rd_o)+$uD2O6r>a_~(5%HE4o zo(n?~*x3d@^Jg|M-ss*heYyg$X=S6-9b9D}14ZmRv`JD1ZrhHU-HhTzYnU5!pT>bE zdQ3IZcYP9Zl`e92%$Cy5|L!yX@AtP}yKmzB5w9$a2|a%bgV9^`?%T0HuKzhB%=s|T;N#yVP^?+ z8aG0+2;Pr?iOpK|o1$=nqN$8QlX1J#3lDys^ihm7e}|^<#El1#0(~N@BAi<&N!pSj zofhF`&kNjIu=i!g$hF=U{4LCE7}YwukyK7Wa3>UUe9d<4_x&(7dPlHm>@*05x#H&k z;vW7lb5{ER4)7f+b><*p$|7NA?=w9v3{vPOOPw}qq*Ol{HsaEKMCJCr^t!44!4@=0 z^yUiqrlk=!U^9BCO{cNYh9hx)*X}@Bm7gs}3|Fy^UUh}Y*q+3W-fY?vtK&gaW}P6+U%zjZ-Y3rN zLbjoX4M~Sb{CD1;wdGC4ic94mcOBZ~{EOkE?+W7+-?<+zhW2)}=dIB?9vjqZAg>N95~6EhgeuffI_;%6 zq743p@gg0^jrUg-G1)XZHn8u!lDae(BtGDhIZhM+h58d^79n?PS^bUq{b74Vl}UCD zP41~r7Gql&<4inrGax#eG~{LEO!ZjB`(Ji_*%%JuG1`;7c9J!;O`)_RO3~8SUVio# ztJ@jF*p|Kdvb9@SyX*j1SXf*LtxAN-bw#WIf1aam@Ywd zeg;BFoI%`nrjf>LxauYfW=aGaR?HjK_=?$VYkb(2-bE?5TI5C>ylipLk1v58d?*N2I2*v2{|9K>%oU=2OL&vp@5pAP?p3LuBmC?|h4af}k7Y z&b$bX2@HB#S-ZBR0MCes01cU@F7Mp}Zp}|OC0b*}Csm0$jC35Dgz&+S4oltoZC5tl zTFH88GiL*NR%;q0%DG)#NFD7N>#G5Pm34onn+|-0V%vX-MVo4SL!!LA{8NDCrL%mS z+lRk2<@hbJi4w?PeZxC#-Lrq6VxW-U+>K}bLcdf9`j`sQ!acUBu*EDvveU@Ba?RgTnw-rS*S0&k~_=>?C`F&p(x6dm95DyD;dehrByjr-RB9UicUPdf}*(4P07Be zYRxe)FcrJWw50>lA-)=8>}b_yvXr3-ke7$fY9RZX2BIsda$iVuS@pPDnZstGX1we@ zc{P;Chm_<$e;80E=!*5hf5@z)dIA>s{Jdz!#W{WMI?q1fW|3*&oWJ$#@?A}7p38}Q z$Mp@BLsEEigv%9;hYi=W@I4tLsc(6EH#aMSPA9XbIo`SyKCw9D6$d|Znt-cjRVm=M zz9I+*J#%}IdQYFPlbwIhp0)t_W9CW%I6>>y3eWd4>dhzo$f)x5AO17UHr^hEp!Xo=94SBA|SsN+^f9Y18 z3g{%R72Z=P$F+0+yve7~rSemauRY)fyvS*O>Qv3KckA1l2~Kj;Oc?c6G9u(@riguX zI7dL6S1o%eJ?ZxaV=Zg2@Y>Q}JFHEqOdT4>MuvvaE4|XVzDLp-FAb}&^!R=7uF;^| 
zpDejONRY)v5_RxhLdhNaJQK%2!%cMOO+@4CjfNtHHs_t5ZgL^<3)>C5SE?#aYcx!X zl&7BD6*R&ld86+!EOxz2a;f#W{OYROHTBzrUMzkG2m0)hs2{C7BKeQR?kHo|yFyQB z2+GkpEi8RL8{Qv_`G_7M*gR5J3ugeb)x4~u*wL}RYnRZ!^1up>uy+P|@EYAV)ROVj zjNk1R*EOyzEHOkIOG){ea)1*;_MmmTo03$QSnF~#!nUcFMsST5S=-UEdxFxzEcG57 zodCPonum$I2#;cAGz&#_`K#TzR@xmAc=Qm`|Eaaa1xGpp-PZ8cYne1mQ$`a`2rFYt ztGs{)uS_e`;IEp;LYwa0nah%zJox5S`mDBNWWB62@%-MGbvx?3&HLY_v#KF#in$Zs zC*_KSl}QsRaJ!8H7#Ag+^2AJ)9-2!(|9zoyaWV7FsGPLX(CuGMueAxKgfW?vra<5W z-0EISyn+oZS7V5+d!vL^UzLQ^VW}WfTusxug}EW=fRcF~*1)N_f5c#)s2ELv)W$7b zwn4>DKm&G1(FCtY@bkv3ZJg+yoQl$r;pnWEcnYei;7yM;7{&)7axQVv9go&tJ*2?; z{bXO|$I4xgAnku?o%PnryO?8z2P-`|V6`xgh5SuB@=nHvkkf7rHAsp4_-ypLB~K-Z zNS1-R(RqsUvV}O`etn5oC9)thm<1FFYmpADzQW!`F3iv@C1Gg==&y@qFB-6ux+ zDnU6Z8>uL<$AClrC&D;;MCf*S{M1fv&4{S#d~=h6XmR?$3&ShwsX)Z1d}g198*#Dz zDLz&4dF+>*xnHxby>zkj;t%J@LQ)rWu9UBSgY1AF&&Epki|^Nab9Pck#plsMoylvq z*a8Bg2g+QTwl$tG2VScdVDEm5DPVdNxlkJwsd|2&RO-~8i`dV=_VYakDF4m{B>I<0 z`Gwvq2e>Fs79g9ZtATr_;r22&+7f}N6&rm}Dj7gDKSvyzCT-=+(mF{M;|qp9y}Xkr z^`-@sm@B8UI>aG<-qb`CX#|%Eo_yD!0@R}ymgP0XC%`-Bovc+1ese`NqK5?=)_A+K zKjhM@BYb@nUm~E>wG0g1JW*>bX$)LzUpTNi-J=a-i-+789dICh)z)}IZLOM=0h5ER zhFP_oqH2`liLndpv$nf2azJf1d(JTH-wX42Y?z7E3G`ToAY2md$PRk}XrKF@h zz?$ezEO%$odYsym;ZbaB9ps}p%z0@I=wmF%#mybBldYmE!|vReCYg$lW^1>XdHj|L z^pW-*ZC!%7 z$;LmBb^ph7w3P;2lqSRB@eT+?3U{ERT+*9@D0bjocW#oah?Uo$T=(@iY(wYmTvJv{ zm~SDc3!?I^s#eFSQ8*;}z`jA>#kubaKS}EBQyE)#$9MrUX``(-B;n{rRl~Mtda6W~ z1(6w2b(&bM!HacRy|*h$UHv>RO;MTAhS`pg;gJ!2rT^4C)2`>g6CShjCLFZO7x4^j zRj4%-z8fpPZL1M?x=Y(`%&@j)c|jiOkH+wmb4#58!1{e%%5XFf-;udwdJPW`lWMcH z>@IeS18{aQ7~^8DPTKm0o?Woygwk+fe#+fj+;F%EtN?~Oq?$g zJJ2pR3E=-eX#q6rvW)4>VqQskAF`dPEyFvw;Bse`-CIT?7P7I@ssFb)2$ zd(Yva%e=$WCc@0#6b;1(#R_MS)9eo_S=HipR~JnpuX^0>t}9&-%M4RatoyfVc-w&? zSw?z#>(Y`CG@vZjx$1$wA4n-31xdq*FCAA?c0^oDg4q}8@nG$=GK05;_xXYujoAR| z5Y!92fgK5J;4>wJb;k#9&5rQ0C5k>69*Ft6&Z-iy56n=dH2@j%zcl!Gs=5S~kku)! 
zq6j?PYE_I*oQw1f;%%d_Dgrz|=%o zl<^2>W}1!_7>Q)v5?4HFdKF>x+LxRD)?hp6I=g$QBXc>_%~NP1rq=i3w@w8Y*yhbj zi0S>|r}z`cWSw7~fj>10i6)IThdI5s932~rPPJBRKlr?I-)1Z!XZ}V2Ipf?Y#Iu7$ zB3{iuwobg|Dv*6tgvvs@{F7~}KX8ReX`T@a& z_&h^wZ`vD_$KR6j$qWGwN}E`PKV)$@z#&~$sRd7*v3hd@%qv)TxUp4PO7;l){X%IQ z`xD;uW=MJ~x6qqTYqp~Q3~@zt6vqL>U(F+AjLbeN9~fJ5@w~a&*?4`Q^0!Wr&syg^ zc#)L%+<2ArK`j3b8vN5-k`5Hf2bA~9tOECrc?>gD!g6vn=|Vv+P^fPmxifY?nu-uR z_GLmf=&K6rsMHMZ6ix*WdQFLak>>1G+0S~_l8+IH!y=)HG z#fxW856gJjADLHLO7run)e&3mDbkSXHaY%2T{+p6WOaz;*Rmg{*fh}%Qhrey>P&yI zaYTH&^J2$U3#YRUp*E%U$R911<0+drHa3Lu*WBQ}ri5H9N0)br#yz^nz^An)3YPl5 znvHXz+3KzO~U>aU>6FCuK|0JuPg*`>IDz#S9Pe*^qavH7ot4fWHr)gfV$E6K(# z%}UN>fPH2$DpU0DwebspQ1QC6T5mStFXN|d?yaurG_eadm-JUUI{6q@Pw5csq4%_r z(pW;`*WlTo?ocYaS!tTG9Z}i8?48#M8C(nMQPP zaw&L;VfE;wOeJeJOB&|1<9Z5RFR`lo_@0oTR|=gqccK|v^>L-H>r$Vh#t$z1IJiR? zEpV#(N~$h9oy-pvwEWx?g!oJ#WxwdPK#y8;^jJoFL=&YdT3Y9W9`&vs91>^3HWg*t zP7*p7yB<{tkILj~o-H-Dyv)A`gs*Y|k;oIZ=4_Fe<_=alyMa&MYG}Qj``9Ns5@)6b zxdH%(G%H;s`=EtGZP57|hydr@SO?4+C)-W_7Eb$X<(rS0$C*(fSn>3d1Ld^J8jFqO zcv&!}BRRh5wP^$~5;OMEpZ|EYhue_e#zud76#|n+Z3=q`9ym3~nW&%HcbPZMN=Ib% zOMGeMO{Wl52lu?>-lsJSJtGhqQpAj&aNY*vbmYBEPoVvOwD@Uvu6<)Q{dvZ4vju2>{5<;RPOs$xu_Ys2w}FPbr8jNE{s*1-2mk;8 literal 58783 zcmZU)b9g0N^9CAc;$&h^Y}*r0Y}>Y-OzeqmPi)(^ZQH&(-#O>^Jon!Gk=@;U_v%%( zs#aCK^>(PNv?v@j7BmnL5S-XAA$cGm(0w2v5Q=XQfGd5ODc67xFb4rK#czN=&u@ky zfOAYoVO2*38)HWoJ$oY{6KfkQBN_(-dm|%j2U8ozOVCbUAfO*WVnY0iuIXp%POc~_ z=v~{J=4+vdYxSnKN&4Vn=tWHm;?+CC;xH&Q8-z5ndV8mn}uU5dW=rc5|^~WX4gEB$k6B7hcL)0CBnu#UGpg?{*%*?Q(a|3Tu~h@geVA z)s&hzK)tgnWcw9akAnMW0#kJ1@;nnLNYY((*G~w?eiVeVNn$-N z)|+A zm>cQ0)egU+I8l98j2%{T6jd^A3Ljzl3szlr=<>uZCBAm$H0W|vaSM$If^Hj>+W&67 zBNKb(LUTI2!felz%t67HY|^R+pP6BficeRbVw^g4*I?PrhG{Fui5RKi^+e$2U@FzQ*fUZ zoG%3K4#Z*O@rhTLd6mB`#Z!kf5GPMrozBi>3icWsW;Fa6+79T`LoDguOf^C~{plC09OzgEbkjcknQ^?@QI zg5Z8U{Bj$}L=OSkF8%a>(xrS!*HdFcXC;&D#uKe;(&M| zIEd_UPZ%Bi*pGw#I>Rt9g_uc~@{TqA*_XoV@G->}169>_5{9 zC8qF!6ksb4^~h0F&kQ50qI}!&vbTtDb;9=@nZ~T0#$fB#fJUK21<1GcriKm+EaN&? 
zxl7erl;*?`VEu8zrz}!?nA~)0iRTL>Bf2`Rz(CU`GTE(m_XRCV6SM#u z#Y(F|r5_x)c`DWJ(`>$aVaQ~ESTO?3FJ#P!^rX^C@_au2gqQc}DL&8sywDMc+22bB z)-Jg3W4tC2a8>a@VsofY<-KtK>lU^jVr~$Or}sxTI)j$@g4or!%q*Na1R2^$$ z>kL>xA=0ppv9=H7W!H%;)5SMW#a#MjOM#|sk3SOK11d@AfS+t2$HtpvFAX9vU`Jk8 z;`duA{;i@^1$z=*ydR{CwFfw76|FWFP{t-)t~8#Md?l-jb`70cFslnM5J;hqVBbfE zS(HT1^wyIYjQ#rq*#;kVH%iJ4>^WfQzSki{SHIdxAS)&BrK-gJQE#Jn{9yJ52VN@& zv!6b39OD|*TX?>idH&{GySswnSr^|+J9|?rdGq-1Z=Rl>wssXF2i8ezj+Idk6i1Fh z8m5*9A@Q>&m#^2rgdFVTwLCpNP1)bEIDJ)p9;7dYXU%!-oK*fw#nHwsVe5hsvd_>} zj4x1K8vMHGivY$+U5*n;7~NK=;8r?uG1{od2w@lhOg(=d=i3evWJV`r*_mBtn;Nv4 z=3AL8^UfN$$0?RgW6f*awfWWCF$kG5TS+nR>3LN$6VD$mgJdX9k{I6r@HiMYwfuS zcwc@y4zSkDD0Gc)aAs$*2zO|$DCVjC?y%2|(9$@7@fNX`hR}X~4E+@*BSsjw)aq?T zeIiN(2kjd4#l+(&MW17vua$vC(8bwGCQA}pn@b=Y#ifd4<{>I~q>CdSn>;3k{915L z4+@C4?$8=CHU}DYRuo*l2IEK7)K#(-iSbqTmX{?pU80W4=FJv!5S8^Fn;kEJqlu)< zI4hG&B~lvrAc-1j^4-zEkq~TbH%Z}i7WFHtZM)c`lv4Xn@XceTFe5hWO_nHis%2c_ z3P1-P+E!dFleLhFeBUKcA6Yi{)CLtR_YB;2F#w^ASmNWi@eiq}HFj2W>7}ShF;Y8; zUjuPzZtKlb=*$}&)T;oOuEjA>XyFDJ62I9c(9a>8Q7G-^qXfHR%{5!=SB{Bt&TL29 zo~ynrG6M+Y4j0Je@nW2e%@4zVj~_v!3K|URJdyXkVfv^JftMOykvp$&uyn==wD&rn z)z}JFWVZ^9ZKj*~VM`umXAHl$wns5F$4GJYzYBO4qfE?Wa8$DpkyakAG-s6YHpa~Z zUCd*0UTW};Uh>~t^IWgirlmdBp;)oS%O#xeK&Rniwmi6$&FRjaPO*$n_7)m&Jl=ts zWFh1#K<`TeCoahS@l>{+6Gnn5##y_bP>3LW)|m1t2JEg)_JN$y298?Dv?S}X=M}}W zW^y90cCQPd;_Uka93Bi|AVT@xJo?tw`X;67o|-k}dNQg=mXI`?OEyzcXm);Cr8ZSh zx$qF6CtJaB&PGqbp4=|v?T@`AnXMJ1UP}m!8n-W^A&|MiGP9q`k3;u3yd?W!g%BBl zNC=Kqvw+)82N`uWc!Hn8{=_uW3gjo6%(R+B?=V6#KJBu^T8(@^l9VT~|5wq5JJEp@ z2p6ek@padbk<4nSyNIrxvPrM4J_cU|p>WUyu4F+Mv2%SI*qob+wa_iRaBj>b=`4x9 zl(0Qzg$%OD#Bq05yxxq-kfFn`5|7B{+dbI`2VLgyn!l8j&XC}50R)B?BS9=y%U``g zSy#_Jo!uBwC9*fwZDwDJjwKUwV`P2nNFjmoF4iyxUyqaAGfrUROT}rH zHMy>`Wa7RatRMUl4<7}T+|&DQ{RSHd9*&pfv@VdmtPD7WyC%A56INoToQI*br+=8! 
z39otavcE*wL$(BJy)DtB`~NPH%LG4u8v`h>bFC_FoFA^0conG9Dv(Z)|iw*%b8 zN^r>MR-S0jrolwhx==>eI|btU3aV(t7oS_N)*AzrBnqc1V{yTrZ$=^b95KGJ-;=F2 zym!ZN#SFidgU3gC&Kbjov*Px-Ohz+@tMx85qld^BSIJ|AvJitA5>iqR*w3n*??q}I zX@SHxtE9hllr^=m?F5he+#F`veLA0Wjra*mUCS0BN{BNQKFxjyB(|xG7abo*F?@|S zFLtkq`276*LPA2{u`Pb)$?fj#2`Ssq*&j{_moSo4)z*%NBr~mr8Fkv;4p^BlR7m@a z+;6n-CP}xuaqsja+s0GkU^j}}NA!q?D~!S?J`!N77=(rOG&_;4bz-5jvEqhw*rpB| zKxkxUlNoE|V9{oH=xJ2c7-`I16M;=+#-6O~|9!#+zJv)B4-_xxo>&bgg|ojMMdSMo zTVL(v{3kHZxOzLDZ(4an@q#-Dif459FEf&z6U(kB-A+|!SQ#&k=|xpAjhbd3cxRr*a;S`bV;uzO>+v?TzN!iC)t{(uwk7JJ4!J&T(QPSO4k7%g%?mA4; zvxIkr_rj`@Fc7>Zph{j}+P%NDPx*b7!4`z6G3mfZjaHt7kOh}DfwoIA;s)32(} zS1vvj!^G{17SUoT(~DBZGmo|c*DFq(o6j*85(1yJ z97|MfHQz3C-3j5+Sh%Y3Zb|aI<#1;$0Hq?v3(4HXXXw%-bCx!^XblrZpbg1kPf0l{ zd5q=kdMpiD=2X^ND?Ck+MYJF~q}AuhHz^$CBWi!F?OKvMJx(6iU5X;pM{e!)TQF-^|^wfE9kCCnb3#msHF+#k{<7ZzeftKMU(>fvTQ zRJ&FMyU!Wnnvt|ZZbi2^Vx);nuaHY|ni0ouOlSfj2*RncH)(9vwQjr050^Ycn?9Qo z3jLJzVKW61Gc`^QSZr1S^-k^)X74WYD=Ry5Wm2(LQBvB(`v*&^nk&wNIZ>~6$@lI8 zKA+%=7ofJM?*;qp8JkjX!FPQ)$X3gRvL#iOv)_^?+N&s^^o5c>D)9Ykix8dOhNyOD z_r`7q4ioJpk3c4F2P+_#(5d`d8TLa>jg^`w&vGQ7EIyHgIZ=MPxZ`<2K0d6 z#A1tvNheF+O5{yRZayvS)|bQ7>EE>Alb;#yTr6DQ;XUG#G`#(!q5AmUOcQroF6ZrfUbbE5bsInLqeT^RZ>V}xVkFWTb7^r^7w{VBOU zi3_K@5ADn9AGV_2aJoM+=<>}M&Whs4SQO{Oa$Qk_hxj=b7oic<=mKp}fd*nL&K7+S zy(UfGmsm-#*^Xmkfimt4xgH-)daU9mfA7vkQYw=iCN2?I-^!pWXZ=axL>QM`P+;IM z^%nZ56wQ^AzNhLmpvlMaM~HIRcdn9B5&T5KegRBBtCawT$NAACv*R0 z!CBdo$cLIT%rVWg_FQL0Pi5woU=>-@AI%?*@InldGm~hr&tmXa6|OLkFjbCx6?;sO{Egw@N1veA}S?`Gs}tkM(}$x zCSY^3wO*XBp?T=$!Gk-Bpdhq#So+bXRJpY}QdZftPqQ;aJq4I>94q>tPka=bc;foPv<|EjHHDdGD_%i zfHy$l=cM5%*BRr%@v?Zu)^;IbmB!M4%Zt5ip$I@U%dVOb%=1Pzo1?4Gi)T+f9*tf1 z`mAmz$Msnf80PD@Wr z@8DABBO($K($g!Sv2G`U<3}54-aypPf)^mJGD;=04U>dQ#D#tdwGC{LtQ%8X6##Z4 z%fYcb*MdA@)sk)RT#aL}(2rR2q4GPCt_ub4K|NvXnS&xMX zvU0z%wj8q?kkU*t0aBW}YqvZOR*;)uWYmWM(_s51;x|?*%d0DA;smNpp*3Q*;YD1w z;c?A_$)kbX*FQz>8W29~{v+_8W&M4#kr~g1=AMv4zk8QI0Ck*T%r;eub}vPTMpnwz zXJqzlO@RKkrWRkEnnmckQfG;L}ARoa%HI8*^ 
zz->c6Rgpr?Va5u_Kyo_2QFO2%<>OJjGP}8c=v?QoyvS`E_x153B3M)0vi#D#{~=3H zTO_YEp2Iy^DIZ>Ix3mfB!og8i$MZ#8nc8^6xYl|`M#7nG&BJkQY*J|~q=baOb(G8W zwxh!Vesnz=HZLS9%Cz5VBTK$_>MH)7Y_BlH_O0_MY+1MKz0{7K)hfeLpS3pSiigXU z)ki&ccyC5LN7+E3P{2JPqa$9qDe1Vc3GdaE%>wRxz_5Rwhf0AhvS5zyWl%VaFIehH z@HgBa4bWtRZ=N_&3^^*^;&HouWD9n2Ys5NizM^Ri)aFb^l zjz8pfmhDkZ)JZ9eJK@NuI6bzGk1fT1TBYj{=ec(#M5w(bH^8vfH2oo>e0MT#z#6J$ z)+9{uCu5ZZ4!{P6^Ua*<`WCUQ!^=AF5W$N;{)&X+F990JkV4MnKsF1qe}Gt<^AOo`}id@B$I=E-+y+dX^wmETgu zg-esU<7}bKnBO?tV@XW3BU;I(_t+tRX3=vhZ=w`qRe;y;NGb zJs!041ZNhdb0)H;p4-i~Z<3L4=0$0dr@#WmD~;%Jtwe5vXS^NrAMHF=~aA=nN;0WBX#x8r$vP)HhZG4*6!oMBBhQN!Dml~H!|^;FK^U(#`Qvo z@_9VpEAr+^u~?#2&Y}s?Tu4tPwR^TjueE`Sg>s?r78)7O+^y{hB6mXiHU@W!`c!BZ z&FYx2~Bvrb5Oo>*Q|f zZZ9&{;WSqZQ?ozXu3UYp!vSFc$FZP(#JsnuC%^7L121Mczjx>Q)ai(uSZ zcS*Xzis{gDHtI%FgDr#i^`?sUqhV!C%9Vr$$K~{Ik>;7YmF9oo-%m4Whg{4xZ|#=f zmxW_Mx;^;}jz@E>TG*}b`&bSpblC=n&Wh#A&+X?u8%_KUqp_wENi;)ew$uRN7woep zV2g#Xvu?pvAAlG$XKobN8Vkm;V4KU>%xQ0!w(WeBPV4|*9dM0DBn773>Xoqay z%qd4G%Y{<3JcH-N;Usx8$Sy=joUTkzSh5?sM!i zMsJU-iCwfrLwim8uLhpl+~HK*`lLsy);z#wo!e>$e!5d2*WA>RSBv=R4(;I?^eI&F z?UYTXFoAaN>41#aE-66sAN=uGe0-WDX@yRB8b^!zJth8K%g!bC`}wKqEh_Qg2u<;N zJN0QwQ6Qjs&~lm-+H42Deou8|t=)e`4YX>XF#KR}uA=foh!DD*3D3p>UsNx!H2BR5 z2_OrgqV^+c4G#&aqxMN8GGUx$>nzIT@tC+Z;m3fjX6NGCKHd*~VToPv0R_~avySNm zTTm^hDj|Tx!FO=2>G!qUmc$@Mnbj2r$8(00p3h_1^U*1;<>(nbFT4HvKfMW8t;92> zqFFUoPOi>QgJxI=P!H#GgG2mQ4!bv$9D*FN2McDbFcccejA)DLms+%@RueZeyZ0JT zme)u8o`r#+K#4li=EVwaGInw>FH7q1#IgPX))c2E?e42Ph8AoUfZ&+x zkyL$}oxd5#;b6=tP+TLkqDVz67OfTj567(`-hXg*PK=bn;~S6H)!y&w5R`l|rbNK! 
z7QaqjT2iIeY8xvEPfFo)LDFh*GMw7_z}&$7tn9ql@;GCMho_4^+^Du93rC~gKZsnV zJ7>bs;d!lBLl>M2H2^&Lju05Z`$NR8xv`Wp%@>Hb({3O(Vr6M%>oO^zyTydZE6H&@4hv8cEIwx{ z=qtQh0|zIkY=OGb&HkjsLYcOzhQ{@s7|`P-pYG+wJGT#BfOe(HajwzXs+aGjPqM%< zBgv-OS~q_;EFLq?romvGrAq0%tGnCIc?N3z$vhPcRasdzg$8gzJCC>rWc&j>Kn8)c zUJqSqYo(LDX6N%@jY&G~SMCiGG3`E|$TiFFgu7-A3E52w?A}Qi_KPkgL<5J$ueqeI z*g{MI*(!}G>xESWZ5p4Qmx@o2-@a6S?ZV7C$9hgthqB6m&T_p{QAQQL_cjy&xrKkLmN#>XKVra@f%yI-RBXq zdlZg9t$;JR!TU0o2gq$hMlT=M6nkvFE)9efe*a)Y$6e(+37OOe^SOHJYMP5ZPhVJi z{R^X;Cs)`C6)+RS<3pA1ihe^zn+%wE`i)Y7(&zSF@kJo%PH{n$F^}hy#VkB6WXV+p zLkMQviu{x^a7q7GjwFva0GF<06_idHVzNcGsVex0Tlh*G7zuW#63IDyNgr6JBT7Z3AVzwG&* zs*m?6joM#?l;f)*`((pl5CylhvEJlnJl7n)T>`97OtaaQcQX^Q zwg2?5%}jr4D7VUv^=I=Xbn*n7jbPe?6JbElLolNsv_7fP$jUzOVLuaAoXf*Z@)g*uQQKa0h$V@jyD&S}YKae|X z$WVr6g7AItq@hmfqhM{u{`h}bs(GdG8X%uk6->Va@}*tpWVGx{ve7C6w4lhX&cCJe1Qzd#Qm-mmUzmQ6bp%9iwc4EV1jsad zCE0wvvKBzVz1GpysG}@EU3}V{|KYeOXA_yR|3cjnU3v9c*FLq0XG)j0Yv!5<5y`J(RLQgI@8|bWY*7QxN*S2WtN+wCvKK zyecl7Hqp7+pShAyrOU2z%)vC!l1uE%K;0OQcF66VPx&aljf$SaZ`H25YSj$mvzPLK zoq7n2&ZrZT}X>v$ZH4m5S||eyX>I7Z*+^vn;%A($l=}{ZH<5RgR@HX=HrM2h?OI zPe>ppGWYGsej*#8W9r!+H=?Z6jOE}wWnAd^ts?f@EgX@ zd-cqk^Qebq*5u8>!?hf{9Tmc8qbg8D;*V>p%PX@^AGDl7+&dE)8c(#JLEwX4&qn_E zG%qALukodigbauF#5WsCnIKEiwN)+=QU+G_0>OOF!2 zfB)uiIH5L?&gfs5NW9tiBG=4ET(0rh-QJF>uSboNmNsR^vb3_Yb#myYoxU-*(ZBA= znmXO(9V&dkI@efC((Nm*WUsnJjK+Juo!!~5yS!2hS~HX-NAYBG(+rxk(kS6^D{K>*+yqZR#L^@VuD3T zLzTm#0EB)9G7(l~TJgyYe>u;s*Q;h8c^^$Z}!a3&a zElSN>Rs4DI;`J@T6uY}Zlcu1#;cRXcwItp=7QEFJ$qbljjclD9TZeTTOyskkXE}%2 zVk5|+&w7`af8rU-3%y$i!UJ?yGnRo*YRKpvi6x&`01vtCm_`Epd(docK22Ciyn$5q zM65lmvU9USB)&f5kvPB!U6bT-70=Ih##DMcvt2qbzS0PIT<(!(rbD-~#4P^?I|3^c24D zC@5nSxUEdY6rW{QrH+-iy@or_%kN;Qr~H0n;zNM6wk@kng5LcSQ-o}z(6QiQj3`~K zjmE{RBV8qH&&1Vi^0Bj!K7Kq_w)DD~BcPHKPUheTII9~rgQXr(EN+#tPULe@tdL+g z@G3jgkLjScl0G2SBCJlHdGvH~_YOc_{^2ykJe$(q&I89+BGW5#0;)@o;rg_Nw7rcK z5FChvgc&%k`-7YNw~61R>kG552Ahq;{*98pzT?roJFC^&$brSm#sQ3adEyC6X^qjWvvekV;Iygx>Gesdnwpwixk!Rz>7~Ch^mfn1 
zAd0CLN^3~oolV=HzZTvF8I8~(2Rr$R5e~`2FWrIDB3UK^;S7DuWj}{XPQf68vuURj z#0)eB%R$3KLrP0A`P`9$-xFM)>+%Erp}r@^F@SG#*}yV2WFC5}QmfP9^J4SH0MDPZ zv*=-@fo2M3L7dGj$$Ab!8?!xhT|NDPAsg54rSQouZ!1}IgGF{v98`LN|4NYo+L<|u zEW`&SvU?M8rwN0Ya3WEFg7(sat;Xb$Hv)nPunF(q?3cd1tTL{xY3fiq-1M0NjU+t| z|5=_Uux`JlNozpH!|R@>QlXH$$#X2#D4oxqH=ixBjx=~E2S@OR4w$a#dv24HpR%O( zBWU2d;VZ0=M0OTZivD$~5Os#jzAm&r6c@sOisRx8 z>8Q^jDonkz(L(VkNTo(?ohen!z1|jA8Dah_CFg=2FV1ztav}mcn1<6?DbY}poQA!O zq#4-FL%Ca`q@U3a-`?Wt;C5;>Lmwdf#o?gsIDJlG>YSwz9pEPZTAnSqtoY7QLSdNO z*TVU`3trzm(T~CU(!~@m5B^xEHh+JA1}rI{VP0VwI<>WR>PpZ|2xC>Q9jm^r#z#Ww zdXJsV1r1S34{jXoH}O(jp6bvR(PX5EZJ7`|>fkg|@1h>G%5-VfgF&`H2d8&Q<8q!L z9a(Deg3u6K5+|oZ!!*VKZesE~?mP`vA?#0#O&o0C=iX2TzdUZrFFp&vg7Uk$+V4xJ zO_Mua;jK~|4+kPO-fBEi+udDyhnJyzuc~ZEtKs3{G0}o+&St$ws?B1xa}1EM!J#Hih+6} zeIb6znCgNJoL!GUmm7}l4AyBi#q&YSs*Ps*)8`C4c+9T+ZSRj$s0ax>i3h!pN3;+me34J=Z_2g5@CRwu!{sEUeK|tt#?~jX zhNVU#piassk!r&2#2%1p+C~;HG-;{cX>}r%CwEyI@AwS=Ggd%3vtqR3s5o)0HBUs8 z7i(FmPGfC#W1&)MqKf`tAnFBdYv+{nYq|$>Cdd_LK=sVxA>Tsmbu8*XQXQjWA<4d; zo*vNB3TXyOsrh|V{+cCgt;OMnqYWD~@my@YO1Uy-fqdbx_oZoD$$Acl@Yzfq=NCd3 z6Xm#;ycsJ{0pEmrI{8V+#Dxbi%ulD3*qBsF(vO=~P+u3;OXAnR%H9~)C~Hft>r0C&2!L37AJWkUF4GmM+5l_7)L zItKp>u$TB~#pZjC|8+{|ap3L6=COam(7PVZN}$vt9OlK=z~;SNK?+Gbv+8{rKVGnw zm8GXjtOUt6ttOl+)8bh_`69FS`S?OHAR_&ztt>qdDCDDX z&Yx(FFNY-7XVs^tlpQ`|1)N=m>=PCJ^m{9wqs-0AJzlQIANBbFuU$@8UtTn3PJObU zqPJda0{^*qkrAgwwl+QO2NgE}1nA{-Z~66xMn$=mHu$KwrMG+REC|`mHSs#}RlFUttP%zF`4i9^hidW)K}%F*k;i0V!;|yQ|NoIy)-#37ve6Lv3oc1|Z1b z#jF^Q|8+;+se$YdM!NS(HuK%vl(E@p)heUC&KvqxIXs_ije`Nsup?;bJ&!>3l?2ELTlSnLSkfev??9cKbS@WahqOWneHK zYjpeEq>T$?y?K*oRVp2U#SnFSNL#2%OUxdSl~|TNJ(#8b{YJCdp~icb5*lUfbHo{W zK^y@JUhibj3?q_sl($`b^4S3$3L)Cni03OZJ+aB{DDEIrF0UW%70XDBm_2@1xeyz8Iv1`$=*~+((R2a z;vbVhrOe=Q5G^DstkGaHdLY%^1AcXP*Ega*YVb+M{opeOh{-gebpkor0K@X+aZffX zsaFY&3MU~d4}7J%Aie`fP0GVTsa>I1x#oqfsS;wE&F zckuIXZ!p3rvMs=P&Csy0!s6l-4!{xjB`SZPSFJeuyX{*s(!m41#E(#ny8#1Wtvih@ zowcu%?Q5^E-lN()~A4QefQLcAd>|~+`8F6AJ{wiplg8r=tm>wc0)|@RTOhF@=@?fX9;BGsQUN^ 
zqFOQi`*dcK{ZJmytB}#83fsqtO>Xx+8CO>@q-~_7sL_f?15%{~q6c=8JR z^4HVZ-O}@3+2~t!I0U?3e?JhpT=gX{SGQwBC%IEU-1jdoqHlWL79Sq{%xHey*?4`a zDXw4&8-tezXaa^GyZAyryxgxbMVXN+176h7S?E&euEfuk3dIoHG1c_PT{MI#3bgq6 zE4H99KQfL@*y3^P72jjq z`Ak$7zm59yYCXf(L zfzSBE+dYfk8A#eI}33Ze04vvsJ;N)1X9VO=3sOywDX-Vzl<@f zvzVYyr1`z##dOO>wyRE)>lDX6BUU^Gg;F;;OJGMC4(C!ZgT&8)0UEwp0(*vDG%X@h zn(HrI4^gi*RkvLBPvC(p{N2sL!`jYG4|gL3^rW^cI&Y``xO@CjPcIQGNN4cn3-*%Cp;oqgwX~Nv zxFW^t1393Vma&keEe8oa9F*k2O>E#)XkV!O;OKuDFXHd$aa=)u!Q#{Evh34U%#O90 z69{Si;Um!ZDs%4J+WxyZo1Dc07a)dbNt)PNwC7xOu zcIkl`Sd15vbK$>TK7(_|g4=E8#2u~ z3=q+yU*cNt7su4OlPX6b9So&ag79DkeY|M3MBci+lqgo18}w(#MiZxIzf0IJs>cLn z#Q=DWTrSr3d~Z(HN@<}tTs_CKY|t$h+Ii`KgtK7c3@E^rM3CJN$=FiMiXoVfV5OQ_ zjXKK#wWP-APdoa)6COFfw`DDlHWMIKB zvhu&op7;5&YNS7W=@@ZL#g=vrN@{(`GWv%VC&o?g{V!?+c9Io7g?) z)4A3CbkuyqgQxpQQs#2kM|O&)-!0t%7j5PNyli7LL5UE_br|Zg-lkO(?E0|LRj8&E zf^BbkupAVqQ`tWmkoqL#uexI=lX)d^P7Z5ysy#5 zY*<_hTMLz%dQer)l53Pr+nB;3xFx)w{%k|FyDIQq7XJq&j9UHK_f5%2Iu%Pqwu^n3;qwLQ$659!=qkJG$Go&{(EZ5+=qgc}|&!NE(9L*QAAnX3KnddSXMht+>n zKoLi8J@l4bIraV}EIUHIewX`~%$(m(n%kpK&()?TX7xSl@O$0R zhDX9Z21Eod`TBu0@#LT%kwW1;&tw9mL`lcaClY}9m>NGB4jzKJ&y{O@giUJt@r~11 zu;In0n)XRxY+v0FC-HgsgM-rhxu=0f&Pc8xD&BDK^mkae*v=H89hJ?y0;XY?lq7T7 zACdv4|B+Mk*ro6^P80kQ4Pj{3hE0!@DBgTkRcd)L8+dI&7w0PNbQogVlNkj3ut&8Q zghgc^KMefT`I4IQW1EQQxPIbC7&Y@M*3Xd3Un2`yeNol*NAXG*wlD-65pVa1m$

    % z#;1fwUy%<4h~f90!!&+l0ajk7*vIUut_8*bdiUokW_Zh! z(hWOk-h_(Mg!jJ3uqwb+DP4X517KKnkCdg2#U9!SIOX@WO5ie7@DbO!5d6EVeigr|iXe!(!;LvN@`ZpSU%b*>v5+F-tlk+ zBJUHiXxe72f`zu(hNXYW-K~ncmQw8&`{XPCgM4Tq$cMB|e$2k2hQH?CvVH}GUnbC+ zrH_R<>jWRF+)4MT11M8qw~Hm9YvrwBxWbc+IqQnQsTK+}t!cL1nX(7eq!x^(r!UDW6sorHQ@Z>NI!~SQ7FJVd(5|sdJ|YPbR-{^FM$f=7_Y7tOhHXZ;;DU~aULJ(SeGGQu)%{n z9qTq2R$G{J5%=IPfWWRt%GfT4D25{7bu=bmoXQalX5q-;Z8#vr z^||dV+O0)t5HzD!VJwy{Nwl;+QdHO%rTL<5Sivd}FNbWLWTYu@+-mN=>_j zVoFmmqpN2pl7^=@0j~u({sDRIGEPlihnf|~V*Ds(&qS&TE1hlwI8$o+!AYb z#~!#t9Yvp~r#Me-BM1{M>R6Twl_A8~JR4Yn_qaSDZ7FPhiEQ>Q$XG-?wz$aP32vUe z9$K`N`$nKGn>=pZTu;V0w%r^2PicR>sTZ}icJ~nR4QpbcM9EGvd7kuz8(Cel&uM}a z0(H*VJOohnu?h$Z+S$8Dk|p3{ypNZ}Q*%RqHbA=GY0#W8ab+WDozZmAX(gWPXm0CH zY7XI?0TNmQ0KMxei5~pCP$x3=j!!wA4noN@=Fb<3N$6mDGh-*O60hL(F}dLtEGk#w zh4X1G#oEsQ!%^sGJf8cdaF}8OAthxQd_nI_kmYKJd)WQYzHM4x!`ib21}Si}s(hiei= zAmO?iSOvqNub4A^l#-~h{y;*C!Ol#P=P9gh7`)4c7ZR$_6CVMT{X!K;3zT`!>s&3x zy@*IL62HWnHCYzQ)D-3jLe{RBAp=4}LWU9w@--_c=Sx-SP7sAE)cE<)`Sxc4+tV*e z$+m!Lz%N36@tfwd0UogixT@M@t0xysKuAbvFDNh395UoY%EQwxyGliX4xk}ZaGFzm zeSRGh{xb!@EW$!!?EzO}a&6^BgX90>0$3uA_i9`p$MB`d*D`}HqBtIjK})D((PLmS z(oHLbfNGz4^J?C4?2RV?MYDm|0$Z|m=i_gE7p5Y#SCW^fU4%Ha{?z6I*=c+w#2~$;<`l1a1-ze-rXDDd34&l@Cn&1nyKmoRc0YW>gfQ z!8mS;O(v%|(foS%M7J#O?wV*5rjJYrp{&%Fso`#!K?ePo%QeSMX5NoC7_>>Z*a0*A zo3UMoXPQ}$7RF2X1Ft*Cs9g3hh1@p%dU~T!PpZ|U$Dn@_Rj`-}#xy2}m7I(NLN}*XyZm`(aI3IYqpzk&l@#Z|J?S(w1HVt zM*v&rJ2o4T^56T*uuV3bYQ|};Ix-SMLbk-N7Vh_zxH;&k!rdno49(@wzEe!To3(cZ zwS?Kg(95yI^Uz0y(O8iNNDu?jTc27TqhG?ljB89rXtkeQX-JJP{;wFT3na1~lcw4k zJQ-vs_$KETkJo#D6*pg>cZkrk4*!g%#f@F8U!TcPsY2eRgDsr~wQOGv>{eab6l;KZ zheLXahY#J0OXLo-4C83UnR|!3QC;xVaLyktjG%@>)$0z?Om2MgbaIx`ms$6z$z;O% zDVZwp`PwX866P7e0xj6DyJcS(v^{ixn)1{fpz;B1x|#o3DSMO$Rr;-U@c*IgtE1v* zzHK215Ind;aCi3v2oM|+AlTsU?t{ApcSwRI!QI_G1b1hG!=Qt{&X;`auJ_*W-nU*Y z{+QL%bWK-P*V+4=eNHc%<2DR9KGljK9eZAAo9nnF%(T`6(Q1rcx>b>sAc!`)JR;P_ z^!fOj)=7M~1T z`_97du~~!PgK?gAiW2n?)C6Pgmxy zowOSk>HUpKNBeu68}WM-d*7C5c6^0`qHPko_H#jGHV#DT7i$WgIlf1;FZeBGfGP+f 
z#=_M4^+|2nor($9C(Eq36=B%}yr12VS4MI~Z%by)#&Z;pS6ZVOsIUfeDe#b5dUrN4 zus!B%TfqsYB=1I{T>7>`(r0|}MLA-g-0oG- zB88!=_VicXMEXK#om0dP1icjCC{t4?kv_dsj9|JyCJ~nua^pa4M`gsRyYiMtcij}T zJuPcLYT9Xi)*TNw4oC=hZtNh!(L|kHDS7c$W!j%vcGJiX^!;&%CM z@mf_9YG^KB>)`q0L!UAQEqpI3VU+!t9`~=Q9p1=CcN|oq;GO1H$|s2Y0sZuy-H%9@ z^@fe&EXA%~B=$pB3OBPNOYVv$3dORupP~kM*;m_*cXxLcvW1e-N%&NnTr6$)Na~l} zNWOjhmS0$?*6QW@dx}sc-p3kb3EYWMZ6@>eGJo5}n5I2Ez+k*c#`J zXmzwTxQsPU-r5obKNg%kl6}Ri10;vX)>)Y zMZxAx1#UUdLN12`*XW|XKU35GY%{%c9!CDAU-F{zdS_H{?BU+ek)hRpHCDj^qWjAo z&JM*Nk=yWq{)focIbF`wACc&%BB22ML= zc_+Ty_3)m42D@?zb{>Uh4N`PQO#V z&XHsVV|YiuG&ovx!`S>5^hk^ubC*;q6P;u$oR!*t@!6>(qD(hkIF$ny!hg*qh9JQ_ zdAfcoGFi+8zhdm!zd6@+owGTxyAZxLE$-BTHynJGTQHOZ6A^dAx5kc63O{@^Zh$F; zcX>Hr32Je$qT+w|N&t*}e7ZNFDVVj6V9c;hd~UFG5}U*U|AArg%oK#0GknmP_v<#{ zDGSznyOprF<(K(hpWw&H#O$80)ivL~bA`EpTfac)oJ<)g2^yt8FrTj7W_tYSr-PrC zsX8BzhqP#isYbbTegAR$`a)IT@j9AJT;QVo&iJ&T0;d%|O}?{uwY%6e$^1ySJE!Av zq@gvxsU~W`wbNX!%^9N9c7Lf8iN`Em&)&9nNX+Y?!_v4JKQ`Oq>C9)hGyr^KFqgo&C|MJjH$3_jvM4pBoX$oJ^^hg2&Gj4}uC>TA}7* z-ibS^h(9qOI>9FTD1y?BFl*uaurJx1sJ_AXtC3 zyrZxe=gOitUmog@TaEPF(b$kyGIqb(Wy#fi>Nc`ni4}*X>4=qQ9d9mDdgp&2HFFO} zJ#Oa@bppTY!v}d8Ve}v8bngWu58j;ehKi&F7b^#_hh+^XDr%>|XO2e0a|BO+e*7hL zepMUC>7OZtW-b@DPQX_GeyhzrHTxprn-l@)zG=81QX2q(Y~NViz_sZ{k_11fr5PK; zbt!z@^dC2(*iWvmAjj>GW>)i6B6s~1`DB6;a&jnxgM*}ESp%OXBS?kL`NJ^jt36J+ zV#yV|x1rEp0)1RIY-f(%Kr|Eb;U44YPfC!P_snM97HC_AV^@=$G&$(E)tqT1gk)jY z&JcLDEa^bGRk_-MrTisa>0!hBZ#b5nq^g-^rBnhftm`hCCdMyg!Y+%H(vaWDD&C-5 zOZB)#dsKWL z#V5xX1}KFe26o?7EwD7iH7LdLbA_l0L0#0$V^|bbcED=OYfADxSZbLJ!-Hz5@!Jv( zcP9PY)Hyg8h^ivj=Z?#k5hfv1)KC+>P&m0HbajH_sD#0t)FYFTLI+|r;_gFEgp5oT zV49_*wldbL3w&1Fd>fYS7HTY?5}w>3aPTaSE_McZa8+}p-vI^0dd*P#fWKAoG&)5q zgIdH2`_(oHgal^?-c@pxBD-u0k>;J#gM;px%yVSs*^-^+?dryT6D~8f&t9wM>9rns z;w5Q=WLl_%{JzZTZN=i9wve^T$#)*xG!!(;_=@4k3Z1kS1xNI-DFI{p!CSS;H(@ui zNO0Y|on!ut0>nCI*p@|frNffd&5H+oFNb2lJz2YVuB5}v;UcgukbCBdBe`)?W+w%CX(Cj(hZz_frUwwUmN=iz9s=nJI}N7b z_l5W20_5d5;iK8^A_PXyOGXFYx}V)$!!L0PC!B;cO?{_$YxIB>`J0de1g!^{mouA= 
z`|HiPbS_hrFieUz15f|Ko=~rguZ@nITIH!MS_wtD;EXokThM~Vc1E??D6w{ph3cAZ zv1U0qof^9ljDUau|1l>A^!0wlV-p=NgV$PPZB;XVxYw>8Ky2DmhColcoMJjd&2EQs z!zrwejk*LZ`gJx$>e6v$8$A&&yG~w2jMeiB4h|;;G4G|MptWv?+I%@@pWaK^aje+H z-up(AT0fh>0wa)Y&{I*p=`&ZetDdizxA9EDBo|3z(`y3qd}9Xl8++eKMJ8Z7VZpCm zLzYylF!wXSJrlLQ5$`qF0JeK8voD{nKxbrR#HP+dbud?%_f_&mqnQ>9{_md*{7l)` zWI9r3nIK3+_9JbZEwh`#W6+iMGk6e}+g)ndF;VxePMl$^o{|(JajgA@4dGj z1m?=?mD}Eatb95g?++n@^vcga4>}I&_498OSh__~Xh*WaNV{iH(0bl{_ySBKB8()> zC&DRwx~ebvNLk?|{!Bg8@wg)~2AbaZL28Ss=uPhS_s^9jz{B&ZFD|;(m*JS~`BvS` z=%8+O%C4`M8sCFAA`fBDN!Vq)qlMt(Rj=5c)w$8DZrQ-^>a=Sn666z^wVujE@6Sm& z+Sq$mpUU)0Ue2nN^G{hJc6Y6-yim}AzmV$;9dt0){CYu9|WJM$SXgc0;SS{hlb0=k$z>4RBj*!ToHlc`^slei8F(P8WqoWQc0&t2)fu0o zLkArpE+Ta`-zzHwnuG_>LL@JTU+iH%osoR}{sOm*y@txkc2dgzw|UV-r#7sgqKVX> ztKGx=Nx}4{Z{~O&^S#{%@<*R5c9y5FIaLV*bfnq0re_R`n!+=??P>S8OqydoKEyw9 zJ_=Uh4(1C8*j_*6J&|DL5H>Kg_h`p2@y3!E|5Xk8f>W~OzOlMu{G*tOr`TuL7#FKC zL5X(T%^RK`^1E6<{4ia`%+qt`Grmdj1Igl~4_xiH70&U8WkiWg^w00stlH8MqbLx((OY9y_h1^F! zy~~_scvw|r`Sl{>_;MyeWd_-vJw)Rvr8%-jsAP@~@vU^S*>$qh+Lo;^}C{T{DWErX{Itq14{NYRE5v&`@e zSJyV7ny1MLL1%09BBIx;2V$_Um3mL3fkl;dOR7jBiNrEy;Gi#pgM3-#8cd1&OF5{W^iGwUD(}lsC?eM^+2#GgTFo2sDo_VcdRU` z08I}mWi4}7Gr@&uChi}U5YPPExoKK5l~$1zBW`D~JoKgqy;x=lDQ@C8-IS&&N~msl zP;tW@^*Yaqoj4S7Wo=2*0%reybmMQ2(O4$y@_U-7oIG(E7$0fHa_qsdUWl@h6LdRP z-R{L7{>l1>e?mvX!ga+HJ=@Kng1Fq-c3=;--zZ4r@A>XM>?V}BoM&GQ2-YNMhPf7b z+OSljYfri#Kxi8L&s{}+R#8yUWQ`n<*<}7mKXUZT`o(kv&`70akM(5%@}s5jxw#ay zw5p8Yd*(EruVfd-#+KaI=zIk^b{S)WEs?XKu6bI(B^8@ae;wos> zO_utSc!^z5`U$nXLL})E?@h>;5+h%TpD7hS`&C6Fl4ERh<3+*AYEL=v(HqVkySPYH z7SeYSUaFZ(M|Nb~XRk$Hruu`mhRydQUOP=*-*1OER=l-l0G*e7-J&HHN!0LRA8JD@ zD9estDGsTA7QhoabC-QyI zMXv{pmU^5N6xf6d4tPtFoBZrjVje;Yk5lOUWPsqw`dd(MJ=O%)piEGVYmA+^xnqup_I`>MX!qy~<*f#H3NPPC* zYR!~8r&U#K!`M1WkuV;u&jv?xKu7f7@G`Xxg0q5FB4rF~MWRfsJbjvEYfZ|MPj4F;vr;5))TpPT_0v;ail6P! 
z1vRv)y)5*Xt2rd>jxP-xK&;vjH=IAEi}v}%IiW%QP_TIN;}fBqPg9T!oHI=+Vhvp% z$_M$$uZ99)^|T)$&EwgFl{UA~@s+y7*f=@LZ zvUa_wR3{oRQV}V2tGDH-@LCE1(k6~(c2|NQ7>VRarO0P+%D*}2wt=9_h$LvHKRO4> z{x$UtCtGNFtkwPq!JSx(yaV^uhmPH6i#EI)Lr&ZGIs7+RMve*b&O*JH&8E+B!cx{D z->xRhRIP8s#4;&jvkt(=wDij%ymOe{A=UtyW$w%z`3W*bMvpB#>XuhROUDKxK+kfC ztjso)NM#9ylReW#Y*iji)|S-7l3p3FGHA z$CQk>Ci6sSba&BqS!Icqm0=@OEG?Y`1c`z*G+4cIT#ly#R@sa0OO*u%$yw>2Z+jhd zT( ze1HG#OnXET@u^)ZNz`BzHEiuo@TN63JEo=Z%J%7zVtDClR$2E(+HLumi}!Z@5?d!M zDR^V_?eM_r1^p)Uu*laH$9Vk2YvfD=VE?4D0qsHKw(}m1MkuiYmYntPQ zfd#VuvtuPYG@2pt?0aOQqIzCwwR&|Pq?scs?yG{V`_)RLGs>TOppn<>QV2SnMCkO` zZvD9>%bL;n7g2^yYd{6PEjcckWd+N2g#_;M{Q?Q#0M|)&62)>9l0%~|p}(m2r9u7T ze6u^d_vL}%a-m|$?KZ)~Ty@y0+fk1Z*Sb^V$<~pMMY@i%!p5`@$}qp332Z9mAjUho zftAREtM0|s)MEaXZ{{>fyYY%IokodRyaBw&8Kb!Jk(93Ss#XsmXZ~^cgN3eCAP{1- zmmtp&cDO0@qvADnM%_`bvHgR>fZim`K#f2y?GFomJ%21cQTF=jLd;$4PwEYi7NwVn zk5(cgn}Wm#SrV&-7vEM}_>Bn;X}X`L$xm*kKYZB1$k!QfCcGQUT~g!!V!%xX3ism; zZF``KaZ}9GkGkpMvlw1B#;=$L`Li3(RJk`Okwh;C)BymD(>mFx`}C$eM;`Nj468C4-u zrrU*|b1OE^0hQz>S5$MW&!ru$@R>t*oacUKH_tD;pwb1El@DC!cZn|a(8HG12;XZ< zIqS%00LKfe6;PMNthCxRBrn%Zah6ey2oK-tg=pR!)EGb0V2D|5DxWhK8athOPx@eJ zqG6@<;R7MyUknZn-B^Lr?wnJTVxV^0_m4kwvmIT(zeOR77G}kgkv%-4KfM`LQd(FQ z2;9$>oq z13-LD)JI~G8zWRJbe6Eg-p?iTmvo9xqJ$WLNFx5V!cevneP^%y4|fM~IHo6$i-gJ} zf~-4QD`-~r z=}Jsy5*kWzHa_jj7m8K!N%ASV0LruR;9O6gckF0MCcBcvh0Vf6)BW^D1wxANd zpqAE_iD|ai;Y?q>uES!U_f^gVMP58i zOV8=rI3nGu)hP414xQ;wxrq0;_L99lkLGVJu*?gkY%sle*!#>2ajI2G0)0{m`U*V2jZV6=YGKVeFvx=x;Q;rLH3{!{n%p(o9dm0D%AQHY4hW8`ZJ{kXV<2W0v#fr zac83yv^AH}}BcK*y$FYnu#6u%*-CX0W1vn@W^>6FbBsAu3;yL_nR5xKx}JLFC5AB`KItC6;;S4 zH)CAnVR!xTYU|g?;kpLMm&;*5pH!wyC<;Z!u5L~1`aS{MeS2PRz-Ry~5J>?r*7G&d zp`R$kkndAAd^q^{q0?6P{qRGzr4`Gw z(`taT>7Hp|6=q#`+XN7mH_&-CyIO5VP|J04iONqiao7jr zjqkk=c{|*;(_!<$aRBLM%PrhtX8ot(ih39?_qe6{?;gvZ|Gu*G$^KmksogCib6d`+ zo~d)u>wySJEy&l?c-E0mWF-elGk*=CqUVR*FO`K9mJZ@d&uPcXMLo>2t&8D*Xpo?A zqtMwk0TcUIKVqG++?EMepuS97OZB*?m-p;z{PQSUzB=wnFuRFv(VhrwGVQW8?4+xO 
zLLlqi^}80UZRgdN{`Q@RIqvfAdGR1kq>0O_Hl&jG4G>lhpL^n%EARkPyYR!{{BQr{ zoiW1o4y#(ylE49*1c|iTr^Y))tf^h6+ii@!8}+H#&_30V$aNPur6(G5(M9o=dIR3U~9>2Ne%ER{CvUn=9ieI z$}|J%nS`s(Xh*3j9F%-(QjPPGy&oQW`cltC*9hLgR0v2&XAi0Qe$SLNz@`t~yjA-s z6Ob_N*a^`P4yNJ(fPFK(E6pmf`PwZ~ zP;Vv<$aF#(S>Vpd!e_iX+OI+k%agp`{|X;YoXOQ%ej#;u&i7IFySDS(*>mD}Au-+u4%cnLs6w-qzkJmQ*- z75sy2alB4-Pk8WSOCoDkj@9#ytutFgPDT6IfeH*swe1dTzZ^$+DNLYI?oQRLXI)pd z@~1!zMWV_qUA9Tn-f(!9nMrwz94|~z*;U7`Kr|wzUEDuPHMF^6%dt($MY0&NW&k{mEV3&!( zrq{QQT`5jqd!ZjS=RMKT?;m<=uzkZ}2Cj$|xaG=eF-OC+vFii zajYIyZoP;$ptKdHH=X`C#@$nq>3-Cj`<7XoyU6<#b+taqsd43LWAy7J7Bhp#J`c6b z8Ct2J#vXvitke?KP-V(JH9LhxlY5GH-~x|N@HwwknVR51jypT+0vcpE;fkxqZf(A| zPsPr5GDAnA1lRsYd||)+&*`4+ zxU;+yU=2Gjkp4>4&c(W`fedg1NAH4H3h>Dmi=3gxUi?!E0FTEvhsL2C6Z zMtP;8?(Z+h&zQE{F=DUBKV(k^Vm;k|VaJC9`VHsYDy=L<7*C^MKUbpZ!)gnoN5d!B z6NYMwUubl!-bA&7Aq>CVLXz+vk%wdX4Y!`gMAQ+VvM+~*A@){mPLk#8M|z_gu*x@= zeZH&M(u{p*3a*9M`}O|voVOv(+l&0)cq`2&*UETTi>5C~=l|x$x~Isv>_?&>hhInp z8Qvp5nmjMn5$iyLDN-@cic8YC`&8v_*%~{v3a4+sB@k8s3`)MkIogLZH4P>VudwOt zHdB}OZzGSTn44DfDC$ANtnnouy`;3A^f{iXe6e9jb(t$;4F9^eq)#10BwifyY2$Tc zKgS8}#~aq{_7c2K*JbaQh9c%PbvM?T{hKfIsnQQuzR+TO*01bV zb%<;dI!$#$MZ#P@aB++D4>4Zt0*GCKh2kTr(cFIT{s*cr zxA#d^2cmjDSXbwFIB);PVp~#A!T39^d$$Xr0gV0?{*%Ygs;9%5hh2N=U)`A>2cXrB z--hR7On%^O^E^6vipB9SGg|(72r#3vXMlNA=f*3R+^iGrHK}c*yX@6>ED>=JTe|DoKnJ7 zfl(AjFFQ}@t6TDfD&-WsF}!9l%+$#P_c#OVov5hhSm(5lH$&E((8RSMI}}W;a80x!Bth zuf`tAJs*oko@}8gVfb9oai<=wtChbuef~{8*K4rl5sK!PwEKEdtt~fd+a05hNzc+y zt9t}tv7&i-uAbdshY4r28SwAODGP;^7%v>uk`6)F1_k?ab=-~E*9b;|1)qM$RYwn+i`&Xu||5w{P%OeTi;4^}XNpZLApJ7%GXi_8QE zctIo$4nIkzkjGNN^^LN_!TscMU)JbhDEYwl#o9SxrS=6U1Ed1K$m$oRhOJ06_hTur z{vSAfN#cuhCZ4ZESyXX6n=#wUZ+09Z>>zVTU+a8(x}+vv1y`EZm>A%j>ag4Xktp!n zd^2NcYky;-=CDs{$Iun&6Dw{eyJx2DByXA#(JlC)!g6upo2Nnngm=ZKC(#g#3TG7G z%$Xwa*-ErtO=9#D&`?wxI7b0H5p8LzfMsxZ5bEBhDcpU}wy;+8kLkaGAyuF;A3SV6$Vm=G0 z$(dGuS4|f!QJv*?gRHL9Y;DX_Z}b**VaxSh z4J^k2x*$NMiVdK3&{S406wp}v0bEIdx z(V6}CA96rpB8>YUb;mRkMXWpe_4kQd8wDIF&~(5Z5#{zKEo(JRrI=D5^EN#>?tSND 
z-!(T-`uC0OD)}~(`DO|{Ce|$^YH7mqIR$8+o5p)`{$ZUemiIt8ao_c&wTdwv_`C}} z@#E@?tMjIW?i_cDPw&PfNG;A^xNB-iut5gI22%2Wx>&t?jSM(Lb+IyjDVBMG@w`uu z5n3aKqWwmkZ3MfIfx>+im(kg-P1CEc4wTg7K8e@Pvx(5+u5M?uA4TT{_q?q?`L9Te&bn%9bfH+e`#z2rwJAq&?3FSriWhI44!EcN-=3iZ6aY`{Yg-C% zAwHYy z9qGRa4Q!-`5#Uo1xZexz{2mv(75S43bsMqau3=gOgQpu8URsS#+=$(~DQ9bI{w>T< zp}E^pQcz}dbWC&XdQUDkulD3^=Q>bSF>VYfrYn_y(jud^(9Cw_NAYU2XtmHbSR2s1 zZw(FbGb+muoAk%F%PNS3&h8gjO``xTG{?TOd=kI&R4I`uJTqgFfd zX^@KJ?tVg!WhF=veRYMaIu&|9>@2Kcw~tOjEv6j>Y2zWD0F;vi0Z`}bj-tV%*@8V<<{^Zv8AFGO-3yq(h3PBoSZn++01)( z3bkIwjHF$V@b-Fo9QskJML=jhZV zDzJZSiDFe4+Nr7Utjw_w`cohKE|2%WIl zTI+0Q|6o3oGi@{aP5P>@m(AhCxhc}rj@E9A$BjR?3mQYBC00x8y-7}OCyxkYE6b@& zZlK?BXz3@_u-EMse!A>a$rNGj^P2S^eu&i=3ye~m`S|OhRQ6#&i#zTm%FBDvV$03V z4S(X*YGRU)L+Z0WKPS2?Ggu@20_5K-PWH#a&yHRq*+BGF)L%x2-Q@hIhqP$+c$=`e zd1(4XL(XSwamvY#Eg1- zsP{*sb2aLbIa(yuOUO&cdC>Ed?6l@dY{%d!x*;bh-oL`9HzS2TG)0g1)MsNBhr`vaxj!ZqwUwX4K~;7EM9zbYjoK9hCe zjWRCTOZ}2&R`@GxiMP_m!wOWR=pW#mDk$Fe>oqwL$o{6pz>es2DpJ7AR!@9pS^+js7Bxy~q}*!~Vz$h8flxYs#@ z-~9yvP#wV(hH**kxb92NA@c;L=wR0 zjK_^*5GI?LnQgc=d-@&Al*o&5&a`Nm7$YXN|YIY>8w14tW4= zOIqbuzR0}Y?+~sMuoL!6)-A>??*Co_Hn;oiA|Q57Rn=5#jo&v9+~>YrKq54eb*ue& zF$@*(PXSR;AW&*5m-JFpG#d!7pHBD1z{!pLbwWW^*EqKa-GmkQr`8{imMuTzAzoO` z(A1eJ-k~F2Qiv&Zt@?30KlnV{?6ar%oQ-Zr@a_949Q{mYY&XljKOS~P``hg>!)gjN zu;$~{{!|3!lHE`;)0=d)g1HvY>SMW9j7uZ{`lA(UHZ0RHl5 zdH7TFQP-i{?N`n)Ukb54|6MmBanbaU(qe2&fw5e17SjiUOo5e8IK5F$N*?c?xi&3< zg5m$pT@2Og`RwNDnq#wa%}3$v24^P+6B9teHkQq*t@7PQYp5Q#QWJCCfE1!T3 z^RST7u0I;d8%{XMn)Y3mVTompKAsAxtfo7aZC`P*UJcDr_8rKLU3#_rwj}5BWj4ty zbsfkz!=)jPsNEFrr0(B4e7Tyxbm(TxL&!md25bv=qh>Z5%>A)s#yfW~8Va$IjN|rx z6Q02SAllq~5$kcQx}EoV-kU}Q0uAtg-4dC~hOoXEmfVGeR>wH3dfFh7Gw6ZGPP9i3 zx_XY`1g3c9LmrltFJ}fpn`k8cKK}~o1L^*-c;B6?`03zR$Q)-H_*Ya8)5jb z<@>)R$51QtvB<|@)Ba2jXh5HoPRwZo z`nwT)Xd-gU@a{;pn6U~jz;WIVUP^SpA{zgRk=TAw8oe~5tkQiRn|si};MOUm_dCA) zmqu&sx)w?n@NlVN51-QiAI zO+|^}PN41FvfaTV0f%#P#LjYYmQ}WRs$Vk51>?iul;Hkb2ak@;m!7lIGe-ikwf~9c zUcvRf`f@ippZhTY^5kOSJFZwtqSK{cf8=PGth*;CQV0&7O|J=$!@AP#Uu#S~)LtOs 
z?$gj|(DAHDXUO@{+YHsS7P{ToE`teY!>2>e`#%}n*YW>RtxG8!$8=xJd455_*(L(8 zMd^g23+gWtdQBoXq>A_dkWdH3G$4c;muQRe*=VspqPdjo^Phm|u>Rhwdrm&97+4yR z%a*s|zu?)QFfO%sbe_$UeS*D-k-x!*sPcU!TjcS*BDCor9T)(@Hh%|pa-(8K^{Rqz zz0W%~FE&oSPQ6+iT0tV@0F@+29PD|TC-bgRfru(7u%Xpdo=Ky`qhKSocDq+Qhj zsEH^7OxV7D5qNn+s{|GhHpCSL!d8roj75E6B-iLAa?RRti)MV#HZ6ksYNow zDL{zz*5aR3*WXr)8>Rp_)rINmGfyXrZcAvm7@h;TKtW-I7G3GD8T8uWW@OhaBkewJ zK9&Pyg#R=r$wBu^*FQE0t5M{C3!?lz<&6}eSNx~RVyOawnSYv^)PJ2C|4#nP{Nd%7 zXvx1lg*r1aVxpt74hj4@AY~`8*Y+R&5eU@$&wY%*?41@KVz}pT`=tSdD=Q}y*uDNz zgeC3K{J3qiS z7I(gh*SvioY@XL&o`2<0&A-AJuCi&?vCkB47spSjLl0<8y5f#QZ{{{1^D)!%6j|{e z%Sud6f7?vnJxBuEe5YRY*Dh#58YEza$rPOHKZlX0X4KRxDfCb25;j>A67+m(400s? z4%UF9K$%M0>U3(YzE6OmR#Fn~W-$Ym=)9z^1e$5inUZ?U-+MPN&!rcG z!5+lV>VT-M5%dTRid+7EjUu;`EOp7>evvZPr2I-&2M7Z(( zxsFq0jn(IRbZ@z@sG+-yVqsGr{1KIGYMaaXJ&m%;(Ujbbp<;*5Uy+07D~j@AV}Y3- zW;e2J!bMMRO+;Q>KjS|gIe&tJf&q@+gzDrN8b(ZO;PH&MCmh8w20N8u`CY$bihCG+MWc{kDrdC^ihU0)(o+C`&nsp6i-{O4 z)ETajx&=lh6-Dh5n|@U0jbMaDh!cvnp-`E!_(18{ayfuX-A|;In&asSaE-By79YXc zj{KvaODG#>s_p_971|zAtukB6EJM2ky4UI2x@F65?sDOFn zI!p+iIHS9*j!eJm@#A*gdVvy(W;*p|b?3_ZzW?g}4_=G^YN^f;CxsCMX;Q$Ue*eY; z(ww&V5a>LfN%+tjQ)kPne8mjAD0bFrauH((^XF(?iHroI(38`=Ea5sC90_w=-BW$AQ7{CZZ$ojZ*^-E$wuM)Xm2 zMZYEOih`p5GF{wk2byuicopF5Q&lPMe!p$H;sf!6nT4~Qp+A~{99a1EQJ5~$B*3FgyuS3^O)OOTvk5vCTBrwBmr%>JQ)5LLC?dFZ z6qYri3|D2^EPj#2vj+JEuC_vvGlwP8%!2or5+eeh3pf)!)?$b}UKS-OwEBKd-Ub+Co-|Eb+n@w1iPM3F_*7#{XFi3 zMVfJ-W9^YTelSsKvtN2H|2&)V+^_c~U;&J~ZWnBUy~UM37gV)%l?1lvNlT~?!uwUL zQL&qOYivu`b_g2w8(VcR`uRP}%&B76gmN07W_z;^?jL<0W`Es{2(!#(4RnoTH~gAs zYtU&a<3ud$;rTh4Kthzu7fV>D;uNd+4hTIX&ESWP->aSIW)HVMz2KpllN&vL z$E$Y;BVGm^m5N}BzxK*yqG@*@DX`9O*qWxHSXM5UT97wDw&#m{{iO+=%!xhtFubl5 z?^F{ze2|WvA07$cR2~Gm?^^r(guCBCYqqrIq*=U*#T4Zobg8uINXo+-)b(N3C+Fc7 z#U~}ietXsPh8hW?zkq}3uQ&X~{IL&;f;xOf%YJW&<+xe&k>F`gjwT21r6jrE0}jeQ z)r)U=E@wvgB~11{1+Lt&F3P;ZsMf@ytGa#<+0N-_z2x7!`Npnsf8s0&DCFTKZv4fA zEFA8dj?wft;p4qNm{KK z6fQeq=8-g~Hd9YNSkoLo-cW_EtedWpm7f-U3(K}IZ10SXX*ugCZ}irrO(BwRrd(ZH 
zSw#FX;16g&ez&Ypv_(5l(>n9V)^PbSOq1oGvz781KP#_bxOV!A3?F1nPZ1>ov^Sh@ zCjEx=TIyX8_?3tzT+APm@62u#ahJI>i=_K=N6alrT!$Nvp|@{+q3U+K>#=UUMXj(i z2)En8T;d7<#KuHQVeTuCv-qT>DQP?ah)L;JQ*XPvy1rLeHO6k zY7t1D5WhxGb^kG=_t)CnS@RJU6&5CBWXOrHb>XzBlTzng2b{{#D*n@)u9JYTwIJ!TK7_MMGXke;M%O}LA7fO1Z7z3bm$bpwVK)aq5~ur2}QrfDD841A;nDoZ>E&Kn&SeX!a~abDfN zO1#-BUi#VXdpDc=LcEx~DM2_p4titjYXP(T>&{V9i25wT$r%q6XsNcIEyE9jL2~`Z zPS(3&=ab4L=-X~En>OKHHVr<1We97XWSRjQyrx9xPpzCqspE)%K;oM8uhdoSF z4_fZ8QvMmQ0iyC=6_BcGCy*ac2)tx#-Jt1Ex6j|DoWm@nzHOqY~`IZ2rYr10@+^)*nrB>4MP_2HS{>0O@qP6pl; z6ly#<;8vscCtZ}v3EzTq9Q*)-0OOVP>ZQM_mLfQY=arKa7n!h|4b+XT9w<)%QHs7#dG0{dE%rh69Vt7hNmuC9!Nxutb_|!Y2A4PzN}Tn zMI!@JXmhr!BW0sjIl9+7)*aRrhYH4nk530CSLqKEi#!eW;J%CRgl(3sZ%gdI+85Fg z;YgM@FQcDRd2|6^_;JHJr?7Bwqc<01AgT?{7?}Fv5dcQhJy|xtZsBs8I|Ka(c;*)6 z`Zq;$7Z`pqhqYHg^^H*=_dVnO1CTkn*&k;<+1Xg;aI=cUv`oO)<)a5w6r`wm`{X4 zp=L5etiwjTKMNExcp~taHGD22Ne82wcQdz&8XXETHWE(19q6gbuB~~Ra~YTCDijg^ zFY?|xsLC%27e*yiNFy9oX^@g`Y0ja$yE_GGmG161G{Ql;LqIwY&3zSr-`qRj zy>n;2f4@D$40GT;Z|%LFwVw5?wV$yb*|{dT3`v1@VUqEi+{ZQTzK8nRL*IQ$a_8iz zeSX7Vo4B%#@HQ#jp(SAPRD3e`ZHlVae1#7|Z(j;=h00tX*`40`=^MK7m_NloMyxfh ztq~kW<5j_P=|;Jx&;OT`rLK!{ z-$S0?h~{Y06Tiq7w(iX&sMC9InK3gtGu1kO5K4Pza2K3_S08aXeEAHs-cK%GN*b{WIM53fo+@+f*K*Nx>o5W_|<>4fpkH zHlKS>8pW*0u`R2*>im4FKh~(j@7d05^_C3NS>^{I%w%rQo}|^^OeQ(_q;Z-woGp@F zX$;9a`=kB+WiirzDx4jYpP$eD9Y_;VT>}p$o68DE|KWQ2TXAvszAZnVyC8|4$39mu z8u540e|*ct%aP!di3t^;(#hv)NdMzcEdX*@Q38OECMG5n-uwM?49tto-Zf6|nx1S7 zXsYj~ym8c<64`F~)n zfEolSum6lVI%T?l8Hg~<|DSxgwW>c;CvRv-p22OC+tQK=wA3hV+TdJs0=$&rd7 z+1S`Hz>}(^GJE zcCOitgX*ZO<7{qj28!i;O8IBr`mCOBuG(uJ#>z$jRUN=@l>b;f+D^9O^7%UA(vKS` z_D4eCyUfSwk=YO&v_x>Dx?vw|xNZJO5^hkj#|Yco*9cmY-k%HJmF`}3yZfA3@fDDI zPYInfmHC)GyZ<^pas8k9cZ+_$87I9l=`~sUYrv|#mvrhUm(TF9!5L5cz&1*?ja8T= z>CDUp{cTou?tjPdMQhFOuYB5;A$?ujS$24-v>Zjuk+AL38K|oI&Y_klMjG!%{D${~ z_#NGu#&!-9ab{Cl>%gNYsE}oh<1TP%zWx?kXx6PQczij;$izLc=Yx^3E^Oxf`?KKb z-9Zmr&RTEF<>bTqGQ-7c%z)96smL0w2Fzdxt9|uu0^D%wE(~_vzFUTI>{sW-2=y)I 
z4P8RfNje+u@NKsn($aWtlYa!g`}(mdzKfkST2BLXGsa9lW?Z8aqt>-1A~ITCJ*`5( z9x8J%TGM=(`;ZZ};L;Moa(~S`_F&Ex^wV$w|E}>eI2(Ld!AW$_HQcgV56{wHWHgY1 z{qe4$1&Rx~0KMf`&l?kor2=kaU0~SrmKp~a-{dTNIPvm%K3Fmk{6pZsx2K&)xhWq6 zZgzg{<9)Nc!QRqTZb4`8FRF7>n=x6m*+wE$J_R^@I#r4<0TdG9n4Mi7`!NWO` zk;VF@(V$&Y$oY_YLynZ3S4bWK9}b-m7@of&d#oO2@XMGd-t=%ID*M4-WR$e?9d4m< zjZ%zH_mE&cYe+_xk1shrVy#Dg1q4e9)@K=ZdE}TiJeJ)Sz&wEA?8BNMkahX2^}!Lh zd-2@Kv#yar@%Dmd4g%+QzBgn4f%Kk9Uc{%9wb#-n0)uqNA#KI6 zknj=eMu`5dtS43I=lC(x6f^tzOtnN*xvnakVFS4K2hrtDg9J#V0h|w6=%%#U_GBlO zmiKM`4;N92!?IcVpI65}#Dp}_zRq{|oeOL`>M@7W6Yq#Nv@Pcy%A)8>ZIIG+QaM}} zM0%ftl5NY442sB-DdYTQgA;6#j&2Ccg=U6N59KxcCi-95wtjhL#u`~a?O3*bC(J_f zm5U(96KLE9bg(NPZ?}SSrm-0#e&%<2`n$W~uXkPJl=g2qml{hBW*Dobti%Y#4c?G6`GttmzSl>Q}dY7Y&%#x`gkYX z4LQLjIpM*^!Js3D^&bx?dA;r^lzt?t-W{!~-l8-0VX$mZ-=c@97;{@k5{Km<(LP6$ zR@8E>F#3a`h;EFzL1Yo{SnzrU|L`v>yMwD|yHl&Ei7Hmp`bdV~hjv3vNWa$hwtrWp z6cNczPbUz*JA(!rC(y{}0?mS?YM?cb$&YD+uw?j(S6LOv>-0CabGF~+?9jrMR& znK|M9%@%(3nc1_651`ip-50n2bXdm|-=x zy9~|oL9n}6o?zioV_b8I^SfSof*{;QY#E38n=G2I7=wi1-K_L!xbd z3Pl%{0xqZ9$J4`>u`Npa9fq=Zlz{Jr$X~Xz`>N1!gq=Gnq+3`e1K(p6_LL+mU((PJD?EX;boE^2cq1q&@CPd7Ug5jZn&>1W&H z0l-lUw)NhIk531T2{<|--F}dDa^M?lYB(8(o^smU>9_z93}CeOoq6EFt^agK;57mO zeDQ{LnQ?;+4YZPnu@H=easJUSWDgsgeGzK_56H@?sL zr&XeQkNvm(FmSuM;{RVh%*7c@@jiL6+OYZGk4Y;n=u-oHGNGi7z_Wpb=?Fy{kNeeP|Qt3VZCi0$gC+%xJ+~d(3 zm$epj4`+EXm)(>x_Tw7^D5OhR-K=nU!Y#BpN}=`UBjnt|VUsn1X6}_=fsLl4<}Xy# z2@yGh^Gg&Q56AsXQpUN_pBlIi{GNL)XydB*=&bV<*J#l_Ty~?oo$$090%1TTJ~T$c z$15~HXBn=e{!Fd!cADiykXXx6S4Jc;hcAX|xlMk`^V;UNjp`Ac{Tu8bEJ8ROnw5K;FI-jAIu2i2eM-n9l&BFnfvN=}m%-AFw8^woBd&<_xMTj?fJIMBx?DypHLI1A#|ED>LSCx)dfESd7i__9?iBn zJ}R5pR71ySbZo50^h&e-6Ib*-<#rDJ(c{YB%2Xu*8epb%8^abMLAZMT8w1@y@iXdH zyzwGxwg3?t5gJkgEjlIk%j4}gPSs!Cda zg<)$LS7+H`w7C_Qm$KLPecYQm?(su6nVkln!=k}Ka;^_=C8!XkM!dh5%EjNBo{wP_ z{{vkxGykT=QBL!L5kRY)?I*~ptE<1ej2`+uoKR za?rFJ+O}s8H{`hPOyq_8%sdiH0Gn!UfNEO1_aL5(`_;vM&g5{uZp*V`OCFDa9mg@* zZes%8mNdtmjox~Q6kdmY+&EuG0RHR|_u}+w`Yz9!4E5~S-25qu4_ab$1swZ}YKcF! 
z0zIkWji6y7zuf*!dmZ~hsljFaCT!mk*<}c)JW=#Dy!$v4I#r=jzme^+=mZid_%#Dol8?bs=4-<%6Nb%n*mJY(HQ5-A$%2f?MHAsG%? z)}Tx2AP)SpyhudsALg@u>Cq71N{w^u_JWtq-fRQ2z99DQC-eIuU6ajT!>4sa0Rl7` zr;-=sSKH$j#ZRNPrbarT;`&lGmSy_Zm>eyaFl=R$M(Lpl^3m6d_8u4#PH6E09_e~= z|8yUtmNpmc)uusgaan^rq1SctnxW86B+Xg~5Ewcu%{U(LR{m3GcVk4|eRx+P8 z(1tITojokh&RR+>yyn+Ay(4IdpRm{1Hz`jN4~O(%WGl#RqFn@GO55d&{4+N^HM7!g zblAd1*G1fZ?Tyfx(RCPp>vsGhVArQ+5 zUvdU#)HsKgXsaq%Pcyb4Ih0ezF<*HZE=nd37gJkjG_0%i?<|F!6~46lUfx~%qkKo6 za<(x_)QG>52Cs~2!pQoK4nEbk*CR_2=b%m)d_q>Z^Xd6ANep58vLc-cKq`@fJ6X_z z!ALnu6+6)=v@aTk2FCVhhZj+dz=#fSDd$5ovQU~(2fxg-nHy~jn3?R}fT_l<&bHOh zka^>GIGicG2nZl?JYWj95kbF4h{(K~vp#TS#p~gHHAswxL=uDCz8nKt-$=JAdDf^1}t~weMoDZcpE;2v$g9{1@f`9r}&V>;!xLcxw)Bxyl z3G_0FkB?Vy3VSq16w2c?IV}LDKeU`BrL)sAvj?2s}`InofFfRpcnSk@2G$3`{d4RRoFI=|WC^-Ei2q=nB_aW4er@^Ft z`6qS+4_jA>*PbokG0z9Ykhx3V3?OQ+oQLf+qzkWzVLleBz>LJw($X?^1Q!-lH zBU`7RBW;28v%#qJ;e$9poLpU9O-@c$SkJQeMH3V5R{?)hkfo(gw3=rJrjA9p&HOm? zpT-_FPJ{L00IwP6APY3Jo}HbA>*?)f19r09#-RePg8=$BvITdAhlkU>dGqwQzSVr2 zmjIlfsPFmiv{WVVWpU7qzbUZHimooPm&nLZ=olD`Pk)WTUEX=Cm+9cUKRk8&PYW7P_E+8?HdGOaW**@H1s5B3FE4<~2bwMh0AB_8xkPBH08yi+_8OplW@a=2&Ap7YG#NR$ zAHV^zS|-APSw95W36 z78&2&3{}SYYYyTZ-e%-OBAA<=f7Oqw1=_L$^&kle39^=!%sV^8{R4mm6HRD)rV#NsNa*Fumml{SRO(|Ef7%*Fft^zjD(Ao4fWgurhJPth z>D`*yvvZ(961(jJukh{ByOYz?%97`3QeL7l5fP@VT`p<-PH+BX)pClS*LuKK6!+dg zBodb_sEM(1q~JZPc%xZu&*f~(-x&}7{q$|*2J_kByq_zQ0TTUUOFC-M>&ak>)GTcN zvpuQ$NW94I>+;#Qz_Gy-JU+UN>qr~rHfQqU9Z3IDWunc;-s{EJ@DzxJ0&&sLPIquL zygpsKi-q1r)yg<#Hrf62gfl^NjET=2_vc>E*QIfoqXi&tuG`}QZE$~$dL=NFHTVO~ zK5G_a^3%Jz;9e|F$S53PRU7m4-V^b=qWAm`&5r*~6^yzQltNJFbAi7LGiB$k7k14! 
z-rwQzgnMC|k>17S*=8mrlZO~P%WB?1#a&FiwBJns!Yq!^Sab%Gg&8pXBuuF>?!341 zjDRaO67Tu6n({6$xXVwiv9_1{-}|x2!ftlj?du7{N)v#+Q&C~ptlC)Cao;HbGFR1V z)7ou|*g^51QhxVgWJuH%g$wmMg%nm0+Oi9nu5mqq%6|Nw(G<>4fizmMVG@tG-ocvL zv$YYo|2?;6ihqqHm^R_!m}`&zg~79&@Wn*_s(yKU13a2wH7O@2rkqeeXomEfvg(vJ zR1#PLz{r=E8btp+o(I9uhwabTCuszKo6(iy{YLxE_F1GRdv&YzysEf-2y@RfJO!QT zcTZ+mJL}NIodx7J52CDr_w2;#?A5kA7PMzpungdM3z%Wg#tFt_(bI9KF zI$dc)Cv*26E+4micY)&isc~{2_y8;&ytwz=%pVn2Qj-x6YS5_c7P^G91K9OZ8ikB^ zi(ca~1HrYKeaMoyf6|`b9YQfr0c#V?Faa&y63T+ms`l@cND@qmgpzk(D=q4r5nJUe z9B~Wsp)TTfD(LFANHfiA9L@*d@~vXb1UjMvE9B5Ydbo((omGq2KP99sN-7zGF6iH? zmoLjvamOqnEosF};#7;oihiuZErLo!LVI{E;aa$C9-pu{5JoN9^1zYA6glC}0M_Mu zM^b{Kna;2w<)|%Kps32`j>J=Uo@+jck}Za0hXGhJbP4H?j&?IF|3&#==?p-k2JGZnR#;buGhg=tF@TDS%0LSTz-2f<4vdM3 z`H^%|@f+*wW@jw9U72)sh5rbcFC!CnRG6Jxr^+fS{xdb#PlmW==rpe4Vd+?YUG*Jj)e(qU`T1#c19O zEjNL_GnpRVvU|lE;lEhHr%RVOWYK-W^b0mtR`Bq-ed}Tfaa?#Hf`={TKEZA+PG6A* zTJvr35(nn(K=BUwC7ydH*TQL^q@~U8A{WK9(eisc8j)0fKsgH1x!D$c=`y;yx=YJe z^Tijr?FJ=VgGBlQGQZTGE>)=8m1i}m>9}muqN~$m#iL->75Hu8#FkC!p{*lf*pV{KQpj(+8Z_Ls?tDWF=LUSmR}V?q6p7-pl)q zOypW;v2eB5eS4J7kmkLLpyNGnX!AGguu+qv#9V~>Q0L}}zt>$r$w6?{$@%3%)@ka~`^!gQ(*4TnbNT%5zl1+6QkRtL6T5n6esqb>ssy|$`wRl+ZsZ&-5 z*E1+Ye;M?N;P$uiST})Wku7rjX4{;C?WCfGft}8o2y$QATIoB5Y2uUjA9MjL^Q` zyyqyfaQ?3{M0zqaQ7y+)czIx3auj6d$zR#I_yOKJRERrOTTNn;8jYgWFoqx3;xpzq z?cbDg?7V9-io8dZHoRe==FqXoqI)K*XDJXFs#m#&n6TqI5oX&t^uw5&eFW<{ZHT#3 z>y=>{q21aiu=7t+ZJo@KE3br)4;gGyPw~S-?h~9mbTx0fqo%h)srfE`+-xe;%gii` zYyX|%Vd)8&gSAd!nJ8H;AQZ>tP61>X$MITTQQ$l;X4SF!K|UwEt_~dedzTMuBk%e7 zMEW7cL4mvbVBtl7#naV=_u>3hxdNS)+v>*Y&^yGsUSt@C@11#g)3_a^=Sy^K*&8^^LQ?%(f8Y{cBXWRe!i*NAF%F1fv>HlsZ`W{FNI@t=r{f ze`gn9-*1;xx!jGw`)Dt`>{0yn_-JpK%I6bJTskiz>M$=W)6%K1Fl-l5YQBYBXDdBY6SwGhQhe?Kh?f;V$JNe<`AGn%l-a+ zmizAHq+dbSDlhy$7%LtTbJH@V&O1GQ@ezl7hypvzV3r%ne9Y%+Go@sq=`~@$GV4Ec z?;r{Qg#acwW|%>cm}asBD>ell8dkJ;g-yoq-ws<1l>`Jr!(`(6xxHGxQU0Bf^ za94S{{{8ehy=Z?7Nj|*|4j}yL;zW_l^FIBa7jPB$0t+~8U?4mY*37!n##z=eejKiA z3P6%u0LkQUITp%7I0czD&4wUNLB*^{HS8uCdR0x%2#OiD4L=*!OYcw$jws9t@2X_} 
zJcx9tGv#L7^I*dY$IbViYHDXYQ&A<6k?&B^B2!7N=Q$Jdz4Pe%!I&?QzS6T8?FMfG zn_-)}K4d0jWTW!X^YPJEc9-|pjkh`dnTiw?=Gk?qn;luX6!*ImvTm^T7*MlLJ{2vg zB{rHdo+NS(5lVLls~izCOIVuQSxl`~l?lf@L-lqNUZLZuB{#OZ5@EQ3yUO;DYSKk^ zqQp|&&MeVa!uq0dJ7kp*B}6mAM6Nku#Py?{T~JbqRs&+5G0pwwW;*0lD#vZv2Q&=l zlBJHUL9)>e!E5wwvG5_dVt+1V4XgnLYE5++GVCt7-*U^s#L~R1G9BONy02?9=)b@8 zWANK6^V8KEdO3SXsN(V^0vd`lw#%*KizscU9tcI-{-fv_pT% z6`(fPj0z(OdgIlK|8z$b$_l+Yy3S#Ju#&yY!WA-s8WI}@fbe0gz3DC`SgFE77J zAgo`Vm9PA)e~-i0`L&e%lllPpdJ*?z*ZgQNYUY!W7;}TKv9Dz{DoX?tnC~RnJGCLv zV=0y1F5KNGYrlDXV=n2xDHvVG3cG&Gv+q?=x0Nw+jS(kOM<8rpkd|-ui^NYyX|wTc zyp`qdi~8cS=4Zu#h0k5WQ%PfBI$`@TEAppj8!$F>z`)H+={8>;J53WiEL7Pb!uGY> zh0;yAT8t0ud7}jqjT=Hi{;z8)qBK>&?1T!vfKT4)MO{#flXe`XyKC#^(JgP5f=1Pd zVs9Ex*zPwn@VP(C#psoydR@wXfs;7Ei=g7}e6Bi9htsR#FRFAK(X(u##?(;jzn4b5 z<1bj@_T5l$99tOq-u1?viol?ue>+o`wEr)5if@zv&)Url*tbORK~SlkiJ*h0NU=!Q znL8qdrAR7;xu>u1>3KvupBz?k(9SNT;vIa~ioF2q9&1uFPZdbx#m~;?h}^3cZ_QNs z9$e=L6&6MVu=-Htpgs8O)~RB)sgm6sB-W(soM<-_0o~upO#+Bdj2q*_EO>2JqMeMA zf+|3y4nX$k&&c#raHDH?pW}qnb3YU+%i>Q$@_z8(-`F@x;aM^``;kM{2f?aDXEOf9 zibAn^1X`q2sn7OuvQ>!}n)#`%)mj^kc14MoxzZK-p$aq(ej1{0Kl9xp?)N@Z{lj;P zx0cUvCSx+NUQE7ncD2VhXb|KTs|8R?K%n$rB3OHMUCyH|VDZOuA&p&m4g?l?~ibY39XRt>+^i}A)L_V1M z0aiW4*Z#%Vzmnr}b7=Q#rQ(v?BQ2m>x_?10QgMClVm?d;Hi#$>wV8fU@8DeRsIh~x zeoSp6>8y8uLRuDl?KPMnbI2RIN<8tq{+;Y_p49crDMp>p`Sq^X*>d4qqQhLq13Q_L z<8ug}4TEpNhZrcMHidU#?5!r|!LwDaByDi69L*eGv{k&eo%4Sf8}Lj4hQy!gytv%l ziBLKQo0juJAgs^QZi_EML!al!1bxklM}P24%jNiNylDSDj>t8~z5b??paAR*xW2tV z2B>cGp1yNBU<`%uj-QUZnmqchTY!B>zq}K@b2g(vdfI6C%8$~&5u`b9oO8iqBtz~7=fMWd(0|Q|+ ze(*S}&7xsCuN+oUw;yz;XVqRwOG^gG6apIgq9#GUcR}x`swpR<{fNI=;$)pixHKvAbx zf-hW$u|ytAo+ogyMK&Tg-XBUxP3x)e@in-8&R%4=SR2@Vn||6e_GqVT5Q4ybHnZxn z2CUNkRpcrUZa?@d%(c2i%_3i%9-1$i8Ox`BueCc0X;h%`Y$dIvT?e{k0z;bcPpI=D z7PY-SGLDjEC8V0+>+K2-O*GL!fno_IoB9!OUu8I|yfo=RP?bMY!Vg zY*^N#pJ4F}@J*a+B4VqjADxpu89N4W;g7M}{^S28aLbE(B&-2h9Ur~y$1Bgagz>1q zml4{}x0xS!0#Fy(f2q_z)_Rowk*Wlq`#+xc&l^zA{g39wq^8~c430}My}!Ns%claz z?bVlrMN?zy_V)rPC~UsV 
zi@8}z_Lrxz2c5rz!?TK`16kY_P5BJE+me3FqNW&r5sVc6_hoCXxw+`dEqX1XgyCzi%Ldip zvEh4lwRG(L7{qdF_7*3EY%DTtE$R|7H%d58ZzwI&>$E<~gT+q0g-PCxrd^G?v{fIu zo;e6FAG$E|$UD3vZ0iye!||0Ztc_FW$nL5@>K^$qF%yn)vR*?gEwJ)JG7M1{lL>^W zg#;y}AwXT*mz!-?2)~%(#BMb_sPquS_oaiogQc7U8frDkLUAa{op3QdX4;TEti zhgdYs;BUcK?Q(ysdHYuR#g&4;rYiCXzR0RHMnST?M@iaLes))E(A(q-O)E%cuCN@f zgy57|v6^%ndIx#ZxsqmVNRYgT1J-e3w!d!}OS~ePg9*%uXa2p_97Xw8xb_!ipL@Bn zf&DT5jFu!+8713!?m|PLYKK=r!vNT?F(p>I=tXe1EYgC;WRUG#h6Z{PL#l2A174(K;uB@hYE9=jr(U+ughfUXvhL7NapPr8!VvaNDTH5e>RbGF=Jifc)_yZ~;L@f$K^?59D3DGrAh^Yt}x*Gml4S}4w8 z>h|bKNdFM>DwNFq5vS`*a)uJSSLLph{TgSP-W@bCCtF~~Kw_G`r)3fxx48*1|$fb zf;zOaRkdkT4Jo-_>mkGcii}Wv-o=}bm{sc<56VLY*UC+`q)hs=0mb^&SxOuH4rgs` ztf@g~NF|LThsj)@v-cH)An7jr5r$gZqHPo7GqyL!BwcLM2# zSkug7+z{aCL1{kkq#BIYs!~nR{s_9K3#OIskjAi+HEt%}cdD{bNn0<5vg=qwcEX2r z1ti|stUm>0Dg=ml55wh%CyFxar$Bys;Pb9(qb*QE_Giijp~@GwICwS~;t}Kj&SU?z z$g!c8CwV+tJ^Hlc5C&7qkgY553n1n)`l#ob;~D<+vvEHvi`$jqM{Vt|b`Gp^=xz4G zZcEXi<(%8+HGLK(E^t^w*-~xO0YNfkEaKk_(t@3_hECSb)6ayFJVj8@bNr)?xwvK; zm{XI{ee?peMt}EvG2N1r4oo5b)<$WGqc{=L!aU|>s(Xf~xkMKC^>AY-zIosJ1`4~H zdZ&0YSjK&n#J212v#I(3Vmf^ptgP>4hhW#fibw|&1vBaUOqaSIx<`(U?!gWoOKcqR z@H6TavGKMcTyi3-w^`8uRh2@kBMz!R zR403{{N+a2@MU;(>CQ&H*;cvTT9Ze7%S|29)tuJ)@l^R*Wpi5xq`Z6!Nu)7ucJ;|M ze%HWSYL-t)acDwsWq7L3PP?P0BAQ&nAD9Fh1>v7f8e3UeO<&qqSI^g)rn|~v#Xtha z)^`SD^;vy{w^($fISGjO%m!FD`t;U3U^WI9rORsxxN(CTl&^x`(Z&fG+$I7&XE8*|(hx#HLuq4f|`d zjps|{r{gf4KI+4J{vnbFm(k|?F-~4l%V*1NCQr=0Z}HQ0`Ky*6!1&8cvA+$M1vG!U z0=xQBGT1ez(f+90}2zd#hCV_{+0N|$9J-s|p{=?I&K9x&vx+B@PpMSRjC8U2Mg zvAG4(>g^V6%k^_>9lchNrO*~1!rplS@hdem4&~YcnX6TDO(OJL-`fvn;f8&_c)QfP#iloxV6xYT%g1}aD2MixK6~4W;YCb~op!EJ`+v_H7znbZl+V@9ko01Z9xAYfFdI!(!Efutoa zkb72p&zqI6xZ?>B`mGOD4`D^FZ}3gW&kN)t6$&Y6ye|kW_13POy~iIHHU=LE zn0k-?XguZ1|D=sz@wfl+m(eKwso*$2Kt55W!}HceX&V*(&%aL)B5bmk(7DDXM0s0d z0?#e34Mj^yul>SsqLL@3xP-;$DRuROjXfbCo%l22jR6OOHp4}!(csC9^D+Wc#N4zV zVY=iapoMPy_#uDxi$8W`rBKz2*@T1GR56g)7<#g4Z3~LNeT*I{*dtN33Ku{`MDu<7 zw5qnLG|^C5(fyv;x)W+=s;2TYj*CbRJT41N_{b+#<$rHBwBLZR>q`qI*&z!ZgcFq6 
zHki6GrqgAS*P?5M0__P+cJbUPdUQE3;#Q5MIX25|)@Npghtj-E2Lf-Upa7I7=X@q;pm~E-4ep4X6S}(jDnPy+l zv4ESKB8qucm?0z#GEaXLe2kbU{%{3N%-Jvj$&I}lRe!t(_KILm)q}ZNz25q1bxf+H@AdHPEpF4L+^BidpV)YNKtk!ex z#*9_emD8AClf4{t+lF5oeD)j_2nC2~7sYJO2|w5|ZIoyQefr>57Nq4kudCB|@%3Og zOel7444JU#v^slf&{gD>{^&a|wWPV>Xa6T~!%e0edR%XOTqbNXQwQ5kwy-MRS!+nc zsde&{N&FsMBttz#Qlhq#^^%@qmvd*W41|yuPu9488uW$!?GyrZz4wrfEs=`!UJ!w? z^?;)92KCDWVe-N)o+3BXYT+&}$DldpSHbKU9gWVW<#I~l%hJcKa(AFV%m()vqZefh z7wY=W1e)sW3sk(!24gZ_o37oaRe+FLM5Yy{xQ>I6>9g4b#PX7a2zgW%vpTsL{ez6t z*h)pUPF6<&gaO~i*wi&9_szT!_W9X+$#3k+gB3DRN<|#{=AROPkF&>ka9=OLX1cC< z&~sx%rEOM~k0VA#c2vN*HEE3A{PWMNf*W(zX9aRu8iO0g&gnh3y$Sbms&P3RB&}V% z>~C!~FXc1jE|lJeTqIN3=C5bawrA!}fZL8QanlHiZht+xiJ76mPQx?_GCD<++Xo=f zUKu+o>ONJ%K#pZPiqwCWefzZZ{Qmu~=j6-xBA_z_lN`JtlMwhJUj#dE~=)chYc0095TOdL_l-wDwQTsr}OU9 z30F;hJ-YL$NyACh`UE<&+2Qn3CqDA%>NNxgBNF#`(|D5n5CY4AIm|YfZN77{F}=2s z2x<0(H?!}CwGZZ|+nu%*m{+U}BfB1!wi`HE!v#w{+O69Jlxe4!9WudK0z$%BU^l7a z>^4F>qj}JN!V3kXgCHJdW#zZZ%43IQDfPiX=gCUPorxzb3@7S2TOT)i{#PK_HXe-l zD_Jv|DnUZJQF=5>6n6CK9)ET)2L_cC_J92vHbVXXV}*#6|KoPzDW9>AYct{yhFU%z zATF8sxiuSAT3 zFZauTx_T)v|G2gwiTT#(XHG7zI60aZ@K3REaM-(uTc$V|;JHd#1n^OJd7ASMI zUTS&Gpd&qSiNCRh0NP``xtKiZBP>qiwt1N+88xVO`tM2+brt+iTm?siqcTDxvm!;X z3&}7$I6K%|KSY=3mbLk!67pc6g(R{4OR^=X1OE<`<&-FqU;1w#w50Z}+7v#C`;?g6 zLSboXnFpAkxE1%yD0#1RJb6z$0(nBP zMo+K?Nv5YOH5ed}`*ysAfk={42Ajw7HR=Eyo|jp$gez_o7*GyK4l9 zFV7!y5GJe}RCIP07CFNwzPnFZ%Xl5-(gI2?K7RZdt&?V9uv>?7qNp{zp?n{0z^B<0 zs8tlNSs6ZZ!@%d|NLZX~m$xy@frrZpm+=dgLG$qQP%b@&T!+k5rA@cGB_Y;~S?t;i zXquK<;FyKkUASdtxY2}DR84d5FwuO^T!0#577s!869Z_CA;QmLqvJ@ilFrr-|1b@6 zNJi=4^on(nky}0H37;=yWbO1STGPCqIDa;by>!OM*@<%ZWw{4b@hKy0Jf8DJabUn>Q1<=%PjmHH zL#+LC5L*`=-Ub&WYHDh!5|?UvYiP(}>Zh5Gb=ZoT%8czYq)yvvHig;ny&BsSXvfQB zHjroROctINT(|5_;CLW^c_ap19(#F|&^f&h8B|9$fGa#aRP5wQ%FyMGda_YpSq=3zlm5qHazEmTlfQwXezBI2zsuoz>gtcxvkYRo(Lf1M`|z9 zrVh@Y7<|6(ffnyL!MShSrJ?thmsZ1;>WJ;;qR4e;GezroZ`~8LgxKaW6kx5e)%34@ z%QL(F=JG#9Jh=LvaRy}l(DsMd9A;z202zCE`-FW5`=^ygHOPx=>$3~9goK3Do>h=6 
zJ!|nSG&Z-SMH~0R8u{jTbzpI>>n8|1AdmR@1Xp0!w|@P90z_W^SsR{iHfR(6WjI8o z!jurn2q%h71izYbaO zvPe64OoE}Lk)sZ}?9FrzCNa=6lk^hWwG3{i==WWp@49V=z$WJA-cw;3PUOq9+$?G+ ztE)R(BP@=j{kY7&~)ASnp;i zqDHR4|HYFpUR*BAXEt^E_qtiv1J;_bSJ_R+stZEL%ECa)TD^L&xl=e7VUA?&dwbHY zL_9$HQ^|m5+Gpbe+MLrqUOLCi{=74GzVjE<`k!-}yuAW$ORLtBOHHi^@Vr%Zyv~2h z0R%NZhgqg56U{2YwO zz4o-2FAnB?$lm8pu6&1-CCjpDb(p;jD&M+e$QC@T_dIaPXj~l>xoFy7pT1 z%9y>vqWhXTfZ}U1s_@vV03I**i|6KG5}+?mn61V=hKSsUM=-l10fO5r$Quqp@i&ER z?M4JEu`|ud8r(b+@mZodV^2{85!8l7ioee*KvNg?Gw|t5Pg*7@r<{&BPWO6+NT$w0 zjy=NhwAY{>Lr6(6d@i?XNGy!L*2n_cclx$1S0Z08uh8AhcfB#k-4b2Vlg?EHh2O6w z;q3_5X+cE>d2BD4mmjiyw(fP6oJMOG&Kh^Mhz3URAH4}8cB*dkdb{P(ouce9{reL< zWZbL)i=xlmqFfp93}$RTA740^L|Kb)ls~LjaUPaY>k392Ws9BU7`f)k@!o)O5V((g zcC}e4^Ip{L<&rym6%P*n%mgpBxxQU>gp%+%fg~mn1%WIfq=Aw+h$H2kV5~d=LZ_hO#b{iJ~=5aNJ5(? z7e8QPpSkXaksKl|@fr&vevBg4vR4Q@?W-QE{4PA0IMC~Qvn+8hQ)Xj4&prErV?J4^ z^Q6zSF!}{X`h`NWdkScbW$IP6jggg4r_k8POouDZmp3zJ2iF3M?Uf;}Q@sfq(lR|E zrUZEZIa7#RNmh^c_>NU)ns`#skUOKGvNFz^JHikTFWAo_9m1GwN_-4c;_Fs2yDkO7 zMpH)JrnAeJMfKpr=H;Px9_Zy&;R>t%=u2Hp)1wqdO*MA+Ov{&pcY?JA2)U|_N5L$| zjm3)O)@N-a3$7yBSGRVqUM|RW_(sD!(?_%A3mx&8AwoWv>^%2l50%A2{R`SY-RR3) zAFInFLN}Bcae%^Fk?9ctjPcn6ND_zooyGnHvyrZUqFa-EWfi4I&*1;=^kn&crNW6o z73X5_V$!Z-oR)uukDb2v_i%R(Ac!`TN%G5Gkx<~iuKX)wxz0WKuPxU9?AZL@eXARH zeRZ3~_Y_GK+b~t^Fr|-5qI9pm2&HWpIB-OjmQtzEtE8dS*^pM+y`}2%w}2ux|5up7 z$F}|U%F8F_cprT@|66HPD=KP>mIkC@LdE&#&(-rAdV0DqfZoSMj15)2?!ItfJt6c7;UkSHC6(4;pJq=lk_hF%O+AVBCP z@pr)cX5PH{&AiOyk7RPP_ndw9{?=aW+v}SOWC~oPur+T62&mpVdAcB$tO16kVSR!l z!TXveHw%57!%V%j1l<1_H}L+I&SzbOBY95+1C-VQ_x-XeZ<^)T8W(JSimc7O*qoRB zJ3v72x`pZH7&lD%p#6Jq)PYzk#o86Bh?i|C$w#g8OPsVqX5-x=Ex)t_RZ3^{`JgaL z9+&fxw&q;P-u7x+!zin7v2GPey!5#aS-^2Zy)^zsk!+gT;;p9Fw{b9vs1KrJeWyIM zU!Jz3Pf+id>4QgI(!$V1gFli&*K__Y!_WOYg0GJyK|t^ZKwFt)^p79+A!#hSh<+#n zu84|fb)Ia;$V7@1a8G*~wP)}nzYCCsFo`ZA&#};{w^#)1h+SRyfzCUx@RNN3i}6#e zcugE@k8B!@u%MYne)S4Cp!|ur@x-AwZjzw)IjVs@--L_3j7H+lhbJswNu9bwcPl+4 
z0{J+GBU082#e1{NHOg`rA$nuxRS0fY9p+^HhyBInv)-bMS^tIE$VB`oHH*83i+c`o4i`)8t^{pUvh~kB|JDJZf1hBdf$_<%R`n$=l@-oR9b`2vdQ!Rg)Dg?); zhBDI{eaN#~Onn<-0%Zk0Q6BxtdTtwN(-Wht+i@YLw7 zzK+#lyMM%@=GnRnb8i_bme!YDcgxS%+XZbW$p7hVn(?YlbTCN0Hw}u zTsVNSz-j05)L!+S7Wnvwz7apBZ|EhC`5O(}KwEoS7LWr+=c_e&8BGV1*-&!`h74tk zNa`&7s$rDn{culA5-jwqMuvsWf9_t)PW87h=oPjL-&6?tiTVZI!_rEE*g>BK%U`bi z{OD~f^j46d!VP=rHDwh%oP-LKdTef?2<_SZO`2(Z@3_6sC;w+}Y{?gPZLX7X?zUqO zn>T#G9%7P|yi-@oc{Rrg!Fw+x(aF{ns9R9VdS2B|pczIt_TWpPuR~k(lvgc%9g?@U zsGm@p$B`@BXHpt8vkvCe=OR}q;n-N%nsKP(94X@N6YtxKFnYq0K;sGm37E|$Pi(gj zo@10ADb`4{C3D0hHg|Ee@L-=en}=8Grrit!`h<>1^SpZ#r{sh88R7;S;&-ve5e{1B zdn0ZeYc&%ck;&+2t-=?ZZhKrw!=GAdiIwnXxt?nl}EA3!uKDXh%c$E#30fd5(AZ=Pf$hJE!ax zz&}4kakn8K;(PAp>KE$>WyTr19&nUS4B-)#;dnIpe2O=mGpo;g3Z1zek^OH50=>e`Ep7E z67h91pxQZK<@R|5sk=6$jK&!s7GL zEBRu0IUW3Cv|?$VjdZ`2X0DG;#xVxyhwyL`Ty&rm2DD zM~PX#P#p9VliE6Mzqt(dg@w!4p=6+#eACCxzm1D7t(UJ~P8!UNs&oH+F_ zzm)UFGJ;hv;?MC%q1Z4_-uvg8SZw0qXD+p+epMbLzHK?V_=+q*wo3|}`Q$4Ab00KN z++2y!{@LyF@_~q^6nOgK($?i>|5bZLOgH_7%;VLlt6L9HF!&{Wsy#_=?|H3W4a`Pg znb}UQbA8294*T)&v2&(>i)ClUd&hG3AYp>uF@Mj_YeIkib*F84XrZ>Y7y*#4Q=Bow zJ%!m~)mOUm=OuY>TMgglD@7U#FkgGeJh|C^-v{k<@r}y~zmwU>C@eNGFaU~jvOncz zl|m^J&O^L@`uJD1nvsJ*3dJMRs0Djq0*ox@N!CM`j@|@8WZ|4ckB3#daY*7amkv(8 z;G8=5nUgvwv-bHK4G_s%+}W49qRSz~o!VaX?9+q5^VC(O@|M6OF7$DSl}C7S)bA2c z9{~`P(i{v311@8tdsAQy^vGXTu!trz!RHUSfM}jyG{tsH`-Pcg{zD))8UuZIL} zy^>$;dHwt6)PYpaTuG$>fpX#GKf95zIu80D zT~SgyM))b38Co>(l1MU>b>EH+ne!u^_$GFB>!*40svRsZXx&S zi=pWCD*PSQYC!YdHJW50SB>F_UY{fR@0x;$V8EP20*n|oxAgQpHIJDr{mi~wd8Oxs zF#OYQ92GK`qCQ+Tu@&LBm?L|-WV~$_TV?F%pK6}gDKJRa$ymVa^Zn)=Y@H{Bo{119 z&-vnJ7A^MdYQs$5^>Ila|0F}mJ3GV3u`>-dnrQa+bJGaDm`9wm4Td!wib~7^D|x`GVkMSN%RY72kw~W?fv!y3^x@e%x`>F8n2zSg4DjCr zUt8N%XlNA8U7nu4IBvpK{FnAaPAyHrd-nV@Hd4Yy0H$NV$do5Cz2il?65vWRBC1q- z_B_&ZFfswo@a$`w_pLK;niwj~nN4nth{?DVjuDot7SZZ~sEa3&L*Fw%B%S3Qhrfu$ zbTs(phh5X`zWuiC;<^@O>sxH$ zNj-(m3`#VHhvza~q;@IAEM)KmlX&`H)VlZ+y(f1IThkk5p1AsAo$bf>?UWU5H#@w* z>llZ-)Xf9sAitQ%`PJ{1lshjRnlJfHW0pIKPz!vbqrohAuqnP8=_z1MZBeXqv0`jE 
z>`s-2H&8G1?~fspT??F>D{mlaOX2XY%3ara4n1*lNFa4pZc?#>ezoL?{CqCRR#&8? zSvb^Q1s?c2Ey&L)1loz9986eJ3w(UcI(_DO zwD(;wTd+N}ImPaEuN1-vbE_I5zve&QW)U26wls6-&+QvBx=r2@0q$KFbB<~D^Qf*n zIl$ZkxvHm;bwP`xm=_NI;>N9trA5MxE-GMGXZr{FJT*@w{q)>pT)i*8YZ>!pTA%mm z3sacf_$Ye6Rzod#6$wRC@tO6X@+MY>skk~1Y3hvjeI_9&)or27#UiGZ>LKwK0mk}I zi3bI@TFjUF==chP@q;Z>Jx}L|I+O!FHUF}yM78V;p9-}Ow`F{tQ|7apuk$$m0sD?o ztC)Qqx~tLt;JTvNZ|Q8GxQ5NSLx zvFxsrg7fC39>C}aI;|=muZ~k{0xoZAUmJveQ2h+TAZ;8nbhDe2B6n^g25HgNQ-5c` zHWuG0Toq+x_5q!p8eEu6#`M@jR*=5k(SA|Mu`_1|G+h15v!0r#Jato8hcCl$NwgATL=1G_w1jm`!}qm6n}dvH`J75qqffmZ5xzGTJnfa z(XI#^NXC^5(dQ0Ao5VkdpH`!zSVog}D+ioUWMR+q#`!=5tifKJdk;RCVILzh*^N{E zV#Y#m$o1-_82n6FmTf$+OW`Y!Rj8WiH+8NPd~RApRKht=Ul(^uuc+1UbTjUr=dukI zW6;jFYsf+vG(27A3DLbw{4$;?LgcBvadp)ufTn=RBiOyzKR}upP=#C{Tevt`L%iHx z#LUyqA$qDUe2i}xOL9?BINZ%+HrQlmSzv#zXqe#{rzZv)sKb7&^Cw?^J;%ARS!(*~ z`cM$QT{T|#ksR?l?>SMn?+i;ZPdl9jnx&g8qjT04uF2(}2pZZn4m}Qgi&vbl@0Nm; zk5ngUVwwI@ZPY^bLxF(BCv-=9hDreAFz=lYzCTa`3uwB9WSzPp zdAEh0G3#t{zn;ltRp>Ril}si<$KY>nM~|75K4@SSmjUr`6@m~qE;KvNw8N1p-ZVUwZMyd%BFs_%P!bZV$~Z1~mt z1v7Km5*_8a{+#zwLfFQNS9=`_3c2+TD-AVoC5BgangY{s16+p{YWaH#wj@763nNu* z;#qBHd5l#lhtqIBlA~vTF1WuS$V<@o$LNPu=egZ}Ka$xvYy(FSfBD+8lP+4l_#dN| znrGO8j*Zpm3fTDJ)%nSiUTJwTpSaC&gX!_n-HtsZg3cG7n&&k2zRXK12PK^nd#ya+ z{jw77*6!iRCZskl;}{=zxxXPTLxKc)a67i7DfOOTn`oMltetp(h0ihQ{L(J^m-W9u<~SIj^bh7ByU+OlFE|r7mTrq!z6l@sdM}!^?A6d%wH2t& znfSl1q}b~%lpLi}G2y{UCq)Gl5@403c%Zu@1QZB$?o&F->Ux!+Fy(0ta>pC!>@jNy zgrMW0u61<;`LJ(Eo{WCfF`{B^lXrj2#Nu%4Hk8on9&1?xRJ8_D{Jw~br95nvVd)`1 z9jdufUSfmG5K=~r==6{^c_Qde($AIafZt@wve385?9F%^*O4>ut!Hi&^1$418KRnf z{Wr2{3Hsz5gUrEgm*e)LGK+!vRc6D($c?q=s#h798B`C5F{OqgePon~kXG94yp{1MdRei65v%}pNzjf4>Mkv^#VHqwxa zNArNEATRPIbB`6l0dVbbnW#_7&ljy2alws>v_*9i- Date: Wed, 2 Aug 2017 13:46:42 -0500 Subject: [PATCH 54/95] SOLR-10310: Ref Guide updates for sow param default --- .../src/field-type-definitions-and-properties.adoc | 2 +- solr/solr-ref-guide/src/the-extended-dismax-query-parser.adoc | 2 +- solr/solr-ref-guide/src/the-standard-query-parser.adoc 
| 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/solr/solr-ref-guide/src/field-type-definitions-and-properties.adoc b/solr/solr-ref-guide/src/field-type-definitions-and-properties.adoc index c3c1b5d0b0f..205aa83b547 100644 --- a/solr/solr-ref-guide/src/field-type-definitions-and-properties.adoc +++ b/solr/solr-ref-guide/src/field-type-definitions-and-properties.adoc @@ -90,7 +90,7 @@ For multivalued fields, specifies a distance between multiple values, which prev `autoGeneratePhraseQueries`:: For text fields. If `true`, Solr automatically generates phrase queries for adjacent terms. If `false`, terms must be enclosed in double-quotes to be treated as phrases. `enableGraphQueries`:: -For text fields, applicable when querying with <>. Use `true` (the default) for field types with query analyzers including graph-aware filters, e.g., <> and <>. +For text fields, applicable when querying with <> (which is the default for the `sow` parameter). Use `true`, the default, for field types with query analyzers including graph-aware filters, e.g., <> and <>. + Use `false` for field types with query analyzers including filters that can match docs when some tokens are missing, e.g., <>. diff --git a/solr/solr-ref-guide/src/the-extended-dismax-query-parser.adoc b/solr/solr-ref-guide/src/the-extended-dismax-query-parser.adoc index 3b0bd49bc31..4b042bdd7c1 100644 --- a/solr/solr-ref-guide/src/the-extended-dismax-query-parser.adoc +++ b/solr/solr-ref-guide/src/the-extended-dismax-query-parser.adoc @@ -38,7 +38,7 @@ In addition to supporting all the DisMax query parser parameters, Extended Disma In addition to all the <>, Extended DisMax includes these query parameters: `sow`:: -Split on whitespace. If set to `false`, whitespace-separated term sequences will be provided to text analysis in one shot, enabling proper function of analysis filters that operate over term sequences, e.g., multi-word synonyms and shingles. 
Defaults to `true`, so text analysis is invoked separately for each individual whitespace-separated term. +Split on whitespace. If set to `true`, text analysis is invoked separately for each individual whitespace-separated term. The default is `false`; whitespace-separated term sequences will be provided to text analysis in one shot, enabling proper function of analysis filters that operate over term sequences, e.g., multi-word synonyms and shingles. `mm.autoRelax`:: If `true`, the number of clauses required (<>) will automatically be relaxed if a clause is removed (by e.g. stopwords filter) from some but not all <> fields. Use this parameter as a workaround if you experience that queries return zero hits due to uneven stopword removal between the `qf` fields. diff --git a/solr/solr-ref-guide/src/the-standard-query-parser.adoc b/solr/solr-ref-guide/src/the-standard-query-parser.adoc index b58c4f365e4..48af171f78f 100644 --- a/solr/solr-ref-guide/src/the-standard-query-parser.adoc +++ b/solr/solr-ref-guide/src/the-standard-query-parser.adoc @@ -36,7 +36,7 @@ Specifies the default operator for query expressions, overriding the default ope Specifies a default field, overriding the definition of a default field in the Schema. `sow`:: -Split on whitespace: if set to `false`, whitespace-separated term sequences will be provided to text analysis in one shot, enabling proper function of analysis filters that operate over term sequences, e.g. multi-word synonyms and shingles. Defaults to `true`: text analysis is invoked separately for each individual whitespace-separated term. +Split on whitespace. If set to `true`, which means text analysis is invoked separately for each individual whitespace-separated term. The default is `false`; whitespace-separated term sequences will be provided to text analysis in one shot, enabling proper function of analysis filters that operate over term sequences, e.g., multi-word synonyms and shingles. 
Default parameter values are specified in `solrconfig.xml`, or overridden by query-time values in the request. From 9662f2fafbdff629464fb3c248965941414b98d6 Mon Sep 17 00:00:00 2001 From: Cassandra Targett Date: Wed, 2 Aug 2017 13:51:06 -0500 Subject: [PATCH 55/95] SOLR-10310: Ref Guide updates, missed saving wording change in 1 file --- solr/solr-ref-guide/src/the-standard-query-parser.adoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/solr/solr-ref-guide/src/the-standard-query-parser.adoc b/solr/solr-ref-guide/src/the-standard-query-parser.adoc index 48af171f78f..7c49d623c9a 100644 --- a/solr/solr-ref-guide/src/the-standard-query-parser.adoc +++ b/solr/solr-ref-guide/src/the-standard-query-parser.adoc @@ -36,7 +36,7 @@ Specifies the default operator for query expressions, overriding the default ope Specifies a default field, overriding the definition of a default field in the Schema. `sow`:: -Split on whitespace. If set to `true`, which means text analysis is invoked separately for each individual whitespace-separated term. The default is `false`; whitespace-separated term sequences will be provided to text analysis in one shot, enabling proper function of analysis filters that operate over term sequences, e.g., multi-word synonyms and shingles. +Split on whitespace. If set to `true`, text analysis is invoked separately for each individual whitespace-separated term. The default is `false`; whitespace-separated term sequences will be provided to text analysis in one shot, enabling proper function of analysis filters that operate over term sequences, e.g., multi-word synonyms and shingles. Default parameter values are specified in `solrconfig.xml`, or overridden by query-time values in the request. 
From 211d106cc23adb1206479d2f7b455d4ded1da4fc Mon Sep 17 00:00:00 2001 From: Anshum Gupta Date: Tue, 1 Aug 2017 12:17:26 -0700 Subject: [PATCH 56/95] SOLR-11126: Node-level health check handler, with SolrJ support --- solr/CHANGES.txt | 2 + .../org/apache/solr/core/CoreContainer.java | 8 +- .../java/org/apache/solr/core/NodeConfig.java | 18 ++- .../org/apache/solr/core/SolrXmlConfig.java | 3 + .../handler/admin/HealthCheckHandler.java | 117 ++++++++++++++++++ .../solr/cloud/HealthCheckHandlerTest.java | 89 +++++++++++++ .../solrj/request/HealthCheckRequest.java | 61 +++++++++ .../solrj/response/HealthCheckResponse.java | 39 ++++++ .../solr/common/params/CommonParams.java | 7 ++ 9 files changed, 341 insertions(+), 3 deletions(-) create mode 100644 solr/core/src/java/org/apache/solr/handler/admin/HealthCheckHandler.java create mode 100644 solr/core/src/test/org/apache/solr/cloud/HealthCheckHandlerTest.java create mode 100644 solr/solrj/src/java/org/apache/solr/client/solrj/request/HealthCheckRequest.java create mode 100644 solr/solrj/src/java/org/apache/solr/client/solrj/response/HealthCheckResponse.java diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index bd56c6a2c18..fd8a1e8e238 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -63,6 +63,8 @@ New Features * SOLR-10858: Make UUIDUpdateProcessorFactory as Runtime URP (Amit Sarkar, noble) +* SOLR-11126: Node level health check handler (Anshum Gupta) + Bug Fixes ---------------------- diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java b/solr/core/src/java/org/apache/solr/core/CoreContainer.java index 53a3bb31653..0b789f8fba4 100644 --- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java +++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java @@ -22,6 +22,7 @@ import static org.apache.solr.common.params.CommonParams.AUTHZ_PATH; import static org.apache.solr.common.params.CommonParams.COLLECTIONS_HANDLER_PATH; import static 
org.apache.solr.common.params.CommonParams.CONFIGSETS_HANDLER_PATH; import static org.apache.solr.common.params.CommonParams.CORES_HANDLER_PATH; +import static org.apache.solr.common.params.CommonParams.HEALTH_CHECK_HANDLER_PATH; import static org.apache.solr.common.params.CommonParams.INFO_HANDLER_PATH; import static org.apache.solr.common.params.CommonParams.METRICS_PATH; import static org.apache.solr.common.params.CommonParams.ZK_PATH; @@ -80,6 +81,7 @@ import org.apache.solr.handler.SnapShooter; import org.apache.solr.handler.admin.CollectionsHandler; import org.apache.solr.handler.admin.ConfigSetsHandler; import org.apache.solr.handler.admin.CoreAdminHandler; +import org.apache.solr.handler.admin.HealthCheckHandler; import org.apache.solr.handler.admin.InfoHandler; import org.apache.solr.handler.admin.MetricsCollectorHandler; import org.apache.solr.handler.admin.MetricsHandler; @@ -135,6 +137,7 @@ public class CoreContainer { protected CoreAdminHandler coreAdminHandler = null; protected CollectionsHandler collectionsHandler = null; + protected HealthCheckHandler healthCheckHandler = null; private InfoHandler infoHandler; protected ConfigSetsHandler configSetsHandler = null; @@ -523,6 +526,7 @@ public class CoreContainer { createHandler(ZK_PATH, ZookeeperInfoHandler.class.getName(), ZookeeperInfoHandler.class); collectionsHandler = createHandler(COLLECTIONS_HANDLER_PATH, cfg.getCollectionsHandlerClass(), CollectionsHandler.class); + healthCheckHandler = createHandler(HEALTH_CHECK_HANDLER_PATH, cfg.getHealthCheckHandlerClass(), HealthCheckHandler.class); infoHandler = createHandler(INFO_HANDLER_PATH, cfg.getInfoHandlerClass(), InfoHandler.class); coreAdminHandler = createHandler(CORES_HANDLER_PATH, cfg.getCoreAdminHandlerClass(), CoreAdminHandler.class); configSetsHandler = createHandler(CONFIGSETS_HANDLER_PATH, cfg.getConfigSetsHandlerClass(), ConfigSetsHandler.class); @@ -640,7 +644,7 @@ public class CoreContainer { } finally { if (asyncSolrCoreLoad && futures 
!= null) { - coreContainerWorkExecutor.submit((Runnable) () -> { + coreContainerWorkExecutor.submit(() -> { try { for (Future future : futures) { try { @@ -1470,6 +1474,8 @@ public class CoreContainer { return collectionsHandler; } + public HealthCheckHandler getHealthCheckHandler() { return healthCheckHandler; } + public InfoHandler getInfoHandler() { return infoHandler; } diff --git a/solr/core/src/java/org/apache/solr/core/NodeConfig.java b/solr/core/src/java/org/apache/solr/core/NodeConfig.java index 5b4debe5e0f..ce7fe2792cd 100644 --- a/solr/core/src/java/org/apache/solr/core/NodeConfig.java +++ b/solr/core/src/java/org/apache/solr/core/NodeConfig.java @@ -47,6 +47,8 @@ public class NodeConfig { private final String collectionsAdminHandlerClass; + private final String healthCheckHandlerClass; + private final String infoHandlerClass; private final String configSetsHandlerClass; @@ -74,7 +76,7 @@ public class NodeConfig { private NodeConfig(String nodeName, Path coreRootDirectory, Path solrDataHome, Path configSetBaseDirectory, String sharedLibDirectory, PluginInfo shardHandlerFactoryConfig, UpdateShardHandlerConfig updateShardHandlerConfig, String coreAdminHandlerClass, String collectionsAdminHandlerClass, - String infoHandlerClass, String configSetsHandlerClass, + String healthCheckHandlerClass, String infoHandlerClass, String configSetsHandlerClass, LogWatcherConfig logWatcherConfig, CloudConfig cloudConfig, Integer coreLoadThreads, int transientCacheSize, boolean useSchemaCache, String managementPath, SolrResourceLoader loader, Properties solrProperties, PluginInfo[] backupRepositoryPlugins, @@ -88,6 +90,7 @@ public class NodeConfig { this.updateShardHandlerConfig = updateShardHandlerConfig; this.coreAdminHandlerClass = coreAdminHandlerClass; this.collectionsAdminHandlerClass = collectionsAdminHandlerClass; + this.healthCheckHandlerClass = healthCheckHandlerClass; this.infoHandlerClass = infoHandlerClass; this.configSetsHandlerClass = configSetsHandlerClass; 
this.logWatcherConfig = logWatcherConfig; @@ -146,6 +149,10 @@ public class NodeConfig { return collectionsAdminHandlerClass; } + public String getHealthCheckHandlerClass() { + return healthCheckHandlerClass; + } + public String getInfoHandlerClass() { return infoHandlerClass; } @@ -209,6 +216,7 @@ public class NodeConfig { private UpdateShardHandlerConfig updateShardHandlerConfig = UpdateShardHandlerConfig.DEFAULT; private String coreAdminHandlerClass = DEFAULT_ADMINHANDLERCLASS; private String collectionsAdminHandlerClass = DEFAULT_COLLECTIONSHANDLERCLASS; + private String healthCheckHandlerClass = DEFAULT_HEALTHCHECKHANDLERCLASS; private String infoHandlerClass = DEFAULT_INFOHANDLERCLASS; private String configSetsHandlerClass = DEFAULT_CONFIGSETSHANDLERCLASS; private LogWatcherConfig logWatcherConfig = new LogWatcherConfig(true, null, null, 50); @@ -236,6 +244,7 @@ public class NodeConfig { private static final String DEFAULT_ADMINHANDLERCLASS = "org.apache.solr.handler.admin.CoreAdminHandler"; private static final String DEFAULT_INFOHANDLERCLASS = "org.apache.solr.handler.admin.InfoHandler"; private static final String DEFAULT_COLLECTIONSHANDLERCLASS = "org.apache.solr.handler.admin.CollectionsHandler"; + private static final String DEFAULT_HEALTHCHECKHANDLERCLASS = "org.apache.solr.handler.admin.HealthCheckHandler"; private static final String DEFAULT_CONFIGSETSHANDLERCLASS = "org.apache.solr.handler.admin.ConfigSetsHandler"; public static final Set DEFAULT_HIDDEN_SYS_PROPS = new HashSet<>(Arrays.asList( @@ -302,6 +311,11 @@ public class NodeConfig { return this; } + public NodeConfigBuilder setHealthCheckHandlerClass(String healthCheckHandlerClass) { + this.healthCheckHandlerClass = healthCheckHandlerClass; + return this; + } + public NodeConfigBuilder setInfoHandlerClass(String infoHandlerClass) { this.infoHandlerClass = infoHandlerClass; return this; @@ -366,7 +380,7 @@ public class NodeConfig { public NodeConfig build() { return new NodeConfig(nodeName, 
coreRootDirectory, solrDataHome, configSetBaseDirectory, sharedLibDirectory, shardHandlerFactoryConfig, - updateShardHandlerConfig, coreAdminHandlerClass, collectionsAdminHandlerClass, infoHandlerClass, configSetsHandlerClass, + updateShardHandlerConfig, coreAdminHandlerClass, collectionsAdminHandlerClass, healthCheckHandlerClass, infoHandlerClass, configSetsHandlerClass, logWatcherConfig, cloudConfig, coreLoadThreads, transientCacheSize, useSchemaCache, managementPath, loader, solrProperties, backupRepositoryPlugins, metricsConfig, transientCacheConfig); } diff --git a/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java b/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java index 8cdf94773e9..99f0b517637 100644 --- a/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java +++ b/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java @@ -241,6 +241,9 @@ public class SolrXmlConfig { case "collectionsHandler": builder.setCollectionsAdminHandlerClass(value); break; + case "healthCheckHandler": + builder.setHealthCheckHandlerClass(value); + break; case "infoHandler": builder.setInfoHandlerClass(value); break; diff --git a/solr/core/src/java/org/apache/solr/handler/admin/HealthCheckHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/HealthCheckHandler.java new file mode 100644 index 00000000000..03c7bd47d1c --- /dev/null +++ b/solr/core/src/java/org/apache/solr/handler/admin/HealthCheckHandler.java @@ -0,0 +1,117 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.solr.handler.admin; + +import java.lang.invoke.MethodHandles; + +import org.apache.solr.common.SolrException; +import org.apache.solr.common.cloud.ClusterState; +import org.apache.solr.common.cloud.ZkStateReader; +import org.apache.solr.common.util.NamedList; +import org.apache.solr.core.CoreContainer; +import org.apache.solr.handler.RequestHandlerBase; +import org.apache.solr.request.SolrQueryRequest; +import org.apache.solr.response.SolrQueryResponse; +import org.apache.zookeeper.KeeperException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import static org.apache.solr.common.params.CommonParams.FAILURE; +import static org.apache.solr.common.params.CommonParams.OK; +import static org.apache.solr.common.params.CommonParams.STATUS; + +/* + * Health Check Handler for reporting the health of a specific node. + * + * This checks if the node is: + * 1. Connected to zookeeper + * 2. listed in 'live_nodes'. 
+ */ +public class HealthCheckHandler extends RequestHandlerBase { + + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + + CoreContainer coreContainer; + + public HealthCheckHandler(final CoreContainer coreContainer) { + super(); + this.coreContainer = coreContainer; + } + + @Override + final public void init(NamedList args) { + + } + + public CoreContainer getCoreContainer() { + return this.coreContainer; + } + + @Override + public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) throws Exception { + + log.info("Invoked HealthCheckHandler on [{}]", coreContainer.getZkController().getNodeName()); + CoreContainer cores = getCoreContainer(); + + if(cores == null) { + rsp.setException(new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Core container not initialized")); + return; + } + if(!cores.isZooKeeperAware()) { + rsp.setException(new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Health check is only available when running in SolrCloud mode")); + return; + } + ZkStateReader zkStateReader = cores.getZkController().getZkStateReader(); + ClusterState clusterState = zkStateReader.getClusterState(); + // Check for isConnected and isClosed + if(zkStateReader.getZkClient().isClosed() || !zkStateReader.getZkClient().isConnected()) { + rsp.add(STATUS, FAILURE); + rsp.setException(new SolrException(SolrException.ErrorCode.SERVICE_UNAVAILABLE, "Host Unavailable: Not connected to zk")); + return; + } + + try { + zkStateReader.updateLiveNodes(); + + // Set status to true if this node is in live_nodes + if (clusterState.getLiveNodes().contains(cores.getZkController().getNodeName())) { + rsp.add(STATUS, OK); + } else { + rsp.add(STATUS, FAILURE); + rsp.setException(new SolrException(SolrException.ErrorCode.SERVICE_UNAVAILABLE, "Host Unavailable: Not in live nodes as per zk")); + } + } catch (KeeperException e) { + rsp.add(STATUS, FAILURE); + rsp.setException(new 
SolrException(SolrException.ErrorCode.SERVICE_UNAVAILABLE, "Host Unavailable: Not connected to zk")); + } + + rsp.setHttpCaching(false); + + return; + } + + @Override + public String getDescription() { + return "Health check handler for SolrCloud node"; + } + + @Override + public Category getCategory() { + return Category.ADMIN; + } +} diff --git a/solr/core/src/test/org/apache/solr/cloud/HealthCheckHandlerTest.java b/solr/core/src/test/org/apache/solr/cloud/HealthCheckHandlerTest.java new file mode 100644 index 00000000000..3baa1811ebf --- /dev/null +++ b/solr/core/src/test/org/apache/solr/cloud/HealthCheckHandlerTest.java @@ -0,0 +1,89 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.solr.cloud; + +import java.io.IOException; +import java.util.Set; + +import org.apache.solr.client.solrj.SolrRequest; +import org.apache.solr.client.solrj.SolrResponse; +import org.apache.solr.client.solrj.SolrServerException; +import org.apache.solr.client.solrj.embedded.JettySolrRunner; +import org.apache.solr.client.solrj.impl.HttpSolrClient; +import org.apache.solr.client.solrj.request.GenericSolrRequest; +import org.apache.solr.client.solrj.request.HealthCheckRequest; +import org.apache.solr.client.solrj.response.HealthCheckResponse; +import org.apache.solr.common.SolrException; +import org.apache.solr.common.params.CommonParams; +import org.apache.solr.common.params.ModifiableSolrParams; +import org.apache.zookeeper.KeeperException; +import org.junit.BeforeClass; +import org.junit.Test; + +import static org.apache.solr.common.params.CommonParams.HEALTH_CHECK_HANDLER_PATH; + +public class HealthCheckHandlerTest extends SolrCloudTestCase { + @BeforeClass + public static void setupCluster() throws Exception { + configureCluster(1) + .addConfig("conf", configset("cloud-minimal")) + .configure(); + } + + @Test + public void testHealthCheckHandler() throws IOException, SolrServerException, InterruptedException, KeeperException { + SolrRequest req = new GenericSolrRequest(SolrRequest.METHOD.GET, HEALTH_CHECK_HANDLER_PATH, new ModifiableSolrParams()); + try (HttpSolrClient httpSolrClient = getHttpSolrClient(cluster.getJettySolrRunner(0).getBaseUrl().toString())) { + SolrResponse response = req.process(cluster.getSolrClient()); + assertEquals(CommonParams.OK, response.getResponse().get(CommonParams.STATUS)); + + JettySolrRunner jetty = cluster.getJettySolrRunner(0); + cluster.expireZkSession(jetty); + Set live_nodes = cluster.getSolrClient().getZkStateReader().getClusterState().getLiveNodes(); + + int counter = 0; + while (live_nodes.size() == 1 && counter++ < 100) { + Thread.sleep(100); + live_nodes = 
cluster.getSolrClient().getZkStateReader().getClusterState().getLiveNodes(); + } + + try { + req.process(httpSolrClient); + } catch (HttpSolrClient.RemoteSolrException e) { + assertTrue(e.getMessage(), e.getMessage().contains("Host Unavailable")); + assertEquals(SolrException.ErrorCode.SERVICE_UNAVAILABLE.code, e.code()); + } + } + } + + @Test + public void testHealthCheckHandlerSolrJ() throws IOException, SolrServerException { + HealthCheckRequest req = new HealthCheckRequest(); + try (HttpSolrClient httpSolrClient = getHttpSolrClient(cluster.getJettySolrRunner(0).getBaseUrl().toString())) { + HealthCheckResponse rsp = req.process(httpSolrClient); + assertEquals(CommonParams.OK, rsp.getNodeStatus()); + } + } + + @Test (expected = AssertionError.class) + public void testHealthCheckHandlerWithCloudClient() throws IOException, SolrServerException { + HealthCheckRequest req = new HealthCheckRequest(); + req.process(cluster.getSolrClient()); + } + +} diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/HealthCheckRequest.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/HealthCheckRequest.java new file mode 100644 index 00000000000..7073167bd54 --- /dev/null +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/HealthCheckRequest.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.solr.client.solrj.request; + +import java.io.IOException; +import java.util.Collection; + +import org.apache.solr.client.solrj.SolrClient; +import org.apache.solr.client.solrj.SolrRequest; +import org.apache.solr.client.solrj.impl.HttpSolrClient; +import org.apache.solr.client.solrj.response.HealthCheckResponse; +import org.apache.solr.common.params.SolrParams; +import org.apache.solr.common.util.ContentStream; + +import static org.apache.solr.common.params.CommonParams.HEALTH_CHECK_HANDLER_PATH; + +public class HealthCheckRequest extends SolrRequest { + + public HealthCheckRequest() { + this(METHOD.GET, HEALTH_CHECK_HANDLER_PATH); + } + + private HealthCheckRequest(METHOD m, String path) { + super(m, path); + } + + @Override + public SolrParams getParams() { + return null; + } + + @Override + public Collection getContentStreams() throws IOException { + return null; + } + + @Override + protected HealthCheckResponse createResponse(SolrClient client) { + // TODO: Accept requests w/ CloudSolrClient while ensuring that the request doesn't get routed to + // an unintended recepient. 
+ assert client instanceof HttpSolrClient; + return new HealthCheckResponse(); + } + + +} diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/HealthCheckResponse.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/HealthCheckResponse.java new file mode 100644 index 00000000000..b6fc36bb6b7 --- /dev/null +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/response/HealthCheckResponse.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.solr.client.solrj.response; + +import org.apache.solr.common.util.NamedList; + +public class HealthCheckResponse extends SolrResponseBase { + + public HealthCheckResponse() { + } + + public NamedList getErrorMessages() { + return (NamedList) getResponse().get( "errors" ); + } + + public String getMessage() { + return (String) getResponse().get("message"); + } + + public String getNodeStatus() { + return (String) getResponse().get("status"); + } + +} diff --git a/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java b/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java index 3b24f224cd5..a206c49f6ef 100644 --- a/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java +++ b/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java @@ -176,6 +176,7 @@ public interface CommonParams { String OMIT_HEADER = "omitHeader"; String CORES_HANDLER_PATH = "/admin/cores"; String COLLECTIONS_HANDLER_PATH = "/admin/collections"; + String HEALTH_CHECK_HANDLER_PATH = "/admin/health"; String INFO_HANDLER_PATH = "/admin/info"; String CONFIGSETS_HANDLER_PATH = "/admin/configs"; String AUTHZ_PATH = "/admin/authorization"; @@ -185,9 +186,15 @@ public interface CommonParams { String AUTOSCALING_PATH = "/admin/autoscaling"; String AUTOSCALING_DIAGNOSTICS_PATH = "/admin/autoscaling/diagnostics"; + String STATUS = "status"; + + String OK = "OK"; + String FAILURE = "FAILURE"; + Set ADMIN_PATHS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList( CORES_HANDLER_PATH, COLLECTIONS_HANDLER_PATH, + HEALTH_CHECK_HANDLER_PATH, CONFIGSETS_HANDLER_PATH, AUTHC_PATH, AUTHZ_PATH, From f80f1c0962014e086c932685e2cfc46a5cf0371d Mon Sep 17 00:00:00 2001 From: Cao Manh Dat Date: Thu, 3 Aug 2017 09:16:10 +0700 Subject: [PATCH 57/95] SOLR-11011: Fix AssignTest.testIdIsUnique() failure --- .../java/org/apache/solr/cloud/Assign.java | 30 ++++++++++++++----- .../solr/cloud/CreateCollectionCmd.java | 2 +- 
.../org/apache/solr/cloud/OverseerTest.java | 4 +++ 3 files changed, 28 insertions(+), 8 deletions(-) diff --git a/solr/core/src/java/org/apache/solr/cloud/Assign.java b/solr/core/src/java/org/apache/solr/cloud/Assign.java index 98dcfde0e58..116fce9e6dd 100644 --- a/solr/core/src/java/org/apache/solr/cloud/Assign.java +++ b/solr/core/src/java/org/apache/solr/cloud/Assign.java @@ -115,12 +115,12 @@ public class Assign { public static String assignNode(SolrZkClient client, DocCollection collection) { // for backward compatibility; - int numReplicas = collection.getReplicas().size(); - String coreNodeName = "core_node" + incAndGetId(client, collection.getName(), numReplicas * 20); + int defaultValue = defaultCounterValue(collection, false); + String coreNodeName = "core_node" + incAndGetId(client, collection.getName(), defaultValue); while (collection.getReplica(coreNodeName) != null) { // there is wee chance that, the new coreNodeName id not totally unique, // but this will be guaranteed unique for new collections - coreNodeName = "core_node" + incAndGetId(client, collection.getName(), numReplicas * 20); + coreNodeName = "core_node" + incAndGetId(client, collection.getName(), defaultValue); } return coreNodeName; } @@ -174,18 +174,34 @@ public class Assign { return String.format(Locale.ROOT, "%s_%s_replica_%s%s", collectionName, shard, type.name().substring(0,1).toLowerCase(Locale.ROOT), replicaNum); } - public static String buildCoreName(SolrZkClient zkClient, DocCollection collection, String shard, Replica.Type type) { + private static int defaultCounterValue(DocCollection collection, boolean newCollection) { + if (newCollection) return 0; + int defaultValue = collection.getReplicas().size(); + if (collection.getReplicationFactor() != null) { + // numReplicas and replicationFactor * numSlices can be not equals, + // in case of many addReplicas or deleteReplicas are executed + defaultValue = Math.max(defaultValue, + collection.getReplicationFactor() * 
collection.getSlices().size()); + } + return defaultValue * 20; + } + + public static String buildCoreName(SolrZkClient zkClient, DocCollection collection, String shard, Replica.Type type, boolean newCollection) { Slice slice = collection.getSlice(shard); - int numReplicas = collection.getReplicas().size(); - int replicaNum = incAndGetId(zkClient, collection.getName(), numReplicas * 20); + int defaultValue = defaultCounterValue(collection, newCollection); + int replicaNum = incAndGetId(zkClient, collection.getName(), defaultValue); String coreName = buildCoreName(collection.getName(), shard, type, replicaNum); while (existCoreName(coreName, slice)) { - replicaNum = incAndGetId(zkClient, collection.getName(), numReplicas * 20); + replicaNum = incAndGetId(zkClient, collection.getName(), defaultValue); coreName = buildCoreName(collection.getName(), shard, type, replicaNum); } return coreName; } + public static String buildCoreName(SolrZkClient zkClient, DocCollection collection, String shard, Replica.Type type) { + return buildCoreName(zkClient, collection, shard, type, false); + } + private static boolean existCoreName(String coreName, Slice slice) { if (slice == null) return false; for (Replica replica : slice.getReplicas()) { diff --git a/solr/core/src/java/org/apache/solr/cloud/CreateCollectionCmd.java b/solr/core/src/java/org/apache/solr/cloud/CreateCollectionCmd.java index 3a51ada8b20..fc96a61e13f 100644 --- a/solr/core/src/java/org/apache/solr/cloud/CreateCollectionCmd.java +++ b/solr/core/src/java/org/apache/solr/cloud/CreateCollectionCmd.java @@ -212,7 +212,7 @@ public class CreateCollectionCmd implements Cmd { for (ReplicaPosition replicaPosition : replicaPositions) { String nodeName = replicaPosition.node; String coreName = Assign.buildCoreName(ocmh.zkStateReader.getZkClient(), zkStateReader.getClusterState().getCollection(collectionName), - replicaPosition.shard, replicaPosition.type); + replicaPosition.shard, replicaPosition.type, true); 
log.debug(formatString("Creating core {0} as part of shard {1} of collection {2} on {3}" , coreName, replicaPosition.shard, collectionName, nodeName)); diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java index 1fbf98cf233..b921e39a242 100644 --- a/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java @@ -441,6 +441,7 @@ public class OverseerTest extends SolrTestCaseJ4 { ZkStateReader.COLLECTION_PROP, COLLECTION, ZkStateReader.SHARD_ID_PROP, "shard1", ZkStateReader.CORE_NAME_PROP, "core1", + ZkStateReader.CORE_NODE_NAME_PROP, "core_node1", ZkStateReader.ROLES_PROP, "", ZkStateReader.STATE_PROP, Replica.State.RECOVERING.toString()); @@ -1063,6 +1064,7 @@ public class OverseerTest extends SolrTestCaseJ4 { ZkStateReader.NODE_NAME_PROP, "node1", ZkStateReader.COLLECTION_PROP, "c1", ZkStateReader.CORE_NAME_PROP, "core1", + ZkStateReader.CORE_NODE_NAME_PROP, "core_node1", ZkStateReader.ROLES_PROP, "", ZkStateReader.STATE_PROP, Replica.State.DOWN.toString()); @@ -1118,6 +1120,7 @@ public class OverseerTest extends SolrTestCaseJ4 { "collection", "test", ZkStateReader.SHARD_ID_PROP, "x", ZkStateReader.BASE_URL_PROP, "http://127.0.0.1/solr", + ZkStateReader.CORE_NODE_NAME_PROP, "core_node1", ZkStateReader.NODE_NAME_PROP, "node1", ZkStateReader.CORE_NAME_PROP, "core1", ZkStateReader.STATE_PROP, Replica.State.DOWN.toString() @@ -1231,6 +1234,7 @@ public class OverseerTest extends SolrTestCaseJ4 { ZkStateReader.NODE_NAME_PROP, "node"+N, ZkStateReader.COLLECTION_PROP, COLLECTION, ZkStateReader.CORE_NAME_PROP, "core"+N, + ZkStateReader.CORE_NODE_NAME_PROP, "core_node"+N, ZkStateReader.ROLES_PROP, "", ZkStateReader.STATE_PROP, Replica.State.RECOVERING.toString()); From 39d6be4ecc4b41b7462ea05d442bc9e15fba7891 Mon Sep 17 00:00:00 2001 From: Karl Wright Date: Thu, 3 Aug 2017 09:32:11 -0400 Subject: [PATCH 58/95] LUCENE-7606: Add 
spatial relationships between all currently-defined Geo shapes --- .../lucene/spatial3d/geom/GeoAreaShape.java | 39 + .../apache/lucene/spatial3d/geom/GeoBBox.java | 2 +- .../spatial3d/geom/GeoBaseAreaShape.java | 125 +++ .../lucene/spatial3d/geom/GeoBaseBBox.java | 38 +- .../spatial3d/geom/GeoBaseDistanceShape.java | 2 +- .../lucene/spatial3d/geom/GeoBasePolygon.java | 2 +- .../spatial3d/geom/GeoComplexPolygon.java | 45 +- .../spatial3d/geom/GeoCompositeAreaShape.java | 141 +++ .../geom/GeoCompositeMembershipShape.java | 7 +- .../spatial3d/geom/GeoCompositePolygon.java | 2 +- .../spatial3d/geom/GeoConcavePolygon.java | 21 + .../spatial3d/geom/GeoConvexPolygon.java | 21 + .../geom/GeoDegenerateHorizontalLine.java | 7 +- .../geom/GeoDegenerateLatitudeZone.java | 7 +- .../geom/GeoDegenerateLongitudeSlice.java | 7 +- .../spatial3d/geom/GeoDegeneratePoint.java | 5 + .../geom/GeoDegenerateVerticalLine.java | 7 +- .../spatial3d/geom/GeoDistanceShape.java | 2 +- .../spatial3d/geom/GeoLatitudeZone.java | 46 +- .../spatial3d/geom/GeoLongitudeSlice.java | 33 +- .../spatial3d/geom/GeoNorthLatitudeZone.java | 38 +- .../spatial3d/geom/GeoNorthRectangle.java | 45 +- .../lucene/spatial3d/geom/GeoPolygon.java | 2 +- .../lucene/spatial3d/geom/GeoRectangle.java | 45 +- .../spatial3d/geom/GeoSouthLatitudeZone.java | 37 +- .../spatial3d/geom/GeoSouthRectangle.java | 43 +- .../spatial3d/geom/GeoStandardCircle.java | 8 + .../spatial3d/geom/GeoStandardPath.java | 37 + .../geom/GeoWideDegenerateHorizontalLine.java | 9 +- .../spatial3d/geom/GeoWideLongitudeSlice.java | 32 +- .../spatial3d/geom/GeoWideNorthRectangle.java | 48 +- .../spatial3d/geom/GeoWideRectangle.java | 46 +- .../spatial3d/geom/GeoWideSouthRectangle.java | 44 +- .../lucene/spatial3d/geom/GeoWorld.java | 5 + .../CompositeGeoPolygonRelationshipsTest.java | 842 ++++++++++++++++ .../geom/RandomGeoShapeGenerator.java | 944 ++++++++++++++++++ .../geom/RandomGeoShapeRelationshipTest.java | 240 +++++ 
.../SimpleGeoPolygonRelationshipsTest.java | 837 ++++++++++++++++ 38 files changed, 3433 insertions(+), 428 deletions(-) create mode 100644 lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoAreaShape.java create mode 100644 lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBaseAreaShape.java create mode 100644 lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoCompositeAreaShape.java create mode 100644 lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/CompositeGeoPolygonRelationshipsTest.java create mode 100644 lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/RandomGeoShapeGenerator.java create mode 100644 lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/RandomGeoShapeRelationshipTest.java create mode 100644 lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/SimpleGeoPolygonRelationshipsTest.java diff --git a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoAreaShape.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoAreaShape.java new file mode 100644 index 00000000000..b00ffca2321 --- /dev/null +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoAreaShape.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.lucene.spatial3d.geom; + +/** + * Shape that implements GeoArea. This type of shapes are able to resolve the + * spatial relationship of other shapes with itself. + * + * @lucene.experimental + */ + +public interface GeoAreaShape extends GeoMembershipShape, GeoArea{ + + /** + * Assess whether a shape intersects with any of the edges this shape. + * Note well that this method return false if the shape contains, is within + * or is disjoint with the given shape. + * + * @param geoShape is the shape to assess for intersection with this shape's edges. + * + * @return true if there's such an intersection, false if not. + */ + boolean intersects(GeoShape geoShape); +} diff --git a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBBox.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBBox.java index 0ae242592d6..683333cbe9d 100755 --- a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBBox.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBBox.java @@ -23,7 +23,7 @@ package org.apache.lucene.spatial3d.geom; * * @lucene.experimental */ -public interface GeoBBox extends GeoMembershipShape, GeoSizeable, GeoArea { +public interface GeoBBox extends GeoAreaShape, GeoSizeable { /** * Expand box by specified angle. diff --git a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBaseAreaShape.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBaseAreaShape.java new file mode 100644 index 00000000000..5bd8ab3e242 --- /dev/null +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBaseAreaShape.java @@ -0,0 +1,125 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.lucene.spatial3d.geom; + +/** + * Base extended areaShape object. + * + * @lucene.internal + */ +abstract class GeoBaseAreaShape extends GeoBaseMembershipShape implements GeoAreaShape { + + /** Constructor. + *@param planetModel is the planet model to use. + */ + public GeoBaseAreaShape(final PlanetModel planetModel) { + super(planetModel); + } + + /** All edgepoints inside shape */ + protected final static int ALL_INSIDE = 0; + /** Some edgepoints inside shape */ + protected final static int SOME_INSIDE = 1; + /** No edgepoints inside shape */ + protected final static int NONE_INSIDE = 2; + + /** Determine the relationship between the GeoAreShape and the + * shape's edgepoints. + *@param geoShape is the shape. + *@return the relationship. + */ + protected int isShapeInsideGeoAreaShape(final GeoShape geoShape) { + boolean foundOutside = false; + boolean foundInside = false; + for (GeoPoint p : geoShape.getEdgePoints()) { + if (isWithin(p)) { + foundInside = true; + } else { + foundOutside = true; + } + if (foundInside && foundOutside) { + return SOME_INSIDE; + } + } + if (!foundInside && !foundOutside) + return NONE_INSIDE; + if (foundInside && !foundOutside) + return ALL_INSIDE; + if (foundOutside && !foundInside) + return NONE_INSIDE; + return SOME_INSIDE; + } + + /** Determine the relationship between the GeoAreaShape's edgepoints and the + * provided shape. 
+ *@param geoshape is the shape. + *@return the relationship. + */ + protected int isGeoAreaShapeInsideShape(final GeoShape geoshape) { + boolean foundOutside = false; + boolean foundInside = false; + for (GeoPoint p : getEdgePoints()) { + if (geoshape.isWithin(p)) { + foundInside = true; + } else { + foundOutside = true; + } + if (foundInside && foundOutside) { + return SOME_INSIDE; + } + } + if (!foundInside && !foundOutside) + return NONE_INSIDE; + if (foundInside && !foundOutside) + return ALL_INSIDE; + if (foundOutside && !foundInside) + return NONE_INSIDE; + return SOME_INSIDE; + } + + @Override + public int getRelationship(GeoShape geoShape) { + final int insideGeoAreaShape = isShapeInsideGeoAreaShape(geoShape); + if (insideGeoAreaShape == SOME_INSIDE) { + return GeoArea.OVERLAPS; + } + + final int insideShape = isGeoAreaShapeInsideShape(geoShape); + if (insideShape == SOME_INSIDE) { + return GeoArea.OVERLAPS; + } + + if (insideGeoAreaShape == ALL_INSIDE && insideShape==ALL_INSIDE) { + return GeoArea.OVERLAPS; + } + + if (intersects(geoShape)){ + return GeoArea.OVERLAPS; + } + + if (insideGeoAreaShape == ALL_INSIDE) { + return GeoArea.WITHIN; + } + + if (insideShape==ALL_INSIDE) { + return GeoArea.CONTAINS; + } + + return GeoArea.DISJOINT; + } +} diff --git a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBaseBBox.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBaseBBox.java index ba92cc2242e..59562beca87 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBaseBBox.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBaseBBox.java @@ -22,7 +22,7 @@ package org.apache.lucene.spatial3d.geom; * * @lucene.internal */ -abstract class GeoBaseBBox extends GeoBaseMembershipShape implements GeoBBox { +abstract class GeoBaseBBox extends GeoBaseAreaShape implements GeoBBox { /** Construct, given planet model. *@param planetModel is the planet model. 
@@ -31,42 +31,6 @@ abstract class GeoBaseBBox extends GeoBaseMembershipShape implements GeoBBox { super(planetModel); } - // Signals for relationship of edge points to shape - - /** All edgepoints inside shape */ - protected final static int ALL_INSIDE = 0; - /** Some edgepoints inside shape */ - protected final static int SOME_INSIDE = 1; - /** No edgepoints inside shape */ - protected final static int NONE_INSIDE = 2; - - /** Determine the relationship between this BBox and the provided - * shape's edgepoints. - *@param path is the shape. - *@return the relationship. - */ - protected int isShapeInsideBBox(final GeoShape path) { - final GeoPoint[] pathPoints = path.getEdgePoints(); - boolean foundOutside = false; - boolean foundInside = false; - for (GeoPoint p : pathPoints) { - if (isWithin(p)) { - foundInside = true; - } else { - foundOutside = true; - } - if (foundInside && foundOutside) { - return SOME_INSIDE; - } - } - if (!foundInside && !foundOutside) - return NONE_INSIDE; - if (foundInside && !foundOutside) - return ALL_INSIDE; - if (foundOutside && !foundInside) - return NONE_INSIDE; - return SOME_INSIDE; - } } diff --git a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBaseDistanceShape.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBaseDistanceShape.java index 82e811ad6b5..d7be2ab1a80 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBaseDistanceShape.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBaseDistanceShape.java @@ -22,7 +22,7 @@ package org.apache.lucene.spatial3d.geom; * * @lucene.experimental */ -public abstract class GeoBaseDistanceShape extends GeoBaseMembershipShape implements GeoDistanceShape { +public abstract class GeoBaseDistanceShape extends GeoBaseAreaShape implements GeoDistanceShape { /** Constructor. *@param planetModel is the planet model to use. 
diff --git a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBasePolygon.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBasePolygon.java index 08c0634cefe..7fe8c98d049 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBasePolygon.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoBasePolygon.java @@ -21,7 +21,7 @@ package org.apache.lucene.spatial3d.geom; * * @lucene.internal */ -abstract class GeoBasePolygon extends GeoBaseMembershipShape implements GeoPolygon { +abstract class GeoBasePolygon extends GeoBaseAreaShape implements GeoPolygon { /** Constructor. *@param planetModel is the planet model to use. diff --git a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoComplexPolygon.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoComplexPolygon.java index 7f8f649d1ff..6bd66d41584 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoComplexPolygon.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoComplexPolygon.java @@ -369,6 +369,32 @@ class GeoComplexPolygon extends GeoBasePolygon { return true; } + @Override + public boolean intersects(GeoShape geoShape) { + // Create the intersector + final EdgeIterator intersector = new IntersectorShapeIterator(geoShape); + // First, compute the bounds for the the plane + final XYZBounds xyzBounds = new XYZBounds(); + geoShape.getBounds(xyzBounds); + + // Figure out which tree likely works best + final double xDelta = xyzBounds.getMaximumX() - xyzBounds.getMinimumX(); + final double yDelta = xyzBounds.getMaximumY() - xyzBounds.getMinimumY(); + final double zDelta = xyzBounds.getMaximumZ() - xyzBounds.getMinimumZ(); + // Select the smallest range + if (xDelta <= yDelta && xDelta <= zDelta) { + // Drill down in x + return !xTree.traverse(intersector, xyzBounds.getMinimumX(), xyzBounds.getMaximumX()); + } else if (yDelta <= xDelta && yDelta <= zDelta) { + // Drill down in y 
+ return !yTree.traverse(intersector, xyzBounds.getMinimumY(), xyzBounds.getMaximumY()); + } else if (zDelta <= xDelta && zDelta <= yDelta) { + // Drill down in z + return !zTree.traverse(intersector, xyzBounds.getMinimumZ(), xyzBounds.getMaximumZ()); + } + return true; + } + @Override public void getBounds(Bounds bounds) { @@ -691,7 +717,24 @@ class GeoComplexPolygon extends GeoBasePolygon { } } - + + + /** Assess whether edge intersects the provided shape. + */ + private class IntersectorShapeIterator implements EdgeIterator { + + private final GeoShape shape; + + public IntersectorShapeIterator(final GeoShape shape) { + this.shape = shape; + } + + @Override + public boolean matches(final Edge edge) { + return !shape.intersects(edge.plane, edge.notablePoints, edge.startPlane, edge.endPlane); + } + } + /** Count the number of verifiable edge crossings. */ private class LinearCrossingEdgeIterator implements EdgeIterator { diff --git a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoCompositeAreaShape.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoCompositeAreaShape.java new file mode 100644 index 00000000000..c7ad81d99d5 --- /dev/null +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoCompositeAreaShape.java @@ -0,0 +1,141 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.lucene.spatial3d.geom; + +/** + * GeoCompositeAreShape is a set of GeoAreaShape's, treated as a unit. + * + * @lucene.experimental + */ +public class GeoCompositeAreaShape extends GeoCompositeMembershipShape implements GeoAreaShape { + + /** + * Add a shape to the composite. It throw an IllegalArgumentException + * if the shape is not a GeoAreaShape + *@param shape is the shape to add. + */ + @Override + public void addShape(final GeoMembershipShape shape) { + if (!(shape instanceof GeoAreaShape)){ + throw new IllegalArgumentException("GeoCompositeAreaShape must be composed of GeoAreaShapes"); + } + shapes.add(shape); + } + + public boolean intersects(GeoShape geoShape){ + for(GeoShape inShape : shapes){ + if (((GeoAreaShape)inShape).intersects(geoShape)){ + return true; + } + } + return false; + } + + /** All edgepoints inside shape */ + protected final static int ALL_INSIDE = 0; + /** Some edgepoints inside shape */ + protected final static int SOME_INSIDE = 1; + /** No edgepoints inside shape */ + protected final static int NONE_INSIDE = 2; + + /** Determine the relationship between the GeoAreShape and the + * shape's edgepoints. + *@param geoShape is the shape. + *@return the relationship. + */ + protected int isShapeInsideGeoAreaShape(final GeoShape geoShape) { + boolean foundOutside = false; + boolean foundInside = false; + for (GeoPoint p : geoShape.getEdgePoints()) { + if (isWithin(p)) { + foundInside = true; + } else { + foundOutside = true; + } + if (foundInside && foundOutside) { + return SOME_INSIDE; + } + } + if (!foundInside && !foundOutside) + return NONE_INSIDE; + if (foundInside && !foundOutside) + return ALL_INSIDE; + if (foundOutside && !foundInside) + return NONE_INSIDE; + return SOME_INSIDE; + } + + /** Determine the relationship between the GeoAreShape's edgepoints and the + * provided shape. 
+ *@param geoshape is the shape. + *@return the relationship. + */ + protected int isGeoAreaShapeInsideShape(final GeoShape geoshape) { + boolean foundOutside = false; + boolean foundInside = false; + for (GeoPoint p : getEdgePoints()) { + if (geoshape.isWithin(p)) { + foundInside = true; + } else { + foundOutside = true; + } + if (foundInside && foundOutside) { + return SOME_INSIDE; + } + } + if (!foundInside && !foundOutside) + return NONE_INSIDE; + if (foundInside && !foundOutside) + return ALL_INSIDE; + if (foundOutside && !foundInside) + return NONE_INSIDE; + return SOME_INSIDE; + } + + @Override + public int getRelationship(GeoShape geoShape) { + final int insideGeoAreaShape = isShapeInsideGeoAreaShape(geoShape); + if (insideGeoAreaShape == SOME_INSIDE) { + return GeoArea.OVERLAPS; + } + + final int insideShape = isGeoAreaShapeInsideShape(geoShape); + if (insideShape == SOME_INSIDE) { + return GeoArea.OVERLAPS; + } + + if (insideGeoAreaShape == ALL_INSIDE && insideShape==ALL_INSIDE) { + return GeoArea.OVERLAPS; + } + + if (intersects(geoShape)){ + return GeoArea.OVERLAPS; + } + + if (insideGeoAreaShape == ALL_INSIDE) { + return GeoArea.WITHIN; + } + + if (insideShape==ALL_INSIDE) { + return GeoArea.CONTAINS; + } + + return GeoArea.DISJOINT; + } + +} diff --git a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoCompositeMembershipShape.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoCompositeMembershipShape.java index 6600a343a1f..24428f90970 100755 --- a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoCompositeMembershipShape.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoCompositeMembershipShape.java @@ -17,6 +17,7 @@ package org.apache.lucene.spatial3d.geom; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; /** @@ -57,7 +58,11 @@ public class GeoCompositeMembershipShape implements GeoMembershipShape { @Override public GeoPoint[] getEdgePoints() { - return 
shapes.get(0).getEdgePoints(); + List edgePoints = new ArrayList<>(); + for(int i=0;i points = new ArrayList<>(); + points.add(point1); + points.add(point2); + points.add(point3); + points.add(point4); + points.add(point5); + return GeoPolygonFactory.makeGeoPolygon(PlanetModel.SPHERE, points); + } + + private GeoPolygon buildConcaveGeoPolygon(double lon1,double lat1, + double lon2,double lat2, + double lon3,double lat3, + double lon4,double lat4) + { + GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat1), Math.toRadians(lon1)); + GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat2), Math.toRadians(lon2)); + GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat3), Math.toRadians(lon3)); + GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat4), Math.toRadians(lon4)); + final List points = new ArrayList<>(); + points.add(point1); + points.add(point2); + points.add(point3); + points.add(point4); + return GeoPolygonFactory.makeGeoConcavePolygon(PlanetModel.SPHERE,points); + } + + private GeoPolygon getCompositePolygon(){ + //POLYGON((0 80, 45 85 ,90 80,135 85,180 80, -135 85, -90 80, -45 85,0 80)) + GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(80), Math.toRadians(0)); + GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(85), Math.toRadians(45)); + GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(80), Math.toRadians(90)); + GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(85), Math.toRadians(135)); + GeoPoint point5 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(80), Math.toRadians(180)); + GeoPoint point6 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(85), Math.toRadians(-135)); + GeoPoint point7 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(80), Math.toRadians(-90)); + GeoPoint point8 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(85), Math.toRadians(-45)); + final List points = new ArrayList<>(); + 
points.add(point1); + points.add(point2); + points.add(point3); + points.add(point4); + points.add(point5); + points.add(point6); + points.add(point7); + points.add(point8); + return GeoPolygonFactory.makeGeoPolygon(PlanetModel.SPHERE, points); + } + + private GeoPolygon getComplexPolygon(){ + //POLYGON((0 80, 45 85 ,90 80,135 85,180 80, -135 85, -90 80, -45 85,0 80)) + GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(80), Math.toRadians(0)); + GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(85), Math.toRadians(45)); + GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(80), Math.toRadians(90)); + GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(85), Math.toRadians(135)); + GeoPoint point5 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(80), Math.toRadians(180)); + GeoPoint point6 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(85), Math.toRadians(-135)); + GeoPoint point7 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(80), Math.toRadians(-90)); + GeoPoint point8 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(85), Math.toRadians(-45)); + final List points = new ArrayList<>(); + points.add(point1); + points.add(point2); + points.add(point3); + points.add(point4); + points.add(point5); + points.add(point6); + points.add(point7); + points.add(point8); + GeoPolygonFactory.PolygonDescription pd = new GeoPolygonFactory.PolygonDescription(points); + return GeoPolygonFactory.makeLargeGeoPolygon(PlanetModel.SPHERE, Collections.singletonList(pd)); + } + + private GeoPolygon getMultiPolygon(){ + //MULTIPOLYGON(((-145.790967486 -5.17543698881, -145.790854979 -5.11348060995, -145.853073512 -5.11339421216, -145.853192037 -5.17535061936, -145.790967486 -5.17543698881)), + //((-145.8563923 -5.17527125408, -145.856222168 -5.11332154814, -145.918433943 -5.11317773171, -145.918610092 -5.17512738429, -145.8563923 -5.17527125408))) + GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, 
Math.toRadians(-5.17543698881), Math.toRadians(-145.790967486)); + GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-5.11348060995), Math.toRadians(-145.790854979)); + GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-5.11339421216), Math.toRadians(-145.853073512)); + GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-5.17535061936), Math.toRadians(-145.853192037)); + final List points1 = new ArrayList<>(); + points1.add(point1); + points1.add(point2); + points1.add(point3); + points1.add(point4); + GeoPolygonFactory.PolygonDescription pd1 = new GeoPolygonFactory.PolygonDescription(points1); + GeoPoint point5 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-5.17527125408), Math.toRadians(-145.8563923)); + GeoPoint point6 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-5.11332154814), Math.toRadians(-145.856222168)); + GeoPoint point7 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-5.11317773171), Math.toRadians(-145.918433943)); + GeoPoint point8 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-5.17512738429), Math.toRadians(-145.918610092)); + final List points2 = new ArrayList<>(); + points2.add(point5); + points2.add(point6); + points2.add(point7); + points2.add(point8); + GeoPolygonFactory.PolygonDescription pd2 = new GeoPolygonFactory.PolygonDescription(points2); + final List pds = new ArrayList<>(); + pds.add(pd1); + pds.add(pd2); + return GeoPolygonFactory.makeLargeGeoPolygon(PlanetModel.SPHERE, pds); + } + + public GeoShape getInsideCompositeShape(){ + //MULTIPOLYGON(((19.945091 -60.552631, 20.319948 -61.555652, 20.9 -61.5, 20.9 -61, 19.945091 -60.552631)), + // ((21.1 -61.5, 23.107901 -61.253298, 22.720804 -60.457713,21.1 -61, 21.1 -61.5))) + GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-60.552631), Math.toRadians(19.945091)); + GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-61.555652), Math.toRadians(20.319948)); + GeoPoint point3 = new 
GeoPoint(PlanetModel.SPHERE, Math.toRadians(-61.5), Math.toRadians(20.9)); + GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-61), Math.toRadians(20.9)); + final List points1 = new ArrayList<>(); + points1.add(point1); + points1.add(point2); + points1.add(point3); + points1.add(point4); + GeoPoint point5 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-61.5), Math.toRadians(21.1)); + GeoPoint point6 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-61.253298), Math.toRadians(23.107901)); + GeoPoint point7 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-60.457713), Math.toRadians(22.720804)); + GeoPoint point8 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-61), Math.toRadians(21.1)); + final List points2 = new ArrayList<>(); + points2.add(point5); + points2.add(point6); + points2.add(point7); + points2.add(point8); + GeoPolygon p1 = GeoPolygonFactory.makeGeoPolygon(PlanetModel.SPHERE, points1); + GeoPolygon p2 = GeoPolygonFactory.makeGeoPolygon(PlanetModel.SPHERE, points2); + GeoCompositeMembershipShape compositeMembershipShape = new GeoCompositeMembershipShape(); + compositeMembershipShape.addShape(p1); + compositeMembershipShape.addShape(p2); + return compositeMembershipShape; + } + +} diff --git a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/RandomGeoShapeGenerator.java b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/RandomGeoShapeGenerator.java new file mode 100644 index 00000000000..f8b662964e1 --- /dev/null +++ b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/RandomGeoShapeGenerator.java @@ -0,0 +1,944 @@ +/* +* Licensed to the Apache Software Foundation (ASF) under one or more +* contributor license agreements. See the NOTICE file distributed with +* this work for additional information regarding copyright ownership. +* The ASF licenses this file to You under the Apache License, Version 2.0 +* (the "License"); you may not use this file except in compliance with +* the License. 
You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ + +package org.apache.lucene.spatial3d.geom; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; + +import org.apache.lucene.util.LuceneTestCase; + +import static com.carrotsearch.randomizedtesting.RandomizedTest.randomDouble; + +/** + * Class for generating random Geo3dShapes. They can be generated under + * given constraints which are expressed as a shape and a relationship. + * + * note that convexity for polygons is defined as polygons that contains + * antipodal points, otherwise they are convex. Internally they can be + * created using GeoConvexPolygons and GeoConcavePolygons. 
+ * + */ +public class RandomGeoShapeGenerator extends LuceneTestCase { + + /* Max num of iterations to find right shape under given constrains */ + final private static int MAX_SHAPE_ITERATIONS = 50; + /* Max num of iterations to find right point under given constrains */ + final private static int MAX_POINT_ITERATIONS = 1000; + + /* Supported shapes */ + final protected static int CONVEX_POLYGON = 0; + final protected static int CONVEX_POLYGON_WITH_HOLES = 1; + final protected static int CONCAVE_POLYGON = 2; + final protected static int CONCAVE_POLYGON_WITH_HOLES = 3; + final protected static int COMPLEX_POLYGON = 4; + final protected static int CIRCLE = 5; + final protected static int RECTANGLE = 6; + final protected static int PATH = 7; + final protected static int COLLECTION = 8; + + /* Helper shapes for generating constraints whch are just three sided polygons */ + final protected static int CONVEX_SIMPLE_POLYGON = 500; + final protected static int CONCAVE_SIMPLE_POLYGON = 501; + + + /** + * Method that returns empty Constraints object.. + * + * @return an empty Constraints object + */ + public Constraints getEmptyConstraint(){ + return new Constraints(); + } + + /** + * Method that returns a random generated a random Shape code from all + * supported shapes. + * + * @return a random generated shape code + */ + public int randomShapeType(){ + return random().nextInt(9); + } + + /** + * Method that returns a random generated a random Shape code from all + * convex supported shapes. + * + * @return a random generated convex shape code + */ + public int randomConvexShapeType(){ + int shapeType = randomShapeType(); + while (isConcave(shapeType)){ + shapeType = randomShapeType(); + } + return shapeType; + } + + /** + * Method that returns a random generated a random Shape code from all + * concave supported shapes. 
+ * + * @return a random generated concave shape code + */ + public int randomConcaveShapeType(){ + int shapeType = randomShapeType(); + while (!isConcave(shapeType)){ + shapeType = randomShapeType(); + } + return shapeType; + } + + /** + * Method that returns a random generated GeoAreaShape code from all + * supported GeoAreaShapes. + * + * We are removing Collections because it is difficult to create shapes + * with properties in some cases. + * + * @return a random generated polygon code + */ + public int randomGeoAreaShapeType(){ + return random().nextInt(8); + } + + /** + * Check if a shape code represents a concave shape + * + * @return true if the shape represented by the code is concave + */ + public boolean isConcave(int shapeType){ + return (shapeType == CONCAVE_POLYGON); + } + + /** + * Method that returns a random generated Planet model from the supported + * Planet models. currently SPHERE and WGS84 + * + * @return a random generated Planet model + */ + public PlanetModel randomPlanetModel() { + final int shapeType = random().nextInt(2); + switch (shapeType) { + case 0: { + return PlanetModel.SPHERE; + } + case 1: { + return PlanetModel.WGS84; + } + default: + throw new IllegalStateException("Unexpected planet model"); + } + } + + /** + * Method that returns a random generated GeoPoint under given constraints. Returns + * NULL if it cannot find a point under the given constraints. + * + * @param planetModel The planet model. + * @param constraints The given constraints. + * @return The random generated GeoPoint. 
+ */ + public GeoPoint randomGeoPoint(PlanetModel planetModel, Constraints constraints) { + int iterations = 0; + while (iterations < MAX_POINT_ITERATIONS) { + double lat = randomDouble(); + if (Math.PI/2 - Math.abs(lat) <0){ + continue; + } + double lon = randomDouble(); + if (Math.PI - Math.abs(lat) <0){ + continue; + } + iterations++; + GeoPoint point = new GeoPoint(planetModel, lat, lon); + if (constraints.isWithin(point)) { + return point; + } + } + return null; + } + + /** + * Method that returns a random generated GeoAreaShape. + * + * @param shapeType The GeoAreaShape code. + * @param planetModel The planet model. + * @return The random generated GeoAreaShape. + */ + public GeoAreaShape randomGeoAreaShape(int shapeType, PlanetModel planetModel){ + GeoAreaShape geoAreaShape = null; + while (geoAreaShape == null){ + geoAreaShape = randomGeoAreaShape(shapeType,planetModel,new Constraints()); + } + return geoAreaShape; + } + + /** + * Method that returns a random generated GeoAreaShape under given constraints. Returns + * NULL if it cannot build the GeoAreaShape under the given constraints. + * + * @param shapeType The GeoAreaShape code. + * @param planetModel The planet model. + * @param constraints The given constraints. + * @return The random generated GeoAreaShape. + */ + public GeoAreaShape randomGeoAreaShape(int shapeType, PlanetModel planetModel, Constraints constraints){ + return (GeoAreaShape)randomGeoShape(shapeType, planetModel, constraints); + } + + /** + * Method that returns a random generated GeoShape. + * + * @param shapeType The shape code. + * @param planetModel The planet model. + * @return The random generated GeoShape. + */ + public GeoShape randomGeoShape(int shapeType, PlanetModel planetModel){ + GeoShape geoShape = null; + while (geoShape == null){ + geoShape = randomGeoShape(shapeType,planetModel,new Constraints()); + } + return geoShape; + } + + /** + * Method that returns a random generated GeoShape under given constraints. 
Returns + * NULL if it cannot build the GeoShape under the given constraints. + * + * @param shapeType The polygon code. + * @param planetModel The planet model. + * @param constraints The given constraints. + * @return The random generated GeoShape. + */ + public GeoShape randomGeoShape(int shapeType, PlanetModel planetModel, Constraints constraints){ + switch (shapeType) { + case CONVEX_POLYGON: { + return convexPolygon(planetModel, constraints); + } + case CONVEX_POLYGON_WITH_HOLES: { + return convexPolygonWithHoles(planetModel, constraints); + } + case CONCAVE_POLYGON: { + return concavePolygon(planetModel, constraints); + } + case CONCAVE_POLYGON_WITH_HOLES: { + return concavePolygonWithHoles(planetModel, constraints); + } + case COMPLEX_POLYGON: { + return complexPolygon(planetModel, constraints); + } + case CIRCLE: { + return circle(planetModel, constraints); + } + case RECTANGLE: { + return rectangle(planetModel, constraints); + } + case PATH: { + return path(planetModel, constraints); + } + case COLLECTION: { + return collection(planetModel, constraints); + } + case CONVEX_SIMPLE_POLYGON: { + return simpleConvexPolygon(planetModel, constraints); + } + case CONCAVE_SIMPLE_POLYGON: { + return concaveSimplePolygon(planetModel, constraints); + } + default: + throw new IllegalStateException("Unexpected shape type"); + } + } + + /** + * Method that returns a random generated a GeoCircle under given constraints. Returns + * NULL if it cannot build the GeoCircle under the given constraints. + * + * @param planetModel The planet model. + * @param constraints The given constraints. + * @return The random generated GeoCircle. 
+ */ + private GeoCircle circle(PlanetModel planetModel , Constraints constraints) { + int iterations=0; + while (iterations < MAX_SHAPE_ITERATIONS) { + iterations++; + final GeoPoint center = randomGeoPoint(planetModel, constraints); + if (center == null){ + continue; + } + final double radius = randomCutoffAngle(); + try { + + GeoCircle circle = GeoCircleFactory.makeGeoCircle(planetModel, center.getLatitude(), center.getLongitude(), radius); + if (!constraints.valid(circle)) { + continue; + } + return circle; + } catch (IllegalArgumentException e) { + continue; + } + } + return null; + } + + /** + * Method that returns a random generated a GeoBBox under given constraints. Returns + * NULL if it cannot build the GeoBBox under the given constraints. + * + * @param planetModel The planet model. + * @param constraints The given constraints. + * @return The random generated GeoBBox. + */ + private GeoBBox rectangle(PlanetModel planetModel, Constraints constraints) { + + int iterations = 0; + while (iterations < MAX_SHAPE_ITERATIONS) { + iterations++; + final GeoPoint point1 = randomGeoPoint(planetModel, constraints); + if (point1 == null){ + continue; + } + final GeoPoint point2 = randomGeoPoint(planetModel, constraints); + if (point2 == null){ + continue; + } + + double minLat = Math.min(point1.getLatitude(), point2.getLatitude()); + double maxLat = Math.max(point1.getLatitude(), point2.getLatitude()); + double minLon = Math.min(point1.getLongitude(), point2.getLongitude()); + double maxLon = Math.max(point1.getLongitude(), point2.getLongitude()); + + try { + GeoBBox bbox = GeoBBoxFactory.makeGeoBBox(planetModel, maxLat, minLat, minLon, maxLon); + if (!constraints.valid(bbox)) { + continue; + } + return bbox; + } catch (IllegalArgumentException e) { + continue; + } + } + return null; + } + + /** + * Method that returns a random generated a GeoPath under given constraints. Returns + * NULL if it cannot build the GeoPath under the given constraints. 
+ * + * @param planetModel The planet model. + * @param constraints The given constraints. + * @return The random generated GeoPath. + */ + private GeoPath path(PlanetModel planetModel, Constraints constraints) { + int iterations = 0; + while (iterations < MAX_SHAPE_ITERATIONS) { + iterations++; + int vertexCount = random().nextInt(2) + 2; + List geoPoints = points(vertexCount, planetModel, constraints); + double width =randomCutoffAngle(); + try { + GeoPath path = GeoPathFactory.makeGeoPath(planetModel, width, geoPoints.toArray(new GeoPoint[geoPoints.size()])); + if (!constraints.valid(path)) { + continue; + } + return path; + } catch (IllegalArgumentException e) { + continue; + } + } + return null; + } + + /** + * Method that returns a random generated a GeoCompositeMembershipShape under given constraints. Returns + * NULL if it cannot build the GGeoCompositeMembershipShape under the given constraints. + * + * @param planetModel The planet model. + * @param constraints The given constraints. + * @return The random generated GeoCompositeMembershipShape. + */ + private GeoCompositeAreaShape collection(PlanetModel planetModel, Constraints constraints) { + int iterations = 0; + while (iterations < MAX_SHAPE_ITERATIONS) { + iterations++; + int numberShapes = random().nextInt(3) + 2; + GeoCompositeAreaShape collection = new GeoCompositeAreaShape(); + for(int i=0; i geoPoints = points(vertexCount,planetModel, constraints); + List orderedGeoPoints = orderPoints(geoPoints); + try { + GeoPolygon polygon = GeoPolygonFactory.makeGeoPolygon(planetModel, orderedGeoPoints); + if (!constraints.valid(polygon) || isConcave(planetModel, polygon)) { + continue; + } + return polygon; + } catch (IllegalArgumentException e) { + continue; + } + } + return null; + } + + /** + * Method that returns a random generated a convex GeoPolygon with holes under given constraints. Returns + * NULL if it cannot build the GeoPolygon with holes under the given constraints. 
+ * + * @param planetModel The planet model. + * @param constraints The given constraints. + * @return The random generated GeoPolygon. + */ + private GeoPolygon convexPolygonWithHoles(PlanetModel planetModel, Constraints constraints) { + int vertexCount = random().nextInt(4) + 3; + int iterations = 0; + while (iterations < MAX_SHAPE_ITERATIONS) { + iterations++; + List geoPoints = points(vertexCount,planetModel, constraints); + List orderedGeoPoints = orderPoints(geoPoints); + try { + GeoPolygon polygon = GeoPolygonFactory.makeGeoPolygon(planetModel, orderedGeoPoints); + //polygon should comply with all constraints except disjoint as we have holes + Constraints polygonConstraints = new Constraints(); + polygonConstraints.putAll(constraints.getContains()); + polygonConstraints.putAll(constraints.getWithin()); + polygonConstraints.putAll(constraints.getDisjoint()); + if (!polygonConstraints.valid(polygon) || isConcave(planetModel, polygon)){ + continue; + } + //hole must overlap with polygon and comply with any CONTAINS constraint. 
+ Constraints holeConstraints = new Constraints(); + holeConstraints.putAll(constraints.getContains()); + holeConstraints.put(polygon,GeoArea.OVERLAPS); + //Points must be with in the polygon and must comply + // CONTAINS and DISJOINT constraints + Constraints pointsConstraints = new Constraints(); + pointsConstraints.put(polygon,GeoArea.WITHIN); + pointsConstraints.putAll(constraints.getContains()); + pointsConstraints.putAll(constraints.getDisjoint()); + List holes = concavePolygonHoles(planetModel, holeConstraints, pointsConstraints); + //we should have at least one hole + if (holes.size() == 0){ + continue; + } + polygon = GeoPolygonFactory.makeGeoPolygon(planetModel,orderedGeoPoints,holes); + if (!constraints.valid(polygon) || isConcave(planetModel, polygon)){ + continue; + } + return polygon; + } catch (IllegalArgumentException e) { + continue; + } + } + return null; + } + + /** + * Method that returns a random list if concave GeoPolygons under given constraints. Method + * use to generate convex holes. Note that constraints for points and holes are different, + * + * @param planetModel The planet model. + * @param holeConstraints The given constraints that a hole must comply. + * @param pointConstraints The given constraints that a point must comply. + * @return The random generated GeoPolygon. 
+ */ + private List concavePolygonHoles(PlanetModel planetModel, + Constraints holeConstraints, + Constraints pointConstraints) { + int iterations =0; + int holesCount = random().nextInt(3) + 1; + List holes = new ArrayList<>(); + while (iterations < MAX_SHAPE_ITERATIONS) { + iterations++; + int vertexCount = random().nextInt(3) + 3; + List geoPoints = points(vertexCount, planetModel, pointConstraints); + geoPoints = orderPoints(geoPoints); + Collections.reverse(geoPoints); + try { + GeoPolygon hole = GeoPolygonFactory.makeGeoPolygon(planetModel, geoPoints); + if (!holeConstraints.valid(hole) || isConvex(planetModel, hole)) { + continue; + } + holes.add(hole); + if (holes.size() == holesCount){ + return holes; + } + pointConstraints.put(hole, GeoArea.DISJOINT); + } catch (IllegalArgumentException e) { + continue; + } + } + return holes; + } + + /** + * Method that returns a random generated a concave GeoPolygon under given constraints. Returns + * NULL if it cannot build the concave GeoPolygon under the given constraints. + * + * @param planetModel The planet model. + * @param constraints The given constraints. + * @return The random generated GeoPolygon. + */ + private GeoPolygon concavePolygon(PlanetModel planetModel, Constraints constraints) { + + int vertexCount = random().nextInt(4) + 3; + int iterations = 0; + while (iterations < MAX_SHAPE_ITERATIONS) { + iterations++; + List geoPoints = points(vertexCount,planetModel, constraints); + List orderedGeoPoints = orderPoints(geoPoints); + Collections.reverse(orderedGeoPoints); + try { + GeoPolygon polygon = GeoPolygonFactory.makeGeoPolygon(planetModel, orderedGeoPoints); + if (!constraints.valid(polygon) || isConvex(planetModel, polygon)) { + continue; + } + return polygon; + } catch (IllegalArgumentException e) { + continue; + } + } + return null; + } + + /** + * Method that returns a random generated a concave GeoPolygon with holes under given constraints. 
Returns + * NULL if it cannot build the GeoPolygon under the given constraints. Note that the final GeoPolygon is + * convex as the hole wraps the convex GeoPolygon. + * + * @param planetModel The planet model. + * @param constraints The given constraints. + * @return The random generated GeoPolygon. + */ + private GeoPolygon concavePolygonWithHoles(PlanetModel planetModel, Constraints constraints) { + int vertexCount = random().nextInt(4) + 3; + int iterations = 0; + while (iterations < MAX_SHAPE_ITERATIONS) { + iterations++; + //we first build the hole. We consider all constraints except + // disjoint as we have a hole + Constraints holeConstraints = new Constraints(); + holeConstraints.putAll(constraints.getContains()); + holeConstraints.putAll(constraints.getWithin()); + holeConstraints.putAll(constraints.getOverlaps()); + GeoPolygon hole = convexPolygon(planetModel, holeConstraints); + if (hole == null){ + continue; + } + // Now we get points for polygon. Must we with in the hole + // and we add contain constraints + Constraints pointConstraints = new Constraints(); + pointConstraints.put(hole, GeoArea.WITHIN); + pointConstraints.putAll(constraints.getContains()); + List geoPoints = points(vertexCount,planetModel, pointConstraints); + List orderedGeoPoints = orderPoints(geoPoints); + Collections.reverse(orderedGeoPoints); + try { + GeoPolygon polygon = GeoPolygonFactory.makeGeoPolygon(planetModel, orderedGeoPoints, Collections.singletonList(hole)); + //final polygon must be convex + if (!constraints.valid(polygon) || isConcave(planetModel,polygon)) { + continue; + } + return polygon; + } catch (IllegalArgumentException e) { + continue; + } + } + return null; + } + + /** + * Method that returns a random generated complex GeoPolygon under given constraints. Returns + * NULL if it cannot build the complex GeoPolygon under the given constraints. + * + * @param planetModel The planet model. + * @param constraints The given constraints. 
+ * @return The random generated GeoPolygon. + */ + private GeoPolygon complexPolygon(PlanetModel planetModel, Constraints constraints) { + int polygonsCount =random().nextInt(2) + 1; + int iterations = 0; + while (iterations < MAX_SHAPE_ITERATIONS) { + iterations++; + List polDescription = new ArrayList<>(); + while(polDescription.size() < polygonsCount){ + int vertexCount = random().nextInt(14) + 3; + List geoPoints = points(vertexCount,planetModel, constraints); + orderPoints(geoPoints); + polDescription.add(new GeoPolygonFactory.PolygonDescription(geoPoints)); + } + try { + GeoPolygon polygon = GeoPolygonFactory.makeLargeGeoPolygon(planetModel,polDescription); + if (!constraints.valid(polygon)) { + continue; + } + return polygon; + } catch (IllegalArgumentException e) { + continue; + } + } + return null; + } + + /** + * Method that returns a random generated a concave square GeoPolygon under given constraints. Returns + * NULL if it cannot build the concave GeoPolygon under the given constraints. This shape is an utility + * to build constraints. + * + * @param planetModel The planet model. + * @param constraints The given constraints. + * @return The random generated GeoPolygon. + */ + private GeoPolygon simpleConvexPolygon(PlanetModel planetModel, Constraints constraints) { + int iterations = 0; + while (iterations < MAX_SHAPE_ITERATIONS) { + iterations++; + List points = points(3,planetModel,constraints); + points = orderPoints(points); + try { + GeoPolygon polygon = GeoPolygonFactory.makeGeoConvexPolygon(planetModel, points); + if(!constraints.valid(polygon) || isConcave(planetModel,polygon)){ + continue; + } + return polygon; + } catch (IllegalArgumentException e) { + continue; + } + } + return null; + } + + /** + * Method that returns a random generated a convex square GeoPolygon under given constraints. Returns + * NULL if it cannot build the convex GeoPolygon under the given constraints. This shape is an utility + * to build constraints. 
+ * + * @param planetModel The planet model. + * @param constraints The given constraints. + * @return The random generated GeoPolygon. + */ + private GeoPolygon concaveSimplePolygon(PlanetModel planetModel, Constraints constraints) { + int iterations = 0; + while (iterations < MAX_SHAPE_ITERATIONS) { + iterations++; + List points = points(3, planetModel, constraints); + points = orderPoints(points); + Collections.reverse(points); + try { + GeoPolygon polygon = GeoPolygonFactory.makeGeoConcavePolygon(planetModel, points); + if(!constraints.valid(polygon) || isConvex(planetModel, polygon)){ + continue; + } + return polygon; + } catch (IllegalArgumentException e) { + continue; + } + } + return null; + } + + /** + * Method that returns a random list of generated GeoPoints under given constraints. If it cannot + * find a point it will add a point that might not comply with the constraints. + * + * @param count The number of points + * @param planetModel The planet model. + * @param constraints The given constraints. + * @return The random generated List of GeoPoints. + */ + private List points(int count, PlanetModel planetModel, Constraints constraints){ + List geoPoints = new ArrayList<>(count); + for(int i= 0; i< count; i++) { + GeoPoint point = randomGeoPoint(planetModel, constraints); + if (point == null){ + point = randomGeoPoint(planetModel, new Constraints()); + } + geoPoints.add(point); + } + return geoPoints; + } + + /** + * Check if a GeoPolygon is pure concave. Note that our definition for concavity is that the polygon + * contains antipodal points. + * + * @param planetModel The planet model. + * @param shape The polygon to check. + * @return True if the polygon contains antipodal points. 
+ */ + private boolean isConcave(PlanetModel planetModel, GeoPolygon shape){ + return (shape.isWithin(planetModel.NORTH_POLE) && shape.isWithin(planetModel.SOUTH_POLE))|| + (shape.isWithin(planetModel.MAX_X_POLE) && shape.isWithin(planetModel.MIN_X_POLE)) || + (shape.isWithin(planetModel.MAX_Y_POLE) && shape.isWithin(planetModel.MIN_Y_POLE)); + } + + /** + * Check if a GeoPolygon is pure convex. Note that our definition for convexity is that the polygon + * does not contain antipodal points. + * + * @param planetModel The planet model. + * @param shape The polygon to check. + * @return True if the polygon dies not contains antipodal points. + */ + private boolean isConvex(PlanetModel planetModel, GeoPolygon shape){ + return !isConcave(planetModel,shape); + } + + /** + * Generates a random number between 0 and PI. + * + * @return the cutoff angle. + */ + private double randomCutoffAngle() { + while(true) { + double radius = randomDouble(); + if (radius <0 || radius > Math.PI){ + continue; + } + return radius; + } + } + + /** + * Method that orders a lit of points anti-clock-wise to prevent crossing edges. + * + * @param originalPoints The points to order. + * @return The list of ordered points anti-clockwise. 
+ */ + private List orderPoints(List originalPoints){ + List points = new ArrayList<>(originalPoints.size()); + points.addAll(originalPoints); //make a copy + GeoPoint lPoint = getPointLefLon(points); + points.remove(lPoint); + GeoPoint rPoint = getPointRigthLon(points); + points.remove(rPoint); + List APoints = getPointsBelowAndSort(points, lPoint); + List BPoints = getPointsAboveAndsort(points, lPoint); + List result = new ArrayList<>(); + result.add(lPoint); + result.addAll(APoints); + result.add(rPoint); + result.addAll(BPoints); + return result; + } + + private List getPointsAboveAndsort(List points,GeoPoint lPoint) { + List BPoints = new ArrayList<>(); + for (GeoPoint point : points){ + if(point.getLatitude() > lPoint.getLatitude()){ + BPoints.add(point); + } + } + Collections.sort(BPoints, new Comparator() { + public int compare(GeoPoint idx1, GeoPoint idx2) { + return Double.compare(idx1.getLongitude(), idx2.getLongitude()); + } + }); + return BPoints; + } + + private List getPointsBelowAndSort(List points,GeoPoint lPoint) { + List APoints = new ArrayList<>(); + for (GeoPoint point : points){ + if(point.getLatitude() < lPoint.getLatitude()){ + APoints.add(point); + } + } + Collections.sort(APoints, new Comparator() { + public int compare(GeoPoint idx1, GeoPoint idx2) { + return Double.compare(idx1.getLongitude(), idx2.getLongitude()); + } + }); + return APoints; + } + + private GeoPoint getPointLefLon(List points) { + GeoPoint lPoint = null; + for (GeoPoint point : points){ + if(lPoint == null ){ + lPoint = point; + } + else{ + if (lPoint.getLongitude() > point.getLongitude()){ + lPoint = point; + } + } + } + return lPoint; + } + + private GeoPoint getPointRigthLon(List points) { + GeoPoint rPoint = null; + for (GeoPoint point : points){ + if(rPoint == null ){ + rPoint = point; + } + else{ + if (rPoint.getLongitude() < point.getLongitude()){ + rPoint = point; + } + } + } + return rPoint; + } + + /** + * Class that holds the constraints that are given to + * 
build shapes. It consists in a list of GeoAreaShapes + * and relationships the new shape needs to satisfy. + */ + class Constraints extends HashMap{ + + /** + * Check if the shape is valid under the constraints. + * + * @param shape The shape to check + * @return true if the shape satisfy the constraints, else false. + */ + public boolean valid(GeoShape shape) { + if (shape == null){ + return false; + } + for (GeoAreaShape constraint : keySet()) { + if (constraint.getRelationship(shape) != get(constraint)) { + return false; + } + } + return true; + } + + /** + * Check if a point is Within the constraints. + * + * @param point The point to check + * @return true if the point satisfy the constraints, else false. + */ + public boolean isWithin(GeoPoint point) { + for (GeoShape constraint : keySet()) { + if (!(validPoint(point, constraint, get(constraint)))) { + return false; + } + } + return true; + } + + /** + * Check if a point is Within one constraint given by a shape and a relationship. + * + * @param point The point to check + * @param shape The shape of the constraint + * @param relationship The relationship of the constraint. + * @return true if the point satisfy the constraint, else false. + */ + private boolean validPoint(GeoPoint point, GeoShape shape, int relationship) { + //For GeoCompositeMembershipShape we only consider the first shape to help + // converging + if (relationship == GeoArea.WITHIN && shape instanceof GeoCompositeMembershipShape) { + shape = (((GeoCompositeMembershipShape) shape).shapes.get(0)); + } + switch (relationship) { + case GeoArea.DISJOINT: + return !shape.isWithin(point); + case GeoArea.OVERLAPS: + return true; + case GeoArea.CONTAINS: + return !shape.isWithin(point); + case GeoArea.WITHIN: + return shape.isWithin(point); + default: + return true; + } + } + + /** + * Collect the CONTAINS constraints in the object + * + * @return the CONTAINS constraints. 
+ */ + public Constraints getContains(){ + return getConstraintsOfType(GeoArea.CONTAINS); + } + + /** + * Collect the WITHIN constraints in the object + * + * @return the WITHIN constraints. + */ + public Constraints getWithin(){ + return getConstraintsOfType(GeoArea.WITHIN); + } + + /** + * Collect the OVERLAPS constraints in the object + * + * @return the OVERLAPS constraints. + */ + public Constraints getOverlaps(){ + return getConstraintsOfType(GeoArea.OVERLAPS); + } + + /** + * Collect the DISJOINT constraints in the object + * + * @return the DISJOINT constraints. + */ + public Constraints getDisjoint(){ + return getConstraintsOfType(GeoArea.DISJOINT); + } + + private Constraints getConstraintsOfType(int type){ + Constraints constraints = new Constraints(); + for (GeoAreaShape constraint : keySet()) { + if (type == get(constraint)) { + constraints.put(constraint, type); + } + } + return constraints; + } + } +} + + + diff --git a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/RandomGeoShapeRelationshipTest.java b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/RandomGeoShapeRelationshipTest.java new file mode 100644 index 00000000000..fe1b1d9868e --- /dev/null +++ b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/RandomGeoShapeRelationshipTest.java @@ -0,0 +1,240 @@ +package org.apache.lucene.spatial3d.geom; + +import com.carrotsearch.randomizedtesting.annotations.Repeat; +import org.junit.Test; + +/** + * Random test to check relationship between GeoAreaShapes and GeoShapes. + */ +public class RandomGeoShapeRelationshipTest extends RandomGeoShapeGenerator { + + + /** + * Test for WITHIN points. We build a WITHIN shape with respect the geoAreaShape + * and create a point WITHIN the shape. The resulting shape should be WITHIN + * the original shape. 
+ * + */ + @Test + @Repeat(iterations = 5) + public void testRandomPointWithin() { + int referenceShapeType = CONVEX_POLYGON; + PlanetModel planetModel = randomPlanetModel(); + int shapeType = randomShapeType(); + GeoAreaShape shape = null; + GeoPoint point = null; + while (point == null) { + shape = randomGeoAreaShape(shapeType, planetModel); + Constraints constraints = getEmptyConstraint(); + constraints.put(shape, GeoArea.WITHIN); + GeoAreaShape reference = randomGeoAreaShape(referenceShapeType, planetModel, constraints); + if (reference != null) { + constraints = new Constraints(); + constraints.put(reference, GeoArea.WITHIN); + point = randomGeoPoint(planetModel, constraints); + } + } + assertTrue(shape.isWithin(point)); + } + + /** + * Test for NOT WITHIN points. We build a DIJOINT shape with respect the geoAreaShape + * and create a point WITHIN that shape. The resulting shape should not be WITHIN + * the original shape. + * + */ + @Repeat(iterations = 5) + public void testRandomPointNotWithin() { + int referenceShapeType = CONVEX_POLYGON; + PlanetModel planetModel = randomPlanetModel(); + int shapeType = randomShapeType(); + GeoAreaShape shape = null; + GeoPoint point = null; + while (point == null) { + shape = randomGeoAreaShape(shapeType, planetModel); + Constraints constraints = getEmptyConstraint(); + constraints.put(shape, GeoArea.DISJOINT); + GeoAreaShape reference = randomGeoAreaShape(referenceShapeType, planetModel, constraints); + if (reference != null) { + constraints = new Constraints(); + constraints.put(reference, GeoArea.WITHIN); + point = randomGeoPoint(planetModel, constraints); + } + } + assertFalse(shape.isWithin(point)); + } + + /** + * Test for disjoint shapes. We build a DISJOINT shape with respect the geoAreaShape + * and create shapes WITHIN that shapes. The resulting shape should be DISJOINT + * to the geoAreaShape. + * + * Note that both shapes cannot be concave. 
+ */ + @Test + @Repeat(iterations = 5) + public void testRandomDisjoint() { + int referenceShapeType = CONVEX_SIMPLE_POLYGON; + PlanetModel planetModel = randomPlanetModel(); + int geoAreaShapeType = randomGeoAreaShapeType(); + int shapeType =randomConvexShapeType(); + + GeoShape shape = null; + GeoAreaShape geoAreaShape = null; + while (shape == null) { + geoAreaShape = randomGeoAreaShape(geoAreaShapeType, planetModel); + Constraints constraints = new Constraints(); + constraints.put(geoAreaShape, GeoArea.DISJOINT); + GeoAreaShape reference = randomGeoAreaShape(referenceShapeType, planetModel, constraints); + if (reference != null) { + constraints = getEmptyConstraint(); + constraints.put(reference, GeoArea.WITHIN); + shape = randomGeoShape(shapeType, planetModel, constraints); + } + } + int rel = geoAreaShape.getRelationship(shape); + assertEquals(GeoArea.DISJOINT, rel); + if (shape instanceof GeoArea) { + rel = ((GeoArea)shape).getRelationship(geoAreaShape); + assertEquals(GeoArea.DISJOINT, rel); + } + } + + /** + * Test for within shapes. We build a shape WITHIN the geoAreaShape and create + * shapes WITHIN that shape. The resulting shape should be WITHIN + * to the geoAreaShape. + * + * Note that if the geoAreaShape is not concave the other shape must be not concave. 
+ */ + @Test + @Repeat(iterations = 5) + public void testRandomWithIn() { + PlanetModel planetModel = randomPlanetModel(); + int geoAreaShapeType = randomGeoAreaShapeType(); + int shapeType =randomShapeType(); + int referenceShapeType = CONVEX_SIMPLE_POLYGON; + if (!isConcave(geoAreaShapeType)){ + shapeType =randomConvexShapeType(); + } + if(isConcave(shapeType)){//both concave + referenceShapeType = CONCAVE_SIMPLE_POLYGON; + } + GeoShape shape = null; + GeoAreaShape geoAreaShape = null; + while (shape == null) { + geoAreaShape = randomGeoAreaShape(geoAreaShapeType, planetModel); + Constraints constraints = new Constraints(); + constraints.put(geoAreaShape, GeoArea.WITHIN); + GeoAreaShape reference = randomGeoAreaShape(referenceShapeType, planetModel, constraints); + if (reference != null) { + constraints = new Constraints(); + constraints.put(reference, GeoArea.WITHIN); + shape = randomGeoShape(shapeType, planetModel, constraints); + } + } + int rel = geoAreaShape.getRelationship(shape); + assertEquals(GeoArea.WITHIN, rel); + if (shape instanceof GeoArea) { + rel = ((GeoArea)shape).getRelationship(geoAreaShape); + assertEquals(GeoArea.CONTAINS, rel); + } + } + + + /** + * Test for contains shapes. We build a shape containing the geoAreaShape and create + * shapes WITHIN that shape. The resulting shape should CONTAIN + * the geoAreaShape. + * + * Note that if the geoAreaShape is concave the other shape must be concave. + * If shape is concave, the shape for reference should be concave as well. 
+ * + */ + @Test + @Repeat(iterations = 1) + public void testRandomContains() { + int referenceShapeType = CONVEX_SIMPLE_POLYGON; + PlanetModel planetModel = randomPlanetModel(); + int geoAreaShapeType = randomGeoAreaShapeType(); + while (geoAreaShapeType == COLLECTION){ + geoAreaShapeType = randomGeoAreaShapeType(); + } + int shapeType = randomShapeType(); + if (isConcave(geoAreaShapeType)){ + shapeType = randomConcaveShapeType(); + } + if (isConcave(shapeType)){ + referenceShapeType = CONCAVE_SIMPLE_POLYGON; + } + GeoShape shape = null; + GeoAreaShape geoAreaShape = null; + while (shape == null) { + geoAreaShape = randomGeoAreaShape(geoAreaShapeType, planetModel); + Constraints constraints = getEmptyConstraint(); + constraints.put(geoAreaShape, GeoArea.CONTAINS); + GeoPolygon reference =(GeoPolygon)randomGeoAreaShape(referenceShapeType, planetModel, constraints); + if (reference != null) { + constraints = getEmptyConstraint(); + constraints.put(reference, GeoArea.CONTAINS); + shape = randomGeoShape(shapeType, planetModel, constraints); + } + } + int rel = geoAreaShape.getRelationship(shape); + assertEquals(GeoArea.CONTAINS, rel); + if (shape instanceof GeoArea) { + rel = ((GeoArea)shape).getRelationship(geoAreaShape); + assertEquals(GeoArea.WITHIN, rel); + } + } + + /** + * Test for overlapping shapes. We build a shape that contains part of the + * geoAreaShape, is disjoint to other part and contains a disjoint shape. We create + * shapes according the criteria. The resulting shape should OVERLAP + * the geoAreaShape. 
+ */ + @Test + @Repeat(iterations = 5) + public void testRandomOverlaps() { + PlanetModel planetModel = randomPlanetModel(); + int geoAreaShapeType = randomGeoAreaShapeType(); + int shapeType = randomShapeType(); + + GeoShape shape = null; + GeoAreaShape geoAreaShape = null; + while (shape == null) { + geoAreaShape = randomGeoAreaShape(geoAreaShapeType, planetModel); + Constraints constraints = getEmptyConstraint(); + constraints.put(geoAreaShape,GeoArea.WITHIN); + GeoAreaShape reference1 = randomGeoAreaShape(CONVEX_SIMPLE_POLYGON, planetModel, constraints); + if (reference1 == null){ + continue; + } + constraints = getEmptyConstraint(); + constraints.put(geoAreaShape, GeoArea.WITHIN); + constraints.put(reference1, GeoArea.DISJOINT); + GeoAreaShape reference2 = randomGeoAreaShape(CONVEX_SIMPLE_POLYGON, planetModel, constraints); + if (reference2 == null){ + continue; + } + constraints = getEmptyConstraint(); + constraints.put(geoAreaShape, GeoArea.DISJOINT); + GeoAreaShape reference3 = randomGeoAreaShape(CONVEX_SIMPLE_POLYGON, planetModel, constraints); + if (reference3 != null) { + constraints = new Constraints(); + constraints.put(reference1, GeoArea.DISJOINT); + constraints.put(reference2, GeoArea.CONTAINS); + constraints.put(reference3, GeoArea.CONTAINS); + shape = randomGeoShape(shapeType, planetModel, constraints); + } + } + int rel = geoAreaShape.getRelationship(shape); + assertEquals(GeoArea.OVERLAPS, rel); + if (shape instanceof GeoArea) { + rel = ((GeoArea)shape).getRelationship(geoAreaShape); + assertEquals(GeoArea.OVERLAPS, rel); + } + } +} + diff --git a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/SimpleGeoPolygonRelationshipsTest.java b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/SimpleGeoPolygonRelationshipsTest.java new file mode 100644 index 00000000000..0d336396ba9 --- /dev/null +++ b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/SimpleGeoPolygonRelationshipsTest.java @@ -0,0 +1,837 @@ +/* +* Licensed to 
the Apache Software Foundation (ASF) under one or more +* contributor license agreements. See the NOTICE file distributed with +* this work for additional information regarding copyright ownership. +* The ASF licenses this file to You under the Apache License, Version 2.0 +* (the "License"); you may not use this file except in compliance with +* the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ + +package org.apache.lucene.spatial3d.geom; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import org.junit.Test; + +import static org.junit.Assert.assertEquals; + +/** + * Check relationship between polygon and GeoShapes of basic polygons. Normally we construct + * the convex, concave counterpart and the convex polygon as a complex polygon. + */ +public class SimpleGeoPolygonRelationshipsTest { + + /** + * Test with two shapes with no crossing edges and no points in common in convex case. 
+ */ + @Test + public void testGeoSimplePolygon1() { + + //POLYGON ((19.845091 -60.452631, 20.119948 -61.655652, 23.207901 -61.453298, 22.820804 -60.257713, 19.845091 -60.452631)) disjoint + GeoPolygon originalConvexPol = buildConvexGeoPolygon(19.84509, -60.452631, + 20.119948, -61.655652, + 23.207901, -61.453298, + 22.820804, -60.257713); + + GeoPolygon originalConcavePol = buildConcaveGeoPolygon(19.84509, -60.452631, + 20.119948, -61.655652, + 23.207901, -61.453298, + 22.820804, -60.257713); + + GeoPolygon originalComplexPol = buildComplexGeoPolygon(19.84509, -60.452631, + 20.119948, -61.655652, + 23.207901, -61.453298, + 22.820804, -60.257713); + + GeoPolygon polConvex = buildConvexGeoPolygon(20.0, -60.4, + 20.1, -60.4, + 20.1, -60.3, + 20.0, -60.3); + + GeoPolygon polConcave = buildConcaveGeoPolygon(20.0, -60.4, + 20.1, -60.4, + 20.1, -60.3, + 20.0, -60.3); + + + //Convex + int rel = originalConvexPol.getRelationship(polConvex); + assertEquals(GeoArea.DISJOINT, rel); + rel = polConvex.getRelationship(originalConvexPol); + assertEquals(GeoArea.DISJOINT, rel); + + rel = originalConvexPol.getRelationship(polConcave); + assertEquals(GeoArea.CONTAINS, rel); + rel = polConcave.getRelationship(originalConvexPol); + assertEquals(GeoArea.WITHIN, rel);//Check + + //Concave + rel = originalConcavePol.getRelationship(polConvex); + assertEquals(GeoArea.WITHIN, rel); + rel = polConvex.getRelationship(originalConcavePol); + assertEquals(GeoArea.CONTAINS, rel); + + rel = originalConcavePol.getRelationship(polConcave); + assertEquals(GeoArea.OVERLAPS, rel); + rel = polConcave.getRelationship(originalConcavePol); + assertEquals(GeoArea.OVERLAPS, rel); + + //Complex + rel = originalComplexPol.getRelationship(polConvex); + assertEquals(GeoArea.DISJOINT, rel); + rel = polConvex.getRelationship(originalComplexPol); + assertEquals(GeoArea.DISJOINT, rel); + + rel = originalComplexPol.getRelationship(polConcave); + assertEquals(GeoArea.CONTAINS, rel); + rel = 
polConcave.getRelationship(originalComplexPol); + assertEquals(GeoArea.WITHIN, rel); + } + + + /** + * Test with two shapes with crossing edges and some points inside in convex case. + */ + @Test + public void testGeoSimplePolygon2() { + + //POLYGON ((19.845091 -60.452631, 20.119948 -61.655652, 23.207901 -61.453298, 22.820804 -60.257713, 19.845091 -60.452631)) disjoint + GeoPolygon originalConvexPol = buildConvexGeoPolygon(19.84509, -60.452631, + 20.119948, -61.655652, + 23.207901, -61.453298, + 22.820804, -60.257713); + + GeoPolygon originalConcavePol = buildConcaveGeoPolygon(19.84509, -60.452631, + 20.119948, -61.655652, + 23.207901, -61.453298, + 22.820804, -60.257713); + + GeoPolygon originalComplexPol = buildComplexGeoPolygon(19.84509, -60.452631, + 20.119948, -61.655652, + 23.207901, -61.453298, + 22.820804, -60.257713); + + //POLYGON ((20.0 -60.4, 23.1 -60.4, 23.1 -60.3, 20.0 -60.3,20.0 -60.4)) + GeoPolygon polConvex = buildConvexGeoPolygon(20.0, -60.4, + 23.1, -60.4, + 23.1, -60.3, + 20.0, -60.3); + + GeoPolygon polConcave = buildConcaveGeoPolygon(20.0, -60.4, + 23.1, -60.4, + 23.1, -60.3, + 20.0, -60.3); + + //Convex + int rel = originalConvexPol.getRelationship(polConvex); + assertEquals(GeoArea.OVERLAPS, rel); + rel = polConvex.getRelationship(originalConvexPol); + assertEquals(GeoArea.OVERLAPS, rel); + + rel = originalConvexPol.getRelationship(polConcave); + assertEquals(GeoArea.OVERLAPS, rel); + rel = polConcave.getRelationship(originalConvexPol); + assertEquals(GeoArea.OVERLAPS, rel); + + //Concave + rel = originalConcavePol.getRelationship(polConcave); + assertEquals(GeoArea.OVERLAPS, rel); + rel = polConcave.getRelationship(originalConcavePol); + assertEquals(GeoArea.OVERLAPS, rel); + + + rel = originalConcavePol.getRelationship(polConvex); + assertEquals(GeoArea.OVERLAPS, rel); + rel = polConvex.getRelationship(originalConcavePol); + assertEquals(GeoArea.OVERLAPS, rel); + + //Complex + rel = originalComplexPol.getRelationship(polConcave); + 
assertEquals(GeoArea.OVERLAPS, rel); + rel = polConcave.getRelationship(originalComplexPol); + assertEquals(GeoArea.OVERLAPS, rel); + + + rel = originalComplexPol.getRelationship(polConvex); + assertEquals(GeoArea.OVERLAPS, rel); + rel = polConvex.getRelationship(originalComplexPol); + assertEquals(GeoArea.OVERLAPS, rel); + } + + /** + * Test with two shapes with no crossing edges and all points inside in convex case. + */ + @Test + public void testGeoSimplePolygon3() { + + //POLYGON ((19.845091 -60.452631, 20.119948 -61.655652, 23.207901 -61.453298, 22.820804 -60.257713, 19.845091 -60.452631)) disjoint + GeoPolygon originalConvexPol = buildConvexGeoPolygon(19.84509, -60.452631, + 20.119948, -61.655652, + 23.207901, -61.453298, + 22.820804, -60.257713); + + GeoPolygon originalConcavePol = buildConcaveGeoPolygon(19.84509, -60.452631, + 20.119948, -61.655652, + 23.207901, -61.453298, + 22.820804, -60.257713); + + GeoPolygon originalComplexPol = buildComplexGeoPolygon(19.84509, -60.452631, + 20.119948, -61.655652, + 23.207901, -61.453298, + 22.820804, -60.257713); + + //POLYGON ((20.0 -61.1, 20.1 -61.1, 20.1 -60.5, 20.0 -60.5,20.0 -61.1)) + GeoPolygon polConvex = buildConvexGeoPolygon(20.0, -61.1, + 20.1, -61.1, + 20.1, -60.5, + 20.0, -60.5); + + GeoPolygon polConcave = buildConcaveGeoPolygon(20.0, -61.1, + 20.1, -61.1, + 20.1, -60.5, + 20.0, -60.5); + + //Convex + int rel = originalConvexPol.getRelationship(polConvex); + assertEquals(GeoArea.WITHIN, rel); + rel = polConvex.getRelationship(originalConvexPol); + assertEquals(GeoArea.CONTAINS, rel); + + rel = originalConvexPol.getRelationship(polConcave); + assertEquals(GeoArea.OVERLAPS, rel); + rel = polConcave.getRelationship(originalConvexPol); + assertEquals(GeoArea.OVERLAPS, rel); + + //Concave + rel = originalConcavePol.getRelationship(polConcave); + assertEquals(GeoArea.CONTAINS, rel); + rel = polConcave.getRelationship(originalConcavePol); + assertEquals(GeoArea.WITHIN, rel);//check + + rel = 
originalConcavePol.getRelationship(polConvex); + assertEquals(GeoArea.DISJOINT, rel); + rel = polConvex.getRelationship(originalConcavePol); + assertEquals(GeoArea.DISJOINT, rel); + + //Complex + rel = originalComplexPol.getRelationship(polConvex); + assertEquals(GeoArea.WITHIN, rel); + rel = polConvex.getRelationship(originalComplexPol); + assertEquals(GeoArea.CONTAINS, rel); + + rel = originalComplexPol.getRelationship(polConcave); + assertEquals(GeoArea.OVERLAPS, rel); + rel = polConcave.getRelationship(originalComplexPol); + assertEquals(GeoArea.OVERLAPS, rel); + } + + /** + * Test with two shapes with crossing edges and no points inside in convex case. + */ + @Test + public void testGeoSimplePolygon4() { + //POLYGON ((19.845091 -60.452631, 20.119948 -61.655652, 23.207901 -61.453298, 22.820804 -60.257713, 19.845091 -60.452631)) disjoint + GeoPolygon originalConvexPol = buildConvexGeoPolygon(19.84509, -60.452631, + 20.119948, -61.655652, + 23.207901, -61.453298, + 22.820804, -60.257713); + + GeoPolygon originalConcavePol = buildConcaveGeoPolygon(19.84509, -60.452631, + 20.119948, -61.655652, + 23.207901, -61.453298, + 22.820804, -60.257713); + + GeoPolygon originalComplexPol = buildComplexGeoPolygon(19.84509, -60.452631, + 20.119948, -61.655652, + 23.207901, -61.453298, + 22.820804, -60.257713); + + //POLYGON ((20.0 -62.4, 20.1 -62.4, 20.1 -60.3, 20.0 -60.3,20.0 -62.4)) intersects no points inside + GeoPolygon polConvex = buildConvexGeoPolygon(20.0, -62.4, + 20.1, -62.4, + 20.1, -60.3, + 20.0, -60.3); + + GeoPolygon polConcave = buildConcaveGeoPolygon(20.0, -62.4, + 20.1, -62.4, + 20.1, -60.3, + 20.0, -60.3); + + //Convex + int rel = originalConvexPol.getRelationship(polConvex); + assertEquals(GeoArea.OVERLAPS, rel); + rel = polConvex.getRelationship(originalConvexPol); + assertEquals(GeoArea.OVERLAPS, rel); + + rel = originalConvexPol.getRelationship(polConcave); + assertEquals(GeoArea.OVERLAPS, rel); + rel = polConcave.getRelationship(originalConvexPol); + 
assertEquals(GeoArea.OVERLAPS, rel); + + //concave + rel = originalConcavePol.getRelationship(polConcave); + assertEquals(GeoArea.OVERLAPS, rel); + rel = polConcave.getRelationship(originalConcavePol); + assertEquals(GeoArea.OVERLAPS, rel); + + rel = originalConcavePol.getRelationship(polConvex); + assertEquals(GeoArea.OVERLAPS, rel); + rel = polConvex.getRelationship(originalConcavePol); + assertEquals(GeoArea.OVERLAPS, rel); + + //Complex + rel = originalComplexPol.getRelationship(polConvex); + assertEquals(GeoArea.OVERLAPS, rel); + rel = polConvex.getRelationship(originalComplexPol); + assertEquals(GeoArea.OVERLAPS, rel); + + rel = originalComplexPol.getRelationship(polConcave); + assertEquals(GeoArea.OVERLAPS, rel); + rel = polConcave.getRelationship(originalComplexPol); + assertEquals(GeoArea.OVERLAPS, rel); + } + + /** + * Test with two shapes with no crossing edges and polygon in hole in convex case. + */ + @Test + public void testGeoSimplePolygonWithHole1() { + //POLYGON((-135 -31, -135 -30, -137 -30, -137 -31, -135 -31),(-135.5 -30.7, -135.5 -30.4, -136.5 -30.4, -136.5 -30.7, -135.5 -30.7)) + GeoPolygon hole = buildConcaveGeoPolygon(-135.5, -30.7, + -135.5, -30.4, + -136.5, -30.4, + -136.5, -30.7); + GeoPolygon originalConvexPol = buildConvexGeoPolygonWithHole(-135, -31, + -135, -30, + -137, -30, + -137, -31, hole); + + GeoPolygon holeInv = buildConvexGeoPolygon(-135, -31, + -135, -30, + -137, -30, + -137, -31); + + GeoPolygon originalConvexPolInv = buildConcaveGeoPolygonWithHole(-135.5, -30.7, + -135.5, -30.4, + -136.5, -30.4, + -136.5, -30.7, holeInv); + + //POLYGON((-135.7 -30.6, -135.7 -30.45, -136 -30.45, -136 -30.6, -135.7 -30.6)) in the hole + GeoPolygon polConvex = buildConvexGeoPolygon(-135.7, -30.6, + -135.7, -30.45, + -136, -30.45, + -136, -30.6); + + GeoPolygon polConcave = buildConcaveGeoPolygon(-135.7, -30.6, + -135.7, -30.45, + -136, -30.45, + -136, -30.6); + + int rel = originalConvexPol.getRelationship(polConvex); + 
assertEquals(GeoArea.DISJOINT, rel); + rel = polConvex.getRelationship(originalConvexPol); + assertEquals(GeoArea.DISJOINT, rel); + + rel = originalConvexPol.getRelationship(polConcave); + assertEquals(GeoArea.CONTAINS, rel); + rel = polConcave.getRelationship(originalConvexPol); + assertEquals(GeoArea.WITHIN, rel); + + rel = originalConvexPolInv.getRelationship(polConvex); + assertEquals(GeoArea.DISJOINT, rel); + rel = polConvex.getRelationship(originalConvexPolInv); + assertEquals(GeoArea.DISJOINT, rel); + + rel = originalConvexPolInv.getRelationship(polConcave); + assertEquals(GeoArea.CONTAINS, rel); + rel = polConcave.getRelationship(originalConvexPolInv); + assertEquals(GeoArea.WITHIN, rel); + } + + /** + * Test with two shapes with crossing edges in hole and some points inside in convex case. + */ + @Test + public void testGeoSimplePolygonWithHole2() { + //POLYGON((-135 -31, -135 -30, -137 -30, -137 -31, -135 -31),(-135.5 -30.7, -135.5 -30.4, -136.5 -30.4, -136.5 -30.7, -135.5 -30.7)) + GeoPolygon hole = buildConcaveGeoPolygon(-135.5, -30.7, + -135.5, -30.4, + -136.5, -30.4, + -136.5, -30.7); + GeoPolygon originalConvexPol = buildConvexGeoPolygonWithHole(-135, -31, + -135, -30, + -137, -30, + -137, -31, hole); + + GeoPolygon holeInv = buildConvexGeoPolygon(-135, -31, + -135, -30, + -137, -30, + -137, -31); + + GeoPolygon originalConvexPolInv = buildConcaveGeoPolygonWithHole(-135.5, -30.7, + -135.5, -30.4, + -136.5, -30.4, + -136.5, -30.7, holeInv); + + //POLYGON((-135.5 -31.2, -135.5 -30.8, -136 -30.8, -136 -31.2, -135.5 -31.2)) intersects the hole + GeoPolygon polConvex = buildConvexGeoPolygon(-135.5, -30.2, + -135.5, -30.8, + -136, -30.8, + -136, -30.2); + + GeoPolygon polConcave = buildConcaveGeoPolygon(-135.5, -30.2, + -135.5, -30.8, + -136, -30.8, + -136, -30.2); + + int rel = originalConvexPol.getRelationship(polConvex); + assertEquals(GeoArea.OVERLAPS, rel); + rel = polConvex.getRelationship(originalConvexPol); + assertEquals(GeoArea.OVERLAPS, rel); + 
+ rel = originalConvexPol.getRelationship(polConcave); + assertEquals(GeoArea.OVERLAPS, rel); + rel = polConcave.getRelationship(originalConvexPol); + assertEquals(GeoArea.OVERLAPS, rel); + + rel = originalConvexPolInv.getRelationship(polConvex); + assertEquals(GeoArea.OVERLAPS, rel); + rel = polConvex.getRelationship(originalConvexPolInv); + assertEquals(GeoArea.OVERLAPS, rel); + + rel = originalConvexPolInv.getRelationship(polConcave); + assertEquals(GeoArea.OVERLAPS, rel); + rel = polConcave.getRelationship(originalConvexPolInv); + assertEquals(GeoArea.OVERLAPS, rel); + } + + /** + * Test with two shapes with crossing edges and some points inside in convex case. + */ + @Test + public void testGeoSimplePolygonWithHole3() { + //POLYGON((-135 -31, -135 -30, -137 -30, -137 -31, -135 -31),(-135.5 -30.7, -135.5 -30.4, -136.5 -30.4, -136.5 -30.7, -135.5 -30.7)) + GeoPolygon hole = buildConcaveGeoPolygon(-135.5, -30.7, + -135.5, -30.4, + -136.5, -30.4, + -136.5, -30.7); + GeoPolygon originalConvexPol = buildConvexGeoPolygonWithHole(-135, -31, + -135, -30, + -137, -30, + -137, -31, hole); + + GeoPolygon holeInv = buildConvexGeoPolygon(-135, -31, + -135, -30, + -137, -30, + -137, -31); + + GeoPolygon originalConvexPolInv = buildConcaveGeoPolygonWithHole(-135.5, -30.7, + -135.5, -30.4, + -136.5, -30.4, + -136.5, -30.7, holeInv); + + //POLYGON((-135.2 -30.8, -135.2 -30.2, -136.8 -30.2, -136.8 -30.8, -135.2 -30.8)) inside the polygon covering the hole + GeoPolygon polConvex = buildConvexGeoPolygon(-135.2, -30.8, + -135.2, -30.3, + -136.8, -30.2, + -136.8, -30.8); + + GeoPolygon polConcave = buildConcaveGeoPolygon(-135.2, -30.8, + -135.2, -30.3, + -136.8, -30.2, + -136.8, -30.8); + + int rel = originalConvexPol.getRelationship(polConvex); + assertEquals(GeoArea.OVERLAPS, rel); + rel = polConvex.getRelationship(originalConvexPol); + assertEquals(GeoArea.OVERLAPS, rel); + + rel = originalConvexPol.getRelationship(polConcave); + assertEquals(GeoArea.OVERLAPS, rel); + rel = 
polConcave.getRelationship(originalConvexPol); + assertEquals(GeoArea.OVERLAPS, rel); + + rel = originalConvexPolInv.getRelationship(polConvex); + assertEquals(GeoArea.OVERLAPS, rel); + rel = polConvex.getRelationship(originalConvexPolInv); + assertEquals(GeoArea.OVERLAPS, rel); + + rel = originalConvexPolInv.getRelationship(polConcave); + assertEquals(GeoArea.OVERLAPS, rel); + rel = polConcave.getRelationship(originalConvexPolInv); + assertEquals(GeoArea.OVERLAPS, rel); + } + + /** + * Test with two shapes with no crossing edges and all points inside in convex case. + */ + @Test + public void testGeoSimplePolygonWithHole4() { + //POLYGON((-135 -31, -135 -30, -137 -30, -137 -31, -135 -31),(-135.5 -30.7, -135.5 -30.4, -136.5 -30.4, -136.5 -30.7, -135.5 -30.7)) + GeoPolygon hole = buildConcaveGeoPolygon(-135.5, -30.7, + -135.5, -30.4, + -136.5, -30.4, + -136.5, -30.7); + GeoPolygon originalConvexPol = buildConvexGeoPolygonWithHole(-135, -31, + -135, -30, + -137, -30, + -137, -31, hole); + + GeoPolygon holeInv = buildConvexGeoPolygon(-135, -31, + -135, -30, + -137, -30, + -137, -31); + + GeoPolygon originalConvexPolInv = buildConcaveGeoPolygonWithHole(-135.5, -30.7, + -135.5, -30.4, + -136.5, -30.4, + -136.5, -30.7, holeInv); + + // POLYGON((-135.7 -30.3, -135.7 -30.2, -136 -30.2, -136 -30.3, -135.7 -30.3))inside the polygon + GeoPolygon polConvex = buildConvexGeoPolygon(-135.7, -30.3, + -135.7, -30.2, + -136, -30.2, + -136, -30.3); + + GeoPolygon polConcave = buildConcaveGeoPolygon(-135.7, -30.3, + -135.7, -30.2, + -136, -30.2, + -136, -30.3); + + int rel = originalConvexPol.getRelationship(polConvex); + assertEquals(GeoArea.WITHIN, rel); + rel = polConvex.getRelationship(originalConvexPol); + assertEquals(GeoArea.CONTAINS, rel); + + rel = originalConvexPol.getRelationship(polConcave); + assertEquals(GeoArea.OVERLAPS, rel); + rel = polConcave.getRelationship(originalConvexPol); + assertEquals(GeoArea.OVERLAPS, rel); + + rel = 
originalConvexPolInv.getRelationship(polConvex); + assertEquals(GeoArea.WITHIN, rel); + rel = polConvex.getRelationship(originalConvexPolInv); + assertEquals(GeoArea.CONTAINS, rel); + + rel = originalConvexPolInv.getRelationship(polConcave); + assertEquals(GeoArea.OVERLAPS, rel); + rel = polConcave.getRelationship(originalConvexPolInv); + assertEquals(GeoArea.OVERLAPS, rel); + } + + @Test + public void testGeoSimplePolygonWithCircle() { + //POLYGON ((19.845091 -60.452631, 20.119948 -61.655652, 23.207901 -61.453298, 22.820804 -60.257713, 19.845091 -60.452631)) disjoint + GeoPolygon originalConvexPol = buildConvexGeoPolygon(19.84509, -60.452631, + 20.119948, -61.655652, + 23.207901, -61.453298, + 22.820804, -60.257713); + + GeoPolygon originalConcavePol = buildConcaveGeoPolygon(19.84509, -60.452631, + 20.119948, -61.655652, + 23.207901, -61.453298, + 22.820804, -60.257713); + + GeoPolygon originalComplexPol = buildComplexGeoPolygon(19.84509, -60.452631, + 20.119948, -61.655652, + 23.207901, -61.453298, + 22.820804, -60.257713); + + GeoCircle outCircle = GeoCircleFactory.makeGeoCircle(PlanetModel.SPHERE, Math.toRadians(-70), Math.toRadians(23), Math.toRadians(1)); + int rel = originalConvexPol.getRelationship(outCircle); + assertEquals(GeoArea.DISJOINT, rel); + rel = originalConcavePol.getRelationship(outCircle); + assertEquals(GeoArea.WITHIN, rel); + rel = originalComplexPol.getRelationship(outCircle); + assertEquals(GeoArea.DISJOINT, rel); + + GeoCircle overlapCircle = GeoCircleFactory.makeGeoCircle(PlanetModel.SPHERE, Math.toRadians(-61.5), Math.toRadians(20), Math.toRadians(1)); + rel = originalConvexPol.getRelationship(overlapCircle); + assertEquals(GeoArea.OVERLAPS, rel); + rel = originalConcavePol.getRelationship(overlapCircle); + assertEquals(GeoArea.OVERLAPS, rel); + rel = originalComplexPol.getRelationship(overlapCircle); + assertEquals(GeoArea.OVERLAPS, rel); + + GeoCircle inCircle = GeoCircleFactory.makeGeoCircle(PlanetModel.SPHERE, Math.toRadians(-61), 
Math.toRadians(21), Math.toRadians(0.1)); + rel = originalConvexPol.getRelationship(inCircle); + assertEquals(GeoArea.WITHIN, rel); + rel = originalConcavePol.getRelationship(inCircle); + assertEquals(GeoArea.DISJOINT, rel); + rel = originalComplexPol.getRelationship(inCircle); + assertEquals(GeoArea.WITHIN, rel); + + GeoCircle onCircle = GeoCircleFactory.makeGeoCircle(PlanetModel.SPHERE, Math.toRadians(-61), Math.toRadians(21), Math.toRadians(10.)); + rel = originalConvexPol.getRelationship(onCircle); + assertEquals(GeoArea.CONTAINS, rel); + rel = originalConcavePol.getRelationship(onCircle); + assertEquals(GeoArea.OVERLAPS, rel); + rel = originalComplexPol.getRelationship(onCircle); + assertEquals(GeoArea.CONTAINS, rel); + } + + @Test + public void testGeoSimplePolygonWithBBox() { + //POLYGON ((19.845091 -60.452631, 20.119948 -61.655652, 23.207901 -61.453298, 22.820804 -60.257713, 19.845091 -60.452631)) disjoint + GeoPolygon originalConvexPol = buildConvexGeoPolygon(19.84509, -60.452631, + 20.119948, -61.655652, + 23.207901, -61.453298, + 22.820804, -60.257713); + + GeoPolygon originalConcavePol = buildConcaveGeoPolygon(19.84509, -60.452631, + 20.119948, -61.655652, + 23.207901, -61.453298, + 22.820804, -60.257713); + + GeoPolygon originalComplexPol = buildComplexGeoPolygon(19.84509, -60.452631, + 20.119948, -61.655652, + 23.207901, -61.453298, + 22.820804, -60.257713); + + GeoBBox outRectangle = GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, Math.toRadians(-69), + Math.toRadians(-70), + Math.toRadians(22), + Math.toRadians(23)); + int rel = originalConvexPol.getRelationship(outRectangle); + assertEquals(GeoArea.DISJOINT, rel); + rel = outRectangle.getRelationship(originalConvexPol); + assertEquals(GeoArea.DISJOINT, rel); + rel = originalConcavePol.getRelationship(outRectangle); + assertEquals(GeoArea.WITHIN, rel); + rel = originalComplexPol.getRelationship(outRectangle); + assertEquals(GeoArea.DISJOINT, rel); + + GeoBBox overlapRectangle = 
GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, Math.toRadians(-61), + Math.toRadians(-62), + Math.toRadians(22), + Math.toRadians(23)); + rel = originalConvexPol.getRelationship(overlapRectangle); + assertEquals(GeoArea.OVERLAPS, rel); + rel = overlapRectangle.getRelationship(originalConvexPol); + assertEquals(GeoArea.OVERLAPS, rel); + rel = originalConcavePol.getRelationship(overlapRectangle); + assertEquals(GeoArea.OVERLAPS, rel); + rel = originalComplexPol.getRelationship(overlapRectangle); + assertEquals(GeoArea.OVERLAPS, rel); + + GeoBBox inRectangle = GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, Math.toRadians(-61), + Math.toRadians(-61.1), + Math.toRadians(22.5), + Math.toRadians(23)); + rel = originalConvexPol.getRelationship(inRectangle); + assertEquals(GeoArea.WITHIN, rel); + rel = inRectangle.getRelationship(originalConvexPol); + assertEquals(GeoArea.CONTAINS, rel); + rel = originalConcavePol.getRelationship(inRectangle); + assertEquals(GeoArea.DISJOINT, rel); + rel = originalComplexPol.getRelationship(inRectangle); + assertEquals(GeoArea.WITHIN, rel); + + GeoBBox onRectangle = GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, Math.toRadians(-59), + Math.toRadians(-64.1), + Math.toRadians(18.5), + Math.toRadians(27)); + rel = originalConvexPol.getRelationship(onRectangle); + assertEquals(GeoArea.CONTAINS, rel); + rel = onRectangle.getRelationship(originalConvexPol); + assertEquals(GeoArea.WITHIN, rel); + rel = originalConcavePol.getRelationship(onRectangle); + assertEquals(GeoArea.OVERLAPS, rel); + rel = originalComplexPol.getRelationship(onRectangle); + assertEquals(GeoArea.CONTAINS, rel); + + } + + @Test + public void testGeoSimplePolygonWithComposite() { + GeoShape shape = getCompositeShape(); + + //POLYGON((-145.8555 -5.13, -145.8540 -5.13, -145.8540 -5.12, -145.8555 -5.12, -145.8555 -5.13)) + GeoPolygon polConvex = buildConvexGeoPolygon(-145.8555, -5.13, + -145.8540, -5.13, + -145.8540, -5.12, + -145.8555, -5.12); + + GeoPolygon polConcave = 
buildConcaveGeoPolygon(-145.8555, -5.13, + -145.8540, -5.13, + -145.8540, -5.12, + -145.8555, -5.12); + + int rel = polConvex.getRelationship(shape); + assertEquals(GeoArea.DISJOINT, rel); + rel = polConcave.getRelationship(shape); + assertEquals(GeoArea.WITHIN, rel); + + //POLYGON((-145.8555 -5.13, -145.85 -5.13, -145.85 -5.12, -145.8555 -5.12, -145.8555 -5.13)) + polConvex = buildConvexGeoPolygon(-145.8555, -5.13, + -145.85, -5.13, + -145.85, -5.12, + -145.8555, -5.12); + + polConcave = buildConcaveGeoPolygon(-145.8555, -5.13, + -145.85, -5.13, + -145.85, -5.12, + -145.8555, -5.12); + + rel = polConvex.getRelationship(shape); + assertEquals(GeoArea.OVERLAPS, rel); + rel = polConcave.getRelationship(shape); + assertEquals(GeoArea.OVERLAPS, rel); + + //POLYGON((-146 -5.18, -145.854 -5.18, -145.854 -5.11, -146 -5.11, -146 -5.18)) + //Case overlaping on of the shapes + polConvex = buildConvexGeoPolygon(-146, -5.18, + -145.854, -5.18, + -145.854, -5.11, + -146, -5.11); + + polConcave = buildConcaveGeoPolygon(-146, -5.18, + -145.854, -5.18, + -145.854, -5.11, + -146, -5.11); + + rel = polConvex.getRelationship(shape); + assertEquals(GeoArea.OVERLAPS, rel); + rel = polConcave.getRelationship(shape); + assertEquals(GeoArea.OVERLAPS, rel); + + //POLYGON((-145.88 -5.13, -145.87 -5.13, -145.87 -5.12, -145.88 -5.12, -145.88 -5.13)) + polConvex = buildConvexGeoPolygon(-145.88, -5.13, + -145.87, -5.13, + -145.87, -5.12, + -145.88, -5.12); + + polConcave = buildConcaveGeoPolygon(-145.88, -5.13, + -145.87, -5.13, + -145.87, -5.12, + -145.88, -5.12); + + rel = polConvex.getRelationship(shape); + assertEquals(GeoArea.CONTAINS, rel); + rel = polConcave.getRelationship(shape); + assertEquals(GeoArea.OVERLAPS, rel); + } + + private GeoPolygon buildConvexGeoPolygon(double lon1, double lat1, + double lon2, double lat2, + double lon3, double lat3, + double lon4, double lat4) { + GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat1), Math.toRadians(lon1)); + GeoPoint 
point2 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat2), Math.toRadians(lon2)); + GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat3), Math.toRadians(lon3)); + GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat4), Math.toRadians(lon4)); + final List points = new ArrayList<>(); + points.add(point1); + points.add(point2); + points.add(point3); + points.add(point4); + return GeoPolygonFactory.makeGeoPolygon(PlanetModel.SPHERE, points); + } + + private GeoPolygon buildConcaveGeoPolygon(double lon1, double lat1, + double lon2, double lat2, + double lon3, double lat3, + double lon4, double lat4) { + GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat1), Math.toRadians(lon1)); + GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat2), Math.toRadians(lon2)); + GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat3), Math.toRadians(lon3)); + GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat4), Math.toRadians(lon4)); + final List points = new ArrayList<>(); + points.add(point1); + points.add(point2); + points.add(point3); + points.add(point4); + return GeoPolygonFactory.makeGeoConcavePolygon(PlanetModel.SPHERE, points); + } + + private GeoPolygon buildComplexGeoPolygon(double lon1, double lat1, + double lon2, double lat2, + double lon3, double lat3, + double lon4, double lat4) { + GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat1), Math.toRadians(lon1)); + GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat2), Math.toRadians(lon2)); + GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat3), Math.toRadians(lon3)); + GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat4), Math.toRadians(lon4)); + final List points = new ArrayList<>(); + points.add(point1); + points.add(point2); + points.add(point3); + points.add(point4); + GeoPolygonFactory.PolygonDescription pd = new 
GeoPolygonFactory.PolygonDescription(points); + return GeoPolygonFactory.makeLargeGeoPolygon(PlanetModel.SPHERE, Collections.singletonList(pd)); + } + + private GeoPolygon buildConvexGeoPolygonWithHole(double lon1, double lat1, + double lon2, double lat2, + double lon3, double lat3, + double lon4, double lat4, + GeoPolygon hole) { + GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat1), Math.toRadians(lon1)); + GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat2), Math.toRadians(lon2)); + GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat3), Math.toRadians(lon3)); + GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat4), Math.toRadians(lon4)); + final List points = new ArrayList<>(); + points.add(point1); + points.add(point2); + points.add(point3); + points.add(point4); + //return new GeoConvexPolygon(PlanetModel.SPHERE,points, Collections.singletonList(hole)); + return GeoPolygonFactory.makeGeoPolygon(PlanetModel.SPHERE, points, Collections.singletonList(hole)); + } + + private GeoPolygon buildConcaveGeoPolygonWithHole(double lon1, double lat1, + double lon2, double lat2, + double lon3, double lat3, + double lon4, double lat4, + GeoPolygon hole) { + GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat1), Math.toRadians(lon1)); + GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat2), Math.toRadians(lon2)); + GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat3), Math.toRadians(lon3)); + GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat4), Math.toRadians(lon4)); + final List points = new ArrayList<>(); + points.add(point1); + points.add(point2); + points.add(point3); + points.add(point4); + return GeoPolygonFactory.makeGeoConcavePolygon(PlanetModel.SPHERE, points, Collections.singletonList(hole)); + } + + private GeoShape getCompositeShape(){ + //MULTIPOLYGON(((-145.790967486 -5.17543698881, -145.790854979 -5.11348060995, 
-145.853073512 -5.11339421216, -145.853192037 -5.17535061936, -145.790967486 -5.17543698881)), + //((-145.8563923 -5.17527125408, -145.856222168 -5.11332154814, -145.918433943 -5.11317773171, -145.918610092 -5.17512738429, -145.8563923 -5.17527125408))) + GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-5.17543698881), Math.toRadians(-145.790967486)); + GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-5.11348060995), Math.toRadians(-145.790854979)); + GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-5.11339421216), Math.toRadians(-145.853073512)); + GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-5.17535061936), Math.toRadians(-145.853192037)); + final List points1 = new ArrayList<>(); + points1.add(point1); + points1.add(point2); + points1.add(point3); + points1.add(point4); + GeoPolygon pol1 = GeoPolygonFactory.makeGeoPolygon(PlanetModel.SPHERE,points1); + GeoPoint point5 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-5.17527125408), Math.toRadians(-145.8563923)); + GeoPoint point6 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-5.11332154814), Math.toRadians(-145.856222168)); + GeoPoint point7 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-5.11317773171), Math.toRadians(-145.918433943)); + GeoPoint point8 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-5.17512738429), Math.toRadians(-145.918610092)); + final List points2 = new ArrayList<>(); + points2.add(point5); + points2.add(point6); + points2.add(point7); + points2.add(point8); + GeoPolygon pol2 = GeoPolygonFactory.makeGeoPolygon(PlanetModel.SPHERE, points2); + GeoCompositeMembershipShape composite = new GeoCompositeMembershipShape(); + composite.addShape(pol1); + composite.addShape(pol2); + return composite; + } +} From a381bdbaaab60c9d53a574ba1c8a4b4815b51b85 Mon Sep 17 00:00:00 2001 From: Karl Wright Date: Thu, 3 Aug 2017 10:55:17 -0400 Subject: [PATCH 59/95] LUCENE-7906: Fix ant precommit issue with test license 
header. --- .../geom/RandomGeoShapeRelationshipTest.java | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/RandomGeoShapeRelationshipTest.java b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/RandomGeoShapeRelationshipTest.java index fe1b1d9868e..75a8c285722 100644 --- a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/RandomGeoShapeRelationshipTest.java +++ b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/RandomGeoShapeRelationshipTest.java @@ -1,3 +1,20 @@ +/* +* Licensed to the Apache Software Foundation (ASF) under one or more +* contributor license agreements. See the NOTICE file distributed with +* this work for additional information regarding copyright ownership. +* The ASF licenses this file to You under the Apache License, Version 2.0 +* (the "License"); you may not use this file except in compliance with +* the License. You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +*/ + package org.apache.lucene.spatial3d.geom; import com.carrotsearch.randomizedtesting.annotations.Repeat; From c297180cca19eff3f9ace04ef8e56e8418151bd0 Mon Sep 17 00:00:00 2001 From: Christine Poerschke Date: Thu, 3 Aug 2017 16:07:40 +0100 Subject: [PATCH 60/95] SOLR-11163: Fix contrib/ltr Normalizer persistence after solr core reload or restart. 
(Yuki Yano via Christine Poerschke) --- solr/CHANGES.txt | 3 +++ .../apache/solr/ltr/norm/MinMaxNormalizer.java | 4 ++-- .../solr/ltr/norm/StandardNormalizer.java | 4 ++-- .../solr/ltr/norm/TestMinMaxNormalizer.java | 17 ++++++++++++++++- .../solr/ltr/norm/TestStandardNormalizer.java | 17 ++++++++++++++++- 5 files changed, 39 insertions(+), 6 deletions(-) diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index fd8a1e8e238..bb134bb7596 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -415,6 +415,9 @@ Bug Fixes * SOLR-11154: Child documents' return fields now include useDocValuesAsStored fields (Mohammed Sheeri Shaketi Nauage via Ishan Chattopadhyaya) +* SOLR-11163: Fix contrib/ltr Normalizer persistence after solr core reload or restart. + (Yuki Yano via Christine Poerschke) + Optimizations ---------------------- diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/MinMaxNormalizer.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/MinMaxNormalizer.java index ff31c0180f8..f6322fefb39 100644 --- a/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/MinMaxNormalizer.java +++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/MinMaxNormalizer.java @@ -90,8 +90,8 @@ public class MinMaxNormalizer extends Normalizer { @Override public LinkedHashMap paramsToMap() { final LinkedHashMap params = new LinkedHashMap<>(2, 1.0f); - params.put("min", '"'+Float.toString(min)+'"'); - params.put("max", '"'+Float.toString(max)+'"'); + params.put("min", Float.toString(min)); + params.put("max", Float.toString(max)); return params; } diff --git a/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/StandardNormalizer.java b/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/StandardNormalizer.java index 57df7b4eb0f..d908ea941ec 100644 --- a/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/StandardNormalizer.java +++ b/solr/contrib/ltr/src/java/org/apache/solr/ltr/norm/StandardNormalizer.java @@ -82,8 +82,8 @@ public class StandardNormalizer extends Normalizer { 
@Override public LinkedHashMap paramsToMap() { final LinkedHashMap params = new LinkedHashMap<>(2, 1.0f); - params.put("avg", '"'+Float.toString(avg)+'"'); - params.put("std", '"'+Float.toString(std)+'"'); + params.put("avg", Float.toString(avg)); + params.put("std", Float.toString(std)); return params; } diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestMinMaxNormalizer.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestMinMaxNormalizer.java index 794e393a13d..f5e00264d50 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestMinMaxNormalizer.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestMinMaxNormalizer.java @@ -40,7 +40,7 @@ public class TestMinMaxNormalizer { final MinMaxNormalizer mmn = (MinMaxNormalizer)n; assertEquals(mmn.getMin(), expectedMin, 0.0); assertEquals(mmn.getMax(), expectedMax, 0.0); - assertEquals("{min=\""+expectedMin+"\", max=\""+expectedMax+"\"}", mmn.paramsToMap().toString()); + assertEquals("{min="+expectedMin+", max="+expectedMax+"}", mmn.paramsToMap().toString()); return n; } @@ -118,4 +118,19 @@ public class TestMinMaxNormalizer { value = 5; assertEquals((value - 5f) / (10f - 5f), n.normalize(value), 0.0001); } + + @Test + public void testParamsToMap() { + final MinMaxNormalizer n1 = new MinMaxNormalizer(); + n1.setMin(5.0f); + n1.setMax(10.0f); + + final Map params = n1.paramsToMap(); + final MinMaxNormalizer n2 = (MinMaxNormalizer) Normalizer.getInstance( + new SolrResourceLoader(), + MinMaxNormalizer.class.getName(), + params); + assertEquals(n1.getMin(), n2.getMin(), 1e-6); + assertEquals(n1.getMax(), n2.getMax(), 1e-6); + } } diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestStandardNormalizer.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestStandardNormalizer.java index 1794686b1bc..c27dcadd865 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestStandardNormalizer.java +++ 
b/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestStandardNormalizer.java @@ -40,7 +40,7 @@ public class TestStandardNormalizer { final StandardNormalizer sn = (StandardNormalizer)n; assertEquals(sn.getAvg(), expectedAvg, 0.0); assertEquals(sn.getStd(), expectedStd, 0.0); - assertEquals("{avg=\""+expectedAvg+"\", std=\""+expectedStd+"\"}", sn.paramsToMap().toString()); + assertEquals("{avg="+expectedAvg+", std="+expectedStd+"}", sn.paramsToMap().toString()); return n; } @@ -130,4 +130,19 @@ public class TestStandardNormalizer { assertEquals((v - 10f) / (1.5f), norm.normalize(v), 0.0001); } } + + @Test + public void testParamsToMap() { + final StandardNormalizer n1 = new StandardNormalizer(); + n1.setAvg(2.0f); + n1.setStd(3.0f); + + final Map params = n1.paramsToMap(); + final StandardNormalizer n2 = (StandardNormalizer) Normalizer.getInstance( + new SolrResourceLoader(), + StandardNormalizer.class.getName(), + params); + assertEquals(n1.getAvg(), n2.getAvg(), 1e-6); + assertEquals(n1.getStd(), n2.getStd(), 1e-6); + } } From c0a6ffe75d11a8ab9c47bcb6a87ac137e07efb6c Mon Sep 17 00:00:00 2001 From: Christine Poerschke Date: Thu, 3 Aug 2017 16:16:44 +0100 Subject: [PATCH 61/95] SOLR-11187: contrib/ltr TestModelManagerPersistence improvements. (Yuki Yano via Christine Poerschke) * in testFeaturePersistence() method fix some assertJDelete vs. assertJQ copy/paste type issues * add testFilePersistence() method --- solr/CHANGES.txt | 2 + .../rest/TestModelManagerPersistence.java | 81 +++++++++++++++++-- 2 files changed, 76 insertions(+), 7 deletions(-) diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index bb134bb7596..be89d0632ee 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -125,6 +125,8 @@ Other Changes * SOLR-11140: Remove unused parameter in (private) SolrMetricManager.prepareCloudPlugins method. (Omar Abdelnabi via Christine Poerschke) +* SOLR-11187: contrib/ltr TestModelManagerPersistence improvements. 
(Yuki Yano via Christine Poerschke) + ================== 7.0.0 ================== Versions of Major Components diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/store/rest/TestModelManagerPersistence.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/store/rest/TestModelManagerPersistence.java index 9168dd0e95c..9dc28e6fbc2 100644 --- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/store/rest/TestModelManagerPersistence.java +++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/store/rest/TestModelManagerPersistence.java @@ -23,14 +23,15 @@ import org.apache.commons.io.FileUtils; import org.apache.solr.ltr.TestRerankBase; import org.apache.solr.ltr.feature.ValueFeature; import org.apache.solr.ltr.model.LinearModel; -import org.junit.Before; +import org.apache.solr.ltr.store.FeatureStore; +import org.junit.BeforeClass; import org.junit.Test; import org.noggit.ObjectBuilder; public class TestModelManagerPersistence extends TestRerankBase { - @Before - public void init() throws Exception { + @BeforeClass + public static void init() throws Exception { setupPersistenttest(true); } @@ -98,24 +99,90 @@ public class TestModelManagerPersistence extends TestRerankBase { "/responseHeader/status==0"); assertJQ(ManagedFeatureStore.REST_END_POINT + "/test2", "/features/==[]"); - assertJQ(ManagedModelStore.REST_END_POINT + "/test-model2", + assertJQ(ManagedModelStore.REST_END_POINT, "/models/[0]/name=='test-model'"); restTestHarness.reload(); assertJQ(ManagedFeatureStore.REST_END_POINT + "/test2", "/features/==[]"); - assertJQ(ManagedModelStore.REST_END_POINT + "/test-model2", + assertJQ(ManagedModelStore.REST_END_POINT, "/models/[0]/name=='test-model'"); - assertJDelete(ManagedModelStore.REST_END_POINT + "/test-model1", + assertJDelete(ManagedModelStore.REST_END_POINT + "/test-model", "/responseHeader/status==0"); assertJDelete(ManagedFeatureStore.REST_END_POINT + "/test1", "/responseHeader/status==0"); assertJQ(ManagedFeatureStore.REST_END_POINT + "/test1", 
"/features/==[]"); + assertJQ(ManagedModelStore.REST_END_POINT, + "/models/==[]"); restTestHarness.reload(); assertJQ(ManagedFeatureStore.REST_END_POINT + "/test1", "/features/==[]"); - + assertJQ(ManagedModelStore.REST_END_POINT, + "/models/==[]"); } + @Test + public void testFilePersistence() throws Exception { + // check whether models and features are empty + assertJQ(ManagedModelStore.REST_END_POINT, + "/models/==[]"); + assertJQ(ManagedFeatureStore.REST_END_POINT + "/" + FeatureStore.DEFAULT_FEATURE_STORE_NAME, + "/features/==[]"); + + // load models and features from files + loadFeatures("features-linear.json"); + loadModels("linear-model.json"); + + // check loaded models and features + final String modelName = "6029760550880411648"; + assertJQ(ManagedModelStore.REST_END_POINT, + "/models/[0]/name=='"+modelName+"'"); + assertJQ(ManagedFeatureStore.REST_END_POINT + "/" + FeatureStore.DEFAULT_FEATURE_STORE_NAME, + "/features/[0]/name=='title'"); + assertJQ(ManagedFeatureStore.REST_END_POINT + "/" + FeatureStore.DEFAULT_FEATURE_STORE_NAME, + "/features/[1]/name=='description'"); + + // check persistence after reload + restTestHarness.reload(); + assertJQ(ManagedModelStore.REST_END_POINT, + "/models/[0]/name=='"+modelName+"'"); + assertJQ(ManagedFeatureStore.REST_END_POINT + "/" + FeatureStore.DEFAULT_FEATURE_STORE_NAME, + "/features/[0]/name=='title'"); + assertJQ(ManagedFeatureStore.REST_END_POINT + "/" + FeatureStore.DEFAULT_FEATURE_STORE_NAME, + "/features/[1]/name=='description'"); + + // check persistence after restart + jetty.stop(); + jetty.start(); + assertJQ(ManagedModelStore.REST_END_POINT, + "/models/[0]/name=='"+modelName+"'"); + assertJQ(ManagedFeatureStore.REST_END_POINT + "/" + FeatureStore.DEFAULT_FEATURE_STORE_NAME, + "/features/[0]/name=='title'"); + assertJQ(ManagedFeatureStore.REST_END_POINT + "/" + FeatureStore.DEFAULT_FEATURE_STORE_NAME, + "/features/[1]/name=='description'"); + + // delete loaded models and features + 
restTestHarness.delete(ManagedModelStore.REST_END_POINT + "/"+modelName); + restTestHarness.delete(ManagedFeatureStore.REST_END_POINT + "/" + FeatureStore.DEFAULT_FEATURE_STORE_NAME); + assertJQ(ManagedModelStore.REST_END_POINT, + "/models/==[]"); + assertJQ(ManagedFeatureStore.REST_END_POINT + "/" + FeatureStore.DEFAULT_FEATURE_STORE_NAME, + "/features/==[]"); + + // check persistence after reload + restTestHarness.reload(); + assertJQ(ManagedModelStore.REST_END_POINT, + "/models/==[]"); + assertJQ(ManagedFeatureStore.REST_END_POINT + "/" + FeatureStore.DEFAULT_FEATURE_STORE_NAME, + "/features/==[]"); + + // check persistence after restart + jetty.stop(); + jetty.start(); + assertJQ(ManagedModelStore.REST_END_POINT, + "/models/==[]"); + assertJQ(ManagedFeatureStore.REST_END_POINT + "/" + FeatureStore.DEFAULT_FEATURE_STORE_NAME, + "/features/==[]"); + } } From c6ae0496627236b0108941e35cb4646935ad53c3 Mon Sep 17 00:00:00 2001 From: Karl Wright Date: Thu, 3 Aug 2017 11:26:32 -0400 Subject: [PATCH 62/95] LUCENE-7906: Add missing override, fixing javadoc --- .../apache/lucene/spatial3d/geom/GeoCompositeAreaShape.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoCompositeAreaShape.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoCompositeAreaShape.java index c7ad81d99d5..5e2f9975a2b 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoCompositeAreaShape.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoCompositeAreaShape.java @@ -18,7 +18,7 @@ package org.apache.lucene.spatial3d.geom; /** - * GeoCompositeAreShape is a set of GeoAreaShape's, treated as a unit. + * GeoCompositeAreaShape is a set of GeoAreaShape's, treated as a unit. 
* * @lucene.experimental */ @@ -37,6 +37,7 @@ public class GeoCompositeAreaShape extends GeoCompositeMembershipShape implement shapes.add(shape); } + @Override public boolean intersects(GeoShape geoShape){ for(GeoShape inShape : shapes){ if (((GeoAreaShape)inShape).intersects(geoShape)){ From dd00446bb912cbb7143d4e8d2394ac9960735f49 Mon Sep 17 00:00:00 2001 From: Karl Wright Date: Thu, 3 Aug 2017 11:43:42 -0400 Subject: [PATCH 63/95] LUCENE-7906: Add missing constructor to make precommit happy --- .../apache/lucene/spatial3d/geom/GeoCompositeAreaShape.java | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoCompositeAreaShape.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoCompositeAreaShape.java index 5e2f9975a2b..8671e624a27 100644 --- a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoCompositeAreaShape.java +++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/geom/GeoCompositeAreaShape.java @@ -24,6 +24,11 @@ package org.apache.lucene.spatial3d.geom; */ public class GeoCompositeAreaShape extends GeoCompositeMembershipShape implements GeoAreaShape { + /** Constructor. + */ + public GeoCompositeAreaShape() { + } + /** * Add a shape to the composite. It throw an IllegalArgumentException * if the shape is not a GeoAreaShape From 78b6e5031a49b7328cb27704a583044ebbcfe91d Mon Sep 17 00:00:00 2001 From: Christine Poerschke Date: Thu, 3 Aug 2017 17:08:29 +0100 Subject: [PATCH 64/95] Correct SOLR_STOP_WAIT value in Solr Ref Guide (it changed from 5 to 180s in SOLR-9371). 
--- solr/solr-ref-guide/src/solr-control-script-reference.adoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/solr/solr-ref-guide/src/solr-control-script-reference.adoc b/solr/solr-ref-guide/src/solr-control-script-reference.adoc index 588867145ee..735d438cd9e 100644 --- a/solr/solr-ref-guide/src/solr-control-script-reference.adoc +++ b/solr/solr-ref-guide/src/solr-control-script-reference.adoc @@ -221,7 +221,7 @@ The run in-foreground option (`-f`) is not compatible with the `-e` option since === Stop -The `stop` command sends a STOP request to a running Solr node, which allows it to shutdown gracefully. The command will wait up to 5 seconds for Solr to stop gracefully and then will forcefully kill the process (kill -9). +The `stop` command sends a STOP request to a running Solr node, which allows it to shutdown gracefully. The command will wait up to 180 seconds for Solr to stop gracefully and then will forcefully kill the process (kill -9). `bin/solr stop [options]` From 8cd2942e354687b6f655ef831d5d525d63dd46a8 Mon Sep 17 00:00:00 2001 From: Varun Thacker Date: Thu, 3 Aug 2017 09:50:00 -0700 Subject: [PATCH 65/95] SOLR-11182: A split shard failure on IOException should be logged --- solr/CHANGES.txt | 2 ++ .../src/java/org/apache/solr/update/DirectUpdateHandler2.java | 1 + 2 files changed, 3 insertions(+) diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index be89d0632ee..012ac868224 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -420,6 +420,8 @@ Bug Fixes * SOLR-11163: Fix contrib/ltr Normalizer persistence after solr core reload or restart. 
(Yuki Yano via Christine Poerschke) +* SOLR-11182: A split shard failure on IOException should be logged (Varun Thacker) + Optimizations ---------------------- diff --git a/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java b/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java index 3efb748fd02..bfed1c17d9a 100644 --- a/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java +++ b/solr/core/src/java/org/apache/solr/update/DirectUpdateHandler2.java @@ -911,6 +911,7 @@ public class DirectUpdateHandler2 extends UpdateHandler implements SolrCoreState } catch (IOException e) { numErrors.increment(); numErrorsCumulative.mark(); + throw e; } } From 23d383765b1effaf1df8fd5a0132d32dd17b6840 Mon Sep 17 00:00:00 2001 From: Karl Wright Date: Thu, 3 Aug 2017 17:28:16 -0400 Subject: [PATCH 66/95] LUCENE-7906: More fixes for precommit breakage: can't use Math.toRadians --- .../CompositeGeoPolygonRelationshipsTest.java | 82 ++--- .../lucene/spatial3d/geom/Geo3DUtil.java | 285 ++++++++++++++++++ .../SimpleGeoPolygonRelationshipsTest.java | 96 +++--- 3 files changed, 374 insertions(+), 89 deletions(-) create mode 100644 lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/Geo3DUtil.java diff --git a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/CompositeGeoPolygonRelationshipsTest.java b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/CompositeGeoPolygonRelationshipsTest.java index f7d660660ce..ffcb5e388d3 100644 --- a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/CompositeGeoPolygonRelationshipsTest.java +++ b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/CompositeGeoPolygonRelationshipsTest.java @@ -705,11 +705,11 @@ public class CompositeGeoPolygonRelationshipsTest { double lon4,double lat4, double lon5,double lat5) { - GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat1), Math.toRadians(lon1)); - GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, 
Math.toRadians(lat2), Math.toRadians(lon2)); - GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat3), Math.toRadians(lon3)); - GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat4), Math.toRadians(lon4)); - GeoPoint point5 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat5), Math.toRadians(lon5)); + GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat1), Geo3DUtil.fromDegrees(lon1)); + GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat2), Geo3DUtil.fromDegrees(lon2)); + GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat3), Geo3DUtil.fromDegrees(lon3)); + GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat4), Geo3DUtil.fromDegrees(lon4)); + GeoPoint point5 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat5), Geo3DUtil.fromDegrees(lon5)); final List points = new ArrayList<>(); points.add(point1); points.add(point2); @@ -724,10 +724,10 @@ public class CompositeGeoPolygonRelationshipsTest { double lon3,double lat3, double lon4,double lat4) { - GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat1), Math.toRadians(lon1)); - GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat2), Math.toRadians(lon2)); - GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat3), Math.toRadians(lon3)); - GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat4), Math.toRadians(lon4)); + GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat1), Geo3DUtil.fromDegrees(lon1)); + GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat2), Geo3DUtil.fromDegrees(lon2)); + GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat3), Geo3DUtil.fromDegrees(lon3)); + GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat4), Geo3DUtil.fromDegrees(lon4)); final List points = new ArrayList<>(); 
points.add(point1); points.add(point2); @@ -738,14 +738,14 @@ public class CompositeGeoPolygonRelationshipsTest { private GeoPolygon getCompositePolygon(){ //POLYGON((0 80, 45 85 ,90 80,135 85,180 80, -135 85, -90 80, -45 85,0 80)) - GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(80), Math.toRadians(0)); - GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(85), Math.toRadians(45)); - GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(80), Math.toRadians(90)); - GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(85), Math.toRadians(135)); - GeoPoint point5 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(80), Math.toRadians(180)); - GeoPoint point6 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(85), Math.toRadians(-135)); - GeoPoint point7 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(80), Math.toRadians(-90)); - GeoPoint point8 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(85), Math.toRadians(-45)); + GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(80), Geo3DUtil.fromDegrees(0)); + GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(85), Geo3DUtil.fromDegrees(45)); + GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(80), Geo3DUtil.fromDegrees(90)); + GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(85), Geo3DUtil.fromDegrees(135)); + GeoPoint point5 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(80), Geo3DUtil.fromDegrees(180)); + GeoPoint point6 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(85), Geo3DUtil.fromDegrees(-135)); + GeoPoint point7 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(80), Geo3DUtil.fromDegrees(-90)); + GeoPoint point8 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(85), Geo3DUtil.fromDegrees(-45)); final List points = new ArrayList<>(); points.add(point1); points.add(point2); @@ -760,14 +760,14 @@ public class 
CompositeGeoPolygonRelationshipsTest { private GeoPolygon getComplexPolygon(){ //POLYGON((0 80, 45 85 ,90 80,135 85,180 80, -135 85, -90 80, -45 85,0 80)) - GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(80), Math.toRadians(0)); - GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(85), Math.toRadians(45)); - GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(80), Math.toRadians(90)); - GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(85), Math.toRadians(135)); - GeoPoint point5 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(80), Math.toRadians(180)); - GeoPoint point6 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(85), Math.toRadians(-135)); - GeoPoint point7 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(80), Math.toRadians(-90)); - GeoPoint point8 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(85), Math.toRadians(-45)); + GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(80), Geo3DUtil.fromDegrees(0)); + GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(85), Geo3DUtil.fromDegrees(45)); + GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(80), Geo3DUtil.fromDegrees(90)); + GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(85), Geo3DUtil.fromDegrees(135)); + GeoPoint point5 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(80), Geo3DUtil.fromDegrees(180)); + GeoPoint point6 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(85), Geo3DUtil.fromDegrees(-135)); + GeoPoint point7 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(80), Geo3DUtil.fromDegrees(-90)); + GeoPoint point8 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(85), Geo3DUtil.fromDegrees(-45)); final List points = new ArrayList<>(); points.add(point1); points.add(point2); @@ -784,20 +784,20 @@ public class CompositeGeoPolygonRelationshipsTest { private GeoPolygon getMultiPolygon(){ 
//MULTIPOLYGON(((-145.790967486 -5.17543698881, -145.790854979 -5.11348060995, -145.853073512 -5.11339421216, -145.853192037 -5.17535061936, -145.790967486 -5.17543698881)), //((-145.8563923 -5.17527125408, -145.856222168 -5.11332154814, -145.918433943 -5.11317773171, -145.918610092 -5.17512738429, -145.8563923 -5.17527125408))) - GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-5.17543698881), Math.toRadians(-145.790967486)); - GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-5.11348060995), Math.toRadians(-145.790854979)); - GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-5.11339421216), Math.toRadians(-145.853073512)); - GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-5.17535061936), Math.toRadians(-145.853192037)); + GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-5.17543698881), Geo3DUtil.fromDegrees(-145.790967486)); + GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-5.11348060995), Geo3DUtil.fromDegrees(-145.790854979)); + GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-5.11339421216), Geo3DUtil.fromDegrees(-145.853073512)); + GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-5.17535061936), Geo3DUtil.fromDegrees(-145.853192037)); final List points1 = new ArrayList<>(); points1.add(point1); points1.add(point2); points1.add(point3); points1.add(point4); GeoPolygonFactory.PolygonDescription pd1 = new GeoPolygonFactory.PolygonDescription(points1); - GeoPoint point5 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-5.17527125408), Math.toRadians(-145.8563923)); - GeoPoint point6 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-5.11332154814), Math.toRadians(-145.856222168)); - GeoPoint point7 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-5.11317773171), Math.toRadians(-145.918433943)); - GeoPoint point8 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-5.17512738429), 
Math.toRadians(-145.918610092)); + GeoPoint point5 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-5.17527125408), Geo3DUtil.fromDegrees(-145.8563923)); + GeoPoint point6 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-5.11332154814), Geo3DUtil.fromDegrees(-145.856222168)); + GeoPoint point7 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-5.11317773171), Geo3DUtil.fromDegrees(-145.918433943)); + GeoPoint point8 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-5.17512738429), Geo3DUtil.fromDegrees(-145.918610092)); final List points2 = new ArrayList<>(); points2.add(point5); points2.add(point6); @@ -813,19 +813,19 @@ public class CompositeGeoPolygonRelationshipsTest { public GeoShape getInsideCompositeShape(){ //MULTIPOLYGON(((19.945091 -60.552631, 20.319948 -61.555652, 20.9 -61.5, 20.9 -61, 19.945091 -60.552631)), // ((21.1 -61.5, 23.107901 -61.253298, 22.720804 -60.457713,21.1 -61, 21.1 -61.5))) - GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-60.552631), Math.toRadians(19.945091)); - GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-61.555652), Math.toRadians(20.319948)); - GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-61.5), Math.toRadians(20.9)); - GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-61), Math.toRadians(20.9)); + GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-60.552631), Geo3DUtil.fromDegrees(19.945091)); + GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-61.555652), Geo3DUtil.fromDegrees(20.319948)); + GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-61.5), Geo3DUtil.fromDegrees(20.9)); + GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-61), Geo3DUtil.fromDegrees(20.9)); final List points1 = new ArrayList<>(); points1.add(point1); points1.add(point2); points1.add(point3); points1.add(point4); - GeoPoint point5 = new 
GeoPoint(PlanetModel.SPHERE, Math.toRadians(-61.5), Math.toRadians(21.1)); - GeoPoint point6 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-61.253298), Math.toRadians(23.107901)); - GeoPoint point7 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-60.457713), Math.toRadians(22.720804)); - GeoPoint point8 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-61), Math.toRadians(21.1)); + GeoPoint point5 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-61.5), Geo3DUtil.fromDegrees(21.1)); + GeoPoint point6 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-61.253298), Geo3DUtil.fromDegrees(23.107901)); + GeoPoint point7 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-60.457713), Geo3DUtil.fromDegrees(22.720804)); + GeoPoint point8 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-61), Geo3DUtil.fromDegrees(21.1)); final List points2 = new ArrayList<>(); points2.add(point5); points2.add(point6); diff --git a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/Geo3DUtil.java b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/Geo3DUtil.java new file mode 100644 index 00000000000..e9b7ebc7c2d --- /dev/null +++ b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/Geo3DUtil.java @@ -0,0 +1,285 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.lucene.spatial3d.geom; + +import org.apache.lucene.spatial3d.geom.PlanetModel; +import org.apache.lucene.spatial3d.geom.GeoPolygonFactory; +import org.apache.lucene.spatial3d.geom.GeoPathFactory; +import org.apache.lucene.spatial3d.geom.GeoCircleFactory; +import org.apache.lucene.spatial3d.geom.GeoBBoxFactory; +import org.apache.lucene.spatial3d.geom.GeoPath; +import org.apache.lucene.spatial3d.geom.GeoPolygon; +import org.apache.lucene.spatial3d.geom.GeoCircle; +import org.apache.lucene.spatial3d.geom.GeoBBox; +import org.apache.lucene.spatial3d.geom.GeoCompositePolygon; +import org.apache.lucene.spatial3d.geom.GeoPoint; + +import org.apache.lucene.geo.Polygon; +import org.apache.lucene.geo.GeoUtils; + +import java.util.List; +import java.util.ArrayList; + +class Geo3DUtil { + + /** How many radians are in one earth surface meter */ + final static double RADIANS_PER_METER = 1.0 / PlanetModel.WGS84_MEAN; + /** How many radians are in one degree */ + final static double RADIANS_PER_DEGREE = Math.PI / 180.0; + + private static final double MAX_VALUE = PlanetModel.WGS84.getMaximumMagnitude(); + private static final int BITS = 32; + private static final double MUL = (0x1L< MAX_VALUE) { + throw new IllegalArgumentException("value=" + x + " is out-of-bounds (greater than WGS84's planetMax=" + MAX_VALUE + ")"); + } + if (x < -MAX_VALUE) { + throw new IllegalArgumentException("value=" + x + " is out-of-bounds (less than than WGS84's -planetMax=" + -MAX_VALUE + ")"); + } + long result = (long) Math.floor(x / DECODE); + assert result >= Integer.MIN_VALUE; + assert result <= Integer.MAX_VALUE; + return (int) result; + } + + public static double decodeValue(int x) { + double result; + if (x == MIN_ENCODED_VALUE) { + // We must special case this, because -MAX_VALUE is not guaranteed to land precisely at a floor value, and we don't ever want to + // 
return a value outside of the planet's range (I think?). The max value is "safe" because we floor during encode: + result = -MAX_VALUE; + } else if (x == MAX_ENCODED_VALUE) { + result = MAX_VALUE; + } else { + // We decode to the center value; this keeps the encoding stable + result = (x+0.5) * DECODE; + } + assert result >= -MAX_VALUE && result <= MAX_VALUE; + return result; + } + + /** Returns smallest double that would encode to int x. */ + // NOTE: keep this package private!! + static double decodeValueFloor(int x) { + return x * DECODE; + } + + /** Returns a double value >= x such that if you multiply that value by an int, and then + * divide it by that int again, you get precisely the same value back */ + private static double getNextSafeDouble(double x) { + + // Move to double space: + long bits = Double.doubleToLongBits(x); + + // Make sure we are beyond the actual maximum value: + bits += Integer.MAX_VALUE; + + // Clear the bottom 32 bits: + bits &= ~((long) Integer.MAX_VALUE); + + // Convert back to double: + double result = Double.longBitsToDouble(bits); + assert result > x; + return result; + } + + /** Returns largest double that would encode to int x. */ + // NOTE: keep this package private!! + static double decodeValueCeil(int x) { + assert x < Integer.MAX_VALUE; + return Math.nextDown((x+1) * DECODE); + } + + /** Converts degress to radians */ + static double fromDegrees(final double degrees) { + return degrees * RADIANS_PER_DEGREE; + } + + /** Converts earth-surface meters to radians */ + static double fromMeters(final double meters) { + return meters * RADIANS_PER_METER; + } + + /** + * Convert a set of Polygon objects into a GeoPolygon. + * @param polygons are the Polygon objects. + * @return the GeoPolygon. + */ + static GeoPolygon fromPolygon(final Polygon... 
polygons) { + //System.err.println("Creating polygon..."); + if (polygons.length < 1) { + throw new IllegalArgumentException("need at least one polygon"); + } + final GeoPolygon shape; + if (polygons.length == 1) { + final GeoPolygon component = fromPolygon(polygons[0]); + if (component == null) { + // Polygon is degenerate + shape = new GeoCompositePolygon(); + } else { + shape = component; + } + } else { + final GeoCompositePolygon poly = new GeoCompositePolygon(); + for (final Polygon p : polygons) { + final GeoPolygon component = fromPolygon(p); + if (component != null) { + poly.addShape(component); + } + } + shape = poly; + } + return shape; + //System.err.println("...done"); + } + + + /** + * Convert a Polygon object to a large GeoPolygon. + * @param polygons is the list of polygons to convert. + * @return the large GeoPolygon. + */ + static GeoPolygon fromLargePolygon(final Polygon... polygons) { + if (polygons.length < 1) { + throw new IllegalArgumentException("need at least one polygon"); + } + return GeoPolygonFactory.makeLargeGeoPolygon(PlanetModel.WGS84, convertToDescription(polygons)); + } + + /** + * Convert input parameters to a path. + * @param pathLatitudes latitude values for points of the path: must be within standard +/-90 coordinate bounds. + * @param pathLongitudes longitude values for points of the path: must be within standard +/-180 coordinate bounds. + * @param pathWidthMeters width of the path in meters. + * @return the path. 
+ */ + static GeoPath fromPath(final double[] pathLatitudes, final double[] pathLongitudes, final double pathWidthMeters) { + if (pathLatitudes.length != pathLongitudes.length) { + throw new IllegalArgumentException("same number of latitudes and longitudes required"); + } + final GeoPoint[] points = new GeoPoint[pathLatitudes.length]; + for (int i = 0; i < pathLatitudes.length; i++) { + final double latitude = pathLatitudes[i]; + final double longitude = pathLongitudes[i]; + GeoUtils.checkLatitude(latitude); + GeoUtils.checkLongitude(longitude); + points[i] = new GeoPoint(PlanetModel.WGS84, fromDegrees(latitude), fromDegrees(longitude)); + } + return GeoPathFactory.makeGeoPath(PlanetModel.WGS84, fromMeters(pathWidthMeters), points); + } + + /** + * Convert input parameters to a circle. + * @param latitude latitude at the center: must be within standard +/-90 coordinate bounds. + * @param longitude longitude at the center: must be within standard +/-180 coordinate bounds. + * @param radiusMeters maximum distance from the center in meters: must be non-negative and finite. + * @return the circle. + */ + static GeoCircle fromDistance(final double latitude, final double longitude, final double radiusMeters) { + GeoUtils.checkLatitude(latitude); + GeoUtils.checkLongitude(longitude); + return GeoCircleFactory.makeGeoCircle(PlanetModel.WGS84, fromDegrees(latitude), fromDegrees(longitude), fromMeters(radiusMeters)); + } + + /** + * Convert input parameters to a box. + * @param minLatitude latitude lower bound: must be within standard +/-90 coordinate bounds. + * @param maxLatitude latitude upper bound: must be within standard +/-90 coordinate bounds. + * @param minLongitude longitude lower bound: must be within standard +/-180 coordinate bounds. + * @param maxLongitude longitude upper bound: must be within standard +/-180 coordinate bounds. + * @return the box. 
+ */ + static GeoBBox fromBox(final double minLatitude, final double maxLatitude, final double minLongitude, final double maxLongitude) { + GeoUtils.checkLatitude(minLatitude); + GeoUtils.checkLongitude(minLongitude); + GeoUtils.checkLatitude(maxLatitude); + GeoUtils.checkLongitude(maxLongitude); + return GeoBBoxFactory.makeGeoBBox(PlanetModel.WGS84, + Geo3DUtil.fromDegrees(maxLatitude), Geo3DUtil.fromDegrees(minLatitude), Geo3DUtil.fromDegrees(minLongitude), Geo3DUtil.fromDegrees(maxLongitude)); + } + + /** + * Convert a Polygon object into a GeoPolygon. + * This method uses + * @param polygon is the Polygon object. + * @return the GeoPolygon. + */ + private static GeoPolygon fromPolygon(final Polygon polygon) { + // First, assemble the "holes". The geo3d convention is to use the same polygon sense on the inner ring as the + // outer ring, so we process these recursively with reverseMe flipped. + final Polygon[] theHoles = polygon.getHoles(); + final List holeList = new ArrayList<>(theHoles.length); + for (final Polygon hole : theHoles) { + //System.out.println("Hole: "+hole); + final GeoPolygon component = fromPolygon(hole); + if (component != null) { + holeList.add(component); + } + } + + // Now do the polygon itself + final double[] polyLats = polygon.getPolyLats(); + final double[] polyLons = polygon.getPolyLons(); + + // I presume the arguments have already been checked + final List points = new ArrayList<>(polyLats.length-1); + // We skip the last point anyway because the API requires it to be repeated, and geo3d doesn't repeat it. 
+ for (int i = 0; i < polyLats.length - 1; i++) { + final int index = polyLats.length - 2 - i; + points.add(new GeoPoint(PlanetModel.WGS84, fromDegrees(polyLats[index]), fromDegrees(polyLons[index]))); + } + //System.err.println(" building polygon with "+points.size()+" points..."); + final GeoPolygon rval = GeoPolygonFactory.makeGeoPolygon(PlanetModel.WGS84, points, holeList); + //System.err.println(" ...done"); + return rval; + } + + /** + * Convert a list of polygons to a list of polygon descriptions. + * @param polygons is the list of polygons to convert. + * @return the list of polygon descriptions. + */ + private static List convertToDescription(final Polygon... polygons) { + final List descriptions = new ArrayList<>(polygons.length); + for (final Polygon polygon : polygons) { + final Polygon[] theHoles = polygon.getHoles(); + final List holes = convertToDescription(theHoles); + + // Now do the polygon itself + final double[] polyLats = polygon.getPolyLats(); + final double[] polyLons = polygon.getPolyLons(); + + // I presume the arguments have already been checked + final List points = new ArrayList<>(polyLats.length-1); + // We skip the last point anyway because the API requires it to be repeated, and geo3d doesn't repeat it. 
+ for (int i = 0; i < polyLats.length - 1; i++) { + final int index = polyLats.length - 2 - i; + points.add(new GeoPoint(PlanetModel.WGS84, fromDegrees(polyLats[index]), fromDegrees(polyLons[index]))); + } + + descriptions.add(new GeoPolygonFactory.PolygonDescription(points, holes)); + } + return descriptions; + } + + +} diff --git a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/SimpleGeoPolygonRelationshipsTest.java b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/SimpleGeoPolygonRelationshipsTest.java index 0d336396ba9..59a52b4ca91 100644 --- a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/SimpleGeoPolygonRelationshipsTest.java +++ b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/geom/SimpleGeoPolygonRelationshipsTest.java @@ -547,7 +547,7 @@ public class SimpleGeoPolygonRelationshipsTest { 23.207901, -61.453298, 22.820804, -60.257713); - GeoCircle outCircle = GeoCircleFactory.makeGeoCircle(PlanetModel.SPHERE, Math.toRadians(-70), Math.toRadians(23), Math.toRadians(1)); + GeoCircle outCircle = GeoCircleFactory.makeGeoCircle(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-70), Geo3DUtil.fromDegrees(23), Geo3DUtil.fromDegrees(1)); int rel = originalConvexPol.getRelationship(outCircle); assertEquals(GeoArea.DISJOINT, rel); rel = originalConcavePol.getRelationship(outCircle); @@ -555,7 +555,7 @@ public class SimpleGeoPolygonRelationshipsTest { rel = originalComplexPol.getRelationship(outCircle); assertEquals(GeoArea.DISJOINT, rel); - GeoCircle overlapCircle = GeoCircleFactory.makeGeoCircle(PlanetModel.SPHERE, Math.toRadians(-61.5), Math.toRadians(20), Math.toRadians(1)); + GeoCircle overlapCircle = GeoCircleFactory.makeGeoCircle(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-61.5), Geo3DUtil.fromDegrees(20), Geo3DUtil.fromDegrees(1)); rel = originalConvexPol.getRelationship(overlapCircle); assertEquals(GeoArea.OVERLAPS, rel); rel = originalConcavePol.getRelationship(overlapCircle); @@ -563,7 +563,7 @@ public class 
SimpleGeoPolygonRelationshipsTest { rel = originalComplexPol.getRelationship(overlapCircle); assertEquals(GeoArea.OVERLAPS, rel); - GeoCircle inCircle = GeoCircleFactory.makeGeoCircle(PlanetModel.SPHERE, Math.toRadians(-61), Math.toRadians(21), Math.toRadians(0.1)); + GeoCircle inCircle = GeoCircleFactory.makeGeoCircle(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-61), Geo3DUtil.fromDegrees(21), Geo3DUtil.fromDegrees(0.1)); rel = originalConvexPol.getRelationship(inCircle); assertEquals(GeoArea.WITHIN, rel); rel = originalConcavePol.getRelationship(inCircle); @@ -571,7 +571,7 @@ public class SimpleGeoPolygonRelationshipsTest { rel = originalComplexPol.getRelationship(inCircle); assertEquals(GeoArea.WITHIN, rel); - GeoCircle onCircle = GeoCircleFactory.makeGeoCircle(PlanetModel.SPHERE, Math.toRadians(-61), Math.toRadians(21), Math.toRadians(10.)); + GeoCircle onCircle = GeoCircleFactory.makeGeoCircle(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-61), Geo3DUtil.fromDegrees(21), Geo3DUtil.fromDegrees(10.)); rel = originalConvexPol.getRelationship(onCircle); assertEquals(GeoArea.CONTAINS, rel); rel = originalConcavePol.getRelationship(onCircle); @@ -598,10 +598,10 @@ public class SimpleGeoPolygonRelationshipsTest { 23.207901, -61.453298, 22.820804, -60.257713); - GeoBBox outRectangle = GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, Math.toRadians(-69), - Math.toRadians(-70), - Math.toRadians(22), - Math.toRadians(23)); + GeoBBox outRectangle = GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-69), + Geo3DUtil.fromDegrees(-70), + Geo3DUtil.fromDegrees(22), + Geo3DUtil.fromDegrees(23)); int rel = originalConvexPol.getRelationship(outRectangle); assertEquals(GeoArea.DISJOINT, rel); rel = outRectangle.getRelationship(originalConvexPol); @@ -611,10 +611,10 @@ public class SimpleGeoPolygonRelationshipsTest { rel = originalComplexPol.getRelationship(outRectangle); assertEquals(GeoArea.DISJOINT, rel); - GeoBBox overlapRectangle = 
GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, Math.toRadians(-61), - Math.toRadians(-62), - Math.toRadians(22), - Math.toRadians(23)); + GeoBBox overlapRectangle = GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-61), + Geo3DUtil.fromDegrees(-62), + Geo3DUtil.fromDegrees(22), + Geo3DUtil.fromDegrees(23)); rel = originalConvexPol.getRelationship(overlapRectangle); assertEquals(GeoArea.OVERLAPS, rel); rel = overlapRectangle.getRelationship(originalConvexPol); @@ -624,10 +624,10 @@ public class SimpleGeoPolygonRelationshipsTest { rel = originalComplexPol.getRelationship(overlapRectangle); assertEquals(GeoArea.OVERLAPS, rel); - GeoBBox inRectangle = GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, Math.toRadians(-61), - Math.toRadians(-61.1), - Math.toRadians(22.5), - Math.toRadians(23)); + GeoBBox inRectangle = GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-61), + Geo3DUtil.fromDegrees(-61.1), + Geo3DUtil.fromDegrees(22.5), + Geo3DUtil.fromDegrees(23)); rel = originalConvexPol.getRelationship(inRectangle); assertEquals(GeoArea.WITHIN, rel); rel = inRectangle.getRelationship(originalConvexPol); @@ -637,10 +637,10 @@ public class SimpleGeoPolygonRelationshipsTest { rel = originalComplexPol.getRelationship(inRectangle); assertEquals(GeoArea.WITHIN, rel); - GeoBBox onRectangle = GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, Math.toRadians(-59), - Math.toRadians(-64.1), - Math.toRadians(18.5), - Math.toRadians(27)); + GeoBBox onRectangle = GeoBBoxFactory.makeGeoBBox(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-59), + Geo3DUtil.fromDegrees(-64.1), + Geo3DUtil.fromDegrees(18.5), + Geo3DUtil.fromDegrees(27)); rel = originalConvexPol.getRelationship(onRectangle); assertEquals(GeoArea.CONTAINS, rel); rel = onRectangle.getRelationship(originalConvexPol); @@ -726,10 +726,10 @@ public class SimpleGeoPolygonRelationshipsTest { double lon2, double lat2, double lon3, double lat3, double lon4, double lat4) { - GeoPoint point1 = new 
GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat1), Math.toRadians(lon1)); - GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat2), Math.toRadians(lon2)); - GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat3), Math.toRadians(lon3)); - GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat4), Math.toRadians(lon4)); + GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat1), Geo3DUtil.fromDegrees(lon1)); + GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat2), Geo3DUtil.fromDegrees(lon2)); + GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat3), Geo3DUtil.fromDegrees(lon3)); + GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat4), Geo3DUtil.fromDegrees(lon4)); final List points = new ArrayList<>(); points.add(point1); points.add(point2); @@ -742,10 +742,10 @@ public class SimpleGeoPolygonRelationshipsTest { double lon2, double lat2, double lon3, double lat3, double lon4, double lat4) { - GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat1), Math.toRadians(lon1)); - GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat2), Math.toRadians(lon2)); - GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat3), Math.toRadians(lon3)); - GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat4), Math.toRadians(lon4)); + GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat1), Geo3DUtil.fromDegrees(lon1)); + GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat2), Geo3DUtil.fromDegrees(lon2)); + GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat3), Geo3DUtil.fromDegrees(lon3)); + GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat4), Geo3DUtil.fromDegrees(lon4)); final List points = new ArrayList<>(); points.add(point1); points.add(point2); @@ -758,10 +758,10 @@ public class 
SimpleGeoPolygonRelationshipsTest { double lon2, double lat2, double lon3, double lat3, double lon4, double lat4) { - GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat1), Math.toRadians(lon1)); - GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat2), Math.toRadians(lon2)); - GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat3), Math.toRadians(lon3)); - GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat4), Math.toRadians(lon4)); + GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat1), Geo3DUtil.fromDegrees(lon1)); + GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat2), Geo3DUtil.fromDegrees(lon2)); + GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat3), Geo3DUtil.fromDegrees(lon3)); + GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat4), Geo3DUtil.fromDegrees(lon4)); final List points = new ArrayList<>(); points.add(point1); points.add(point2); @@ -776,10 +776,10 @@ public class SimpleGeoPolygonRelationshipsTest { double lon3, double lat3, double lon4, double lat4, GeoPolygon hole) { - GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat1), Math.toRadians(lon1)); - GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat2), Math.toRadians(lon2)); - GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat3), Math.toRadians(lon3)); - GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat4), Math.toRadians(lon4)); + GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat1), Geo3DUtil.fromDegrees(lon1)); + GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat2), Geo3DUtil.fromDegrees(lon2)); + GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat3), Geo3DUtil.fromDegrees(lon3)); + GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat4), 
Geo3DUtil.fromDegrees(lon4)); final List points = new ArrayList<>(); points.add(point1); points.add(point2); @@ -794,10 +794,10 @@ public class SimpleGeoPolygonRelationshipsTest { double lon3, double lat3, double lon4, double lat4, GeoPolygon hole) { - GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat1), Math.toRadians(lon1)); - GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat2), Math.toRadians(lon2)); - GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat3), Math.toRadians(lon3)); - GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(lat4), Math.toRadians(lon4)); + GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat1), Geo3DUtil.fromDegrees(lon1)); + GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat2), Geo3DUtil.fromDegrees(lon2)); + GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat3), Geo3DUtil.fromDegrees(lon3)); + GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(lat4), Geo3DUtil.fromDegrees(lon4)); final List points = new ArrayList<>(); points.add(point1); points.add(point2); @@ -809,20 +809,20 @@ public class SimpleGeoPolygonRelationshipsTest { private GeoShape getCompositeShape(){ //MULTIPOLYGON(((-145.790967486 -5.17543698881, -145.790854979 -5.11348060995, -145.853073512 -5.11339421216, -145.853192037 -5.17535061936, -145.790967486 -5.17543698881)), //((-145.8563923 -5.17527125408, -145.856222168 -5.11332154814, -145.918433943 -5.11317773171, -145.918610092 -5.17512738429, -145.8563923 -5.17527125408))) - GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-5.17543698881), Math.toRadians(-145.790967486)); - GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-5.11348060995), Math.toRadians(-145.790854979)); - GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-5.11339421216), Math.toRadians(-145.853073512)); - GeoPoint point4 = new 
GeoPoint(PlanetModel.SPHERE, Math.toRadians(-5.17535061936), Math.toRadians(-145.853192037)); + GeoPoint point1 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-5.17543698881), Geo3DUtil.fromDegrees(-145.790967486)); + GeoPoint point2 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-5.11348060995), Geo3DUtil.fromDegrees(-145.790854979)); + GeoPoint point3 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-5.11339421216), Geo3DUtil.fromDegrees(-145.853073512)); + GeoPoint point4 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-5.17535061936), Geo3DUtil.fromDegrees(-145.853192037)); final List points1 = new ArrayList<>(); points1.add(point1); points1.add(point2); points1.add(point3); points1.add(point4); GeoPolygon pol1 = GeoPolygonFactory.makeGeoPolygon(PlanetModel.SPHERE,points1); - GeoPoint point5 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-5.17527125408), Math.toRadians(-145.8563923)); - GeoPoint point6 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-5.11332154814), Math.toRadians(-145.856222168)); - GeoPoint point7 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-5.11317773171), Math.toRadians(-145.918433943)); - GeoPoint point8 = new GeoPoint(PlanetModel.SPHERE, Math.toRadians(-5.17512738429), Math.toRadians(-145.918610092)); + GeoPoint point5 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-5.17527125408), Geo3DUtil.fromDegrees(-145.8563923)); + GeoPoint point6 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-5.11332154814), Geo3DUtil.fromDegrees(-145.856222168)); + GeoPoint point7 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-5.11317773171), Geo3DUtil.fromDegrees(-145.918433943)); + GeoPoint point8 = new GeoPoint(PlanetModel.SPHERE, Geo3DUtil.fromDegrees(-5.17512738429), Geo3DUtil.fromDegrees(-145.918610092)); final List points2 = new ArrayList<>(); points2.add(point5); points2.add(point6); From d620326b8888ead82fe0890ae59b07017ef595f0 Mon Sep 17 00:00:00 2001 From: Dawid Weiss Date: 
Fri, 4 Aug 2017 11:14:41 +0200 Subject: [PATCH 67/95] Update morfologik's address from sf to github. --- lucene/core/src/java/org/apache/lucene/util/fst/FST.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lucene/core/src/java/org/apache/lucene/util/fst/FST.java b/lucene/core/src/java/org/apache/lucene/util/fst/FST.java index 5ea6dabbb0e..31ee2b7bde8 100644 --- a/lucene/core/src/java/org/apache/lucene/util/fst/FST.java +++ b/lucene/core/src/java/org/apache/lucene/util/fst/FST.java @@ -54,7 +54,7 @@ import org.apache.lucene.util.RamUsageEstimator; /** Represents an finite state machine (FST), using a * compact byte[] format. *

    The format is similar to what's used by Morfologik - * (http://sourceforge.net/projects/morfologik). + * (https://github.com/morfologik/morfologik-stemming). * *

    See the {@link org.apache.lucene.util.fst package * documentation} for some simple examples. From 7dde798473d1a8640edafb41f28ad25d17f25a2d Mon Sep 17 00:00:00 2001 From: Jim Ferenczi Date: Fri, 4 Aug 2017 12:02:30 +0200 Subject: [PATCH 68/95] LUCENE-7914: Add a maximum recursion level in automaton recursive functions (Operations.isFinite and Operations.topsortState) to prevent large automaton to overflow the stack. --- lucene/CHANGES.txt | 7 ++++- .../lucene/util/automaton/Operations.java | 27 ++++++++++++++----- .../apache/lucene/util/automaton/RegExp.java | 22 ++++++++------- .../lucene/util/automaton/TestOperations.java | 26 ++++++++++++++++-- .../lucene/util/automaton/TestRegExp.java | 25 ++++++----------- .../analyzing/AnalyzingSuggesterTest.java | 3 +-- 6 files changed, 71 insertions(+), 39 deletions(-) diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt index d5cc9e8e6a5..c0f263c9fd4 100644 --- a/lucene/CHANGES.txt +++ b/lucene/CHANGES.txt @@ -14,7 +14,6 @@ Changes in Runtime Behavior ======================= Lucene 7.1.0 ======================= -(No Changes) Optimizations @@ -22,6 +21,12 @@ Optimizations SortedSetDocValuesFacetCounts and others) builds its map (Robert Muir, Adrien Grand, Mike McCandless) +Bug Fixes + +* LUCENE-7914: Add a maximum recursion level in automaton recursive + functions (Operations.isFinite and Operations.topsortState) to prevent + large automaton to overflow the stack (Robert Muir, Adrien Grand, Jim Ferenczi) + ======================= Lucene 7.0.0 ======================= New Features diff --git a/lucene/core/src/java/org/apache/lucene/util/automaton/Operations.java b/lucene/core/src/java/org/apache/lucene/util/automaton/Operations.java index b673a82e974..8ed1b12be2e 100644 --- a/lucene/core/src/java/org/apache/lucene/util/automaton/Operations.java +++ b/lucene/core/src/java/org/apache/lucene/util/automaton/Operations.java @@ -58,6 +58,11 @@ final public class Operations { */ public static final int 
DEFAULT_MAX_DETERMINIZED_STATES = 10000; + /** + * Maximum level of recursion allowed in recursive operations. + */ + public static final int MAX_RECURSION_LEVEL = 1000; + private Operations() {} /** @@ -1018,7 +1023,7 @@ final public class Operations { if (a.getNumStates() == 0) { return true; } - return isFinite(new Transition(), a, 0, new BitSet(a.getNumStates()), new BitSet(a.getNumStates())); + return isFinite(new Transition(), a, 0, new BitSet(a.getNumStates()), new BitSet(a.getNumStates()), 0); } /** @@ -1026,13 +1031,16 @@ final public class Operations { * there are never transitions to dead states.) */ // TODO: not great that this is recursive... in theory a - // large automata could exceed java's stack - private static boolean isFinite(Transition scratch, Automaton a, int state, BitSet path, BitSet visited) { + // large automata could exceed java's stack so the maximum level of recursion is bounded to 1000 + private static boolean isFinite(Transition scratch, Automaton a, int state, BitSet path, BitSet visited, int level) { + if (level > MAX_RECURSION_LEVEL) { + throw new IllegalArgumentException("input automaton is too large: " + level); + } path.set(state); int numTransitions = a.initTransition(state, scratch); for(int t=0;t MAX_RECURSION_LEVEL) { + throw new IllegalArgumentException("input automaton is too large: " + level); + } Transition t = new Transition(); int count = a.initTransition(state, t); for (int i=0;i maxDeterminizedStates) { + throw new TooComplexToDeterminizeException(a, minNumStates); + } + a = Operations.repeat(a, min); a = MinimizationOperations.minimize(a, maxDeterminizedStates); break; case REGEXP_REPEAT_MINMAX: - a = Operations.repeat( - exp1.toAutomatonInternal(automata, automaton_provider, - maxDeterminizedStates), - min, - max); - a = MinimizationOperations.minimize(a, maxDeterminizedStates); + a = exp1.toAutomatonInternal(automata, automaton_provider, maxDeterminizedStates); + int minMaxNumStates = (a.getNumStates() - 1) * 
max; + if (minMaxNumStates > maxDeterminizedStates) { + throw new TooComplexToDeterminizeException(a, minMaxNumStates); + } + a = Operations.repeat(a, min, max); break; case REGEXP_COMPLEMENT: a = Operations.complement( diff --git a/lucene/core/src/test/org/apache/lucene/util/automaton/TestOperations.java b/lucene/core/src/test/org/apache/lucene/util/automaton/TestOperations.java index 01517fc1d0a..5750e8a6ec4 100644 --- a/lucene/core/src/test/org/apache/lucene/util/automaton/TestOperations.java +++ b/lucene/core/src/test/org/apache/lucene/util/automaton/TestOperations.java @@ -52,8 +52,7 @@ public class TestOperations extends LuceneTestCase { for (BytesRef bref : strings) { eachIndividual[i++] = Automata.makeString(bref.utf8ToString()); } - return Operations.determinize(Operations.union(Arrays.asList(eachIndividual)), - DEFAULT_MAX_DETERMINIZED_STATES); + return Operations.determinize(Operations.union(Arrays.asList(eachIndividual)), DEFAULT_MAX_DETERMINIZED_STATES); } /** Test concatenation with empty language returns empty */ @@ -61,6 +60,7 @@ public class TestOperations extends LuceneTestCase { Automaton a = Automata.makeString("a"); Automaton concat = Operations.concatenate(a, Automata.makeEmpty()); assertTrue(Operations.isEmpty(concat)); + } /** Test optimization to concatenate() with empty String to an NFA */ @@ -124,6 +124,28 @@ public class TestOperations extends LuceneTestCase { } } + public void testIsFiniteEatsStack() { + char[] chars = new char[50000]; + TestUtil.randomFixedLengthUnicodeString(random(), chars, 0, chars.length); + String bigString1 = new String(chars); + TestUtil.randomFixedLengthUnicodeString(random(), chars, 0, chars.length); + String bigString2 = new String(chars); + Automaton a = Operations.union(Automata.makeString(bigString1), Automata.makeString(bigString2)); + IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> Operations.isFinite(a)); + assertTrue(exc.getMessage().contains("input automaton is too 
large")); + } + + public void testTopoSortEatsStack() { + char[] chars = new char[50000]; + TestUtil.randomFixedLengthUnicodeString(random(), chars, 0, chars.length); + String bigString1 = new String(chars); + TestUtil.randomFixedLengthUnicodeString(random(), chars, 0, chars.length); + String bigString2 = new String(chars); + Automaton a = Operations.union(Automata.makeString(bigString1), Automata.makeString(bigString2)); + IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> Operations.topoSortStates(a)); + assertTrue(exc.getMessage().contains("input automaton is too large")); + } + /** * Returns the set of all accepted strings. * diff --git a/lucene/core/src/test/org/apache/lucene/util/automaton/TestRegExp.java b/lucene/core/src/test/org/apache/lucene/util/automaton/TestRegExp.java index b9ac6192c82..7d24939c347 100644 --- a/lucene/core/src/test/org/apache/lucene/util/automaton/TestRegExp.java +++ b/lucene/core/src/test/org/apache/lucene/util/automaton/TestRegExp.java @@ -19,13 +19,6 @@ package org.apache.lucene.util.automaton; import org.apache.lucene.util.LuceneTestCase; -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.ObjectInput; -import java.io.ObjectInputStream; -import java.io.ObjectOutput; -import java.io.ObjectOutputStream; - public class TestRegExp extends LuceneTestCase { /** @@ -54,6 +47,14 @@ public class TestRegExp extends LuceneTestCase { assertTrue(expected.getMessage().contains(source)); } + public void testSerializeTooManyStatesToRepeat() throws Exception { + String source = "a{50001}"; + TooComplexToDeterminizeException expected = expectThrows(TooComplexToDeterminizeException.class, () -> { + new RegExp(source).toAutomaton(50000); + }); + assertTrue(expected.getMessage().contains(source)); + } + // LUCENE-6713 public void testSerializeTooManyStatesToDeterminizeExc() throws Exception { // LUCENE-6046 @@ -62,16 +63,6 @@ public class TestRegExp extends LuceneTestCase { new 
RegExp(source).toAutomaton(); }); assertTrue(expected.getMessage().contains(source)); - - ByteArrayOutputStream bos = new ByteArrayOutputStream(); - ObjectOutput out = new ObjectOutputStream(bos); - out.writeObject(expected); - byte[] bytes = bos.toByteArray(); - - ByteArrayInputStream bis = new ByteArrayInputStream(bytes); - ObjectInput in = new ObjectInputStream(bis); - TooComplexToDeterminizeException e2 = (TooComplexToDeterminizeException) in.readObject(); - assertNotNull(e2.getMessage()); } // LUCENE-6046 diff --git a/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/AnalyzingSuggesterTest.java b/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/AnalyzingSuggesterTest.java index 590eb868288..06d44b9a8bf 100644 --- a/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/AnalyzingSuggesterTest.java +++ b/lucene/suggest/src/test/org/apache/lucene/search/suggest/analyzing/AnalyzingSuggesterTest.java @@ -1252,10 +1252,9 @@ public class AnalyzingSuggesterTest extends LuceneTestCase { suggester.build(new InputArrayIterator(new Input[] { new Input(bigString, 7)})); fail("did not hit expected exception"); - } catch (StackOverflowError soe) { - // OK } catch (IllegalArgumentException iae) { // expected + assertTrue(iae.getMessage().contains("input automaton is too large")); } IOUtils.close(a, tempDir); } From c1d28c3ece276ec7bc5376b111cb0e99042e27a0 Mon Sep 17 00:00:00 2001 From: Noble Paul Date: Fri, 4 Aug 2017 17:25:11 +0930 Subject: [PATCH 69/95] SOLR-11178: Change error handling in AutoScalingHandler to be consistent w/ other APIs --- solr/CHANGES.txt | 2 + .../cloud/autoscaling/AutoScalingHandler.java | 41 ++++++++++++++----- .../apache/solr/servlet/ResponseUtils.java | 12 +++--- .../autoscaling/AutoScalingHandlerTest.java | 25 ++++++++++- .../client/solrj/impl/HttpSolrClient.java | 11 ++--- .../org/apache/solr/common/util/Utils.java | 2 +- 6 files changed, 69 insertions(+), 24 deletions(-) diff --git 
a/solr/CHANGES.txt b/solr/CHANGES.txt index 012ac868224..b5bf725fbd6 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -616,6 +616,8 @@ Other Changes * SOLR-10033: When attempting to facet with facet.mincount=0 over points fields, raise mincount to 1 and log a warning. (Steve Rowe) +* SOLR-11178: Change error handling in AutoScalingHandler to be consistent w/ other APIs (noble) + ================== 6.7.0 ================== Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release. diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/AutoScalingHandler.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/AutoScalingHandler.java index 356ce379f7e..5ed5f8ef8e3 100644 --- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/AutoScalingHandler.java +++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/AutoScalingHandler.java @@ -21,6 +21,7 @@ import java.io.IOException; import java.lang.invoke.MethodHandles; import java.util.ArrayList; import java.util.Collection; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -167,16 +168,31 @@ public class AutoScalingHandler extends RequestHandlerBase implements Permission private void handleSetClusterPolicy(SolrQueryRequest req, SolrQueryResponse rsp, CommandOperation op) throws KeeperException, InterruptedException, IOException { List clusterPolicy = (List) op.getCommandData(); if (clusterPolicy == null || !(clusterPolicy instanceof List)) { - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "A list of cluster policies was not found"); + op.addError("A list of cluster policies was not found"); + checkErr(op); + } + + try { + zkSetClusterPolicy(container.getZkController().getZkStateReader(), clusterPolicy); + } catch (Exception e) { + log.warn("error persisting policies"); + op.addError(e.getMessage()); + checkErr(op); + } - zkSetClusterPolicy(container.getZkController().getZkStateReader(), 
clusterPolicy); rsp.getValues().add("result", "success"); } + private void checkErr(CommandOperation op) { + if (!op.hasError()) return; + throw new ApiBag.ExceptionWithErrObject(SolrException.ErrorCode.BAD_REQUEST, "Error in command payload", CommandOperation.captureErrors(Collections.singletonList(op))); + } + private void handleSetClusterPreferences(SolrQueryRequest req, SolrQueryResponse rsp, CommandOperation op) throws KeeperException, InterruptedException, IOException { List preferences = (List) op.getCommandData(); if (preferences == null || !(preferences instanceof List)) { - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "A list of cluster preferences not found"); + op.addError("A list of cluster preferences not found"); + checkErr(op); } zkSetPreferences(container.getZkController().getZkStateReader(), preferences); rsp.getValues().add("result", "success"); @@ -185,15 +201,13 @@ public class AutoScalingHandler extends RequestHandlerBase implements Permission private void handleRemovePolicy(SolrQueryRequest req, SolrQueryResponse rsp, CommandOperation op) throws KeeperException, InterruptedException, IOException { String policyName = (String) op.getCommandData(); - if (policyName.trim().length() == 0) { - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "The policy name cannot be empty"); - } + if (op.hasError()) checkErr(op); Map autoScalingConf = zkReadAutoScalingConf(container.getZkController().getZkStateReader()); Map policies = (Map) autoScalingConf.get("policies"); if (policies == null || !policies.containsKey(policyName)) { - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "No policy exists with name: " + policyName); + op.addError("No policy exists with name: " + policyName); } - + checkErr(op); zkSetPolicies(container.getZkController().getZkStateReader(), policyName, null); rsp.getValues().add("result", "success"); } @@ -203,11 +217,18 @@ public class AutoScalingHandler extends RequestHandlerBase implements 
Permission for (Map.Entry policy : policies.entrySet()) { String policyName = policy.getKey(); if (policyName == null || policyName.trim().length() == 0) { - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "The policy name cannot be null or empty"); + op.addError("The policy name cannot be null or empty"); } } + checkErr(op); - zkSetPolicies(container.getZkController().getZkStateReader(), null, policies); + try { + zkSetPolicies(container.getZkController().getZkStateReader(), null, policies); + } catch (Exception e) { + log.warn("error persisting policies", e); + op.addError(e.getMessage()); + checkErr(op); + } rsp.getValues().add("result", "success"); } diff --git a/solr/core/src/java/org/apache/solr/servlet/ResponseUtils.java b/solr/core/src/java/org/apache/solr/servlet/ResponseUtils.java index 90ca968c073..228bca81a03 100644 --- a/solr/core/src/java/org/apache/solr/servlet/ResponseUtils.java +++ b/solr/core/src/java/org/apache/solr/servlet/ResponseUtils.java @@ -15,15 +15,15 @@ * limitations under the License. */ package org.apache.solr.servlet; -import org.apache.solr.api.ApiBag; -import org.apache.solr.common.SolrException; -import org.apache.solr.common.util.NamedList; -import org.apache.solr.common.util.CommandOperation; -import org.slf4j.Logger; import java.io.PrintWriter; import java.io.StringWriter; +import org.apache.solr.api.ApiBag; +import org.apache.solr.common.SolrException; +import org.apache.solr.common.util.NamedList; +import org.slf4j.Logger; + /** * Response helper methods. 
*/ @@ -52,7 +52,7 @@ public class ResponseUtils { info.add("metadata", errorMetadata); if (ex instanceof ApiBag.ExceptionWithErrObject) { ApiBag.ExceptionWithErrObject exception = (ApiBag.ExceptionWithErrObject) ex; - info.add(CommandOperation.ERR_MSGS, exception.getErrs() ); + info.add("details", exception.getErrs() ); } } diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoScalingHandlerTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoScalingHandlerTest.java index 197801acfc5..9250e8c914c 100644 --- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoScalingHandlerTest.java +++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoScalingHandlerTest.java @@ -81,9 +81,11 @@ public class AutoScalingHandlerTest extends SolrCloudTestCase { try { solrClient.request(req); fail("Adding a policy with 'cores' attribute should not have succeeded."); - } catch (HttpSolrClient.RemoteSolrException e) { + } catch (HttpSolrClient.RemoteExecutionException e) { + String message = String.valueOf(Utils.getObjectByPath(e.getMetaData(), true, "error/details[0]/errorMessages[0]")); + // expected - assertTrue(e.getMessage().contains("cores is only allowed in 'cluster-policy'")); + assertTrue(message.contains("cores is only allowed in 'cluster-policy'")); } setPolicyCommand = "{'set-policy': {" + @@ -175,6 +177,25 @@ public class AutoScalingHandlerTest extends SolrCloudTestCase { assertNotNull(clusterPolicy); assertEquals(3, clusterPolicy.size()); } + public void testErrorHandling() throws Exception { + CloudSolrClient solrClient = cluster.getSolrClient(); + + String setClusterPolicyCommand = "{" + + " 'set-cluster-policy': [" + + " {'cores':'<10', 'node':'#ANY'}," + + " {'shard': '#EACH', 'node': '#ANY'}," + + " {'nodeRole':'overseer', 'replica':0}" + + " ]" + + "}"; + try { + SolrRequest req = createAutoScalingRequest(SolrRequest.METHOD.POST, setClusterPolicyCommand); + solrClient.request(req); + fail("expect exception"); + } catch 
(HttpSolrClient.RemoteExecutionException e) { + String message = String.valueOf(Utils.getObjectByPath(e.getMetaData(), true, "error/details[0]/errorMessages[0]")); + assertTrue(message.contains("replica is required in")); + } + } @Test public void testReadApi() throws Exception { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpSolrClient.java index 7566ba047c3..02d7c1a61ed 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpSolrClient.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpSolrClient.java @@ -23,6 +23,7 @@ import java.lang.invoke.MethodHandles; import java.net.ConnectException; import java.net.SocketTimeoutException; import java.nio.charset.StandardCharsets; +import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Iterator; @@ -497,7 +498,9 @@ public class HttpSolrClient extends SolrClient { throw new SolrServerException("Unsupported method: " + request.getMethod()); } - + + private static final List errPath = Arrays.asList("metadata", "error-class");//Utils.getObjectByPath(err, false,"metadata/error-class") + protected NamedList executeMethod(HttpRequestBase method, final ResponseParser processor, final boolean isV2Api) throws SolrServerException { method.addHeader("User-Agent", AGENT); @@ -596,11 +599,9 @@ public class HttpSolrClient extends SolrClient { } catch (Exception e) { throw new RemoteSolrException(baseUrl, httpStatus, e.getMessage(), e); } - if (isV2Api) { - Object err = rsp.get("error"); - if (err != null) { + Object error = rsp == null ? 
null : rsp.get("error"); + if (error != null && (isV2Api || String.valueOf(getObjectByPath(error, true, errPath)).endsWith("ExceptionWithErrObject"))) { throw RemoteExecutionException.create(baseUrl, rsp); - } } if (httpStatus != HttpStatus.SC_OK && !isV2Api) { NamedList metadata = null; diff --git a/solr/solrj/src/java/org/apache/solr/common/util/Utils.java b/solr/solrj/src/java/org/apache/solr/common/util/Utils.java index 0605a3505d1..8083886f243 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/Utils.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/Utils.java @@ -285,7 +285,7 @@ public class Utils { public static Object getObjectByPath(Object root, boolean onlyPrimitive, List hierarchy) { if(root == null) return null; - if(!isMapLike(root)) throw new RuntimeException("must be a Map or NamedList"); + if(!isMapLike(root)) return null; Object obj = root; for (int i = 0; i < hierarchy.size(); i++) { int idx = -1; From 9627d1db5dccd6dc9c0c307065628efea621d8e5 Mon Sep 17 00:00:00 2001 From: Steve Rowe Date: Fri, 4 Aug 2017 19:32:46 -0400 Subject: [PATCH 70/95] SOLR-11023: Added EnumFieldType, a non-Trie-based version of EnumField, and deprecated EnumField in favor of EnumFieldType. 
--- solr/CHANGES.txt | 5 + .../handler/component/StatsValuesFactory.java | 2 +- .../apache/solr/schema/AbstractEnumField.java | 311 +++++++++++++ .../org/apache/solr/schema/EnumField.java | 311 +------------ .../org/apache/solr/schema/EnumFieldType.java | 213 +++++++++ .../solr/search/SolrDocumentFetcher.java | 15 +- .../collection1/conf/bad-schema-enums.xml | 34 ++ .../solr/collection1/conf/enumsConfig.xml | 19 + .../solr/collection1/conf/schema-enums.xml | 8 +- .../conf/schema-non-stored-docvalues.xml | 2 +- .../solr/collection1/conf/schema-sorts.xml | 21 +- .../solr/collection1/conf/schema.xml | 2 +- .../solr/collection1/conf/schema11.xml | 8 +- .../apache/solr/TestDistributedSearch.java | 2 +- .../org/apache/solr/schema/EnumFieldTest.java | 437 ++++++++++++++++-- .../solr/schema/TestUseDocValuesAsStored.java | 2 +- solr/solr-ref-guide/src/docvalues.adoc | 2 +- .../src/field-types-included-with-solr.adoc | 3 +- .../src/working-with-enum-fields.adoc | 20 +- .../java/org/apache/solr/SolrTestCaseJ4.java | 2 + 20 files changed, 1038 insertions(+), 381 deletions(-) create mode 100644 solr/core/src/java/org/apache/solr/schema/AbstractEnumField.java create mode 100644 solr/core/src/java/org/apache/solr/schema/EnumFieldType.java create mode 100644 solr/core/src/test-files/solr/collection1/conf/bad-schema-enums.xml diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index b5bf725fbd6..3cb1014dae3 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -263,6 +263,8 @@ Upgrading from Solr 6.x * All deperated methods of ClusterState (except getZkClusterStateVersion()) have been removed. Use DocCollection methods instead. +* SOLR-11023: EnumField has been deprecated in favor of new EnumFieldType. + New Features ---------------------- * SOLR-9857, SOLR-9858: Collect aggregated metrics from nodes and shard leaders in overseer. 
(ab) @@ -618,6 +620,9 @@ Other Changes * SOLR-11178: Change error handling in AutoScalingHandler to be consistent w/ other APIs (noble) +* SOLR-11023: Added EnumFieldType, a non-Trie-based version of EnumField, and deprecated EnumField + in favor of EnumFieldType. (hossman, Steve Rowe) + ================== 6.7.0 ================== Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release. diff --git a/solr/core/src/java/org/apache/solr/handler/component/StatsValuesFactory.java b/solr/core/src/java/org/apache/solr/handler/component/StatsValuesFactory.java index d39ada255cf..752846cf1b8 100644 --- a/solr/core/src/java/org/apache/solr/handler/component/StatsValuesFactory.java +++ b/solr/core/src/java/org/apache/solr/handler/component/StatsValuesFactory.java @@ -78,7 +78,7 @@ public class StatsValuesFactory { return statsValue; } else if (StrField.class.isInstance(fieldType)) { return new StringStatsValues(statsField); - } else if (sf.getType().getClass().equals(EnumField.class)) { + } else if (AbstractEnumField.class.isInstance(fieldType)) { return new EnumStatsValues(statsField); } else { throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, diff --git a/solr/core/src/java/org/apache/solr/schema/AbstractEnumField.java b/solr/core/src/java/org/apache/solr/schema/AbstractEnumField.java new file mode 100644 index 00000000000..1111cec3d5e --- /dev/null +++ b/solr/core/src/java/org/apache/solr/schema/AbstractEnumField.java @@ -0,0 +1,311 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.solr.schema; + +import java.io.IOException; +import java.io.InputStream; +import java.lang.invoke.MethodHandles; +import java.util.Collections; +import java.util.HashMap; +import java.util.Locale; +import java.util.Map; +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.parsers.ParserConfigurationException; +import javax.xml.xpath.XPath; +import javax.xml.xpath.XPathConstants; +import javax.xml.xpath.XPathExpressionException; +import javax.xml.xpath.XPathFactory; + +import org.apache.lucene.index.IndexableField; +import org.apache.lucene.queries.function.ValueSource; +import org.apache.lucene.queries.function.valuesource.EnumFieldSource; +import org.apache.lucene.search.SortField; +import org.apache.lucene.util.BytesRefBuilder; +import org.apache.solr.common.EnumFieldValue; +import org.apache.solr.common.SolrException; +import org.apache.solr.response.TextResponseWriter; +import org.apache.solr.search.QParser; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.w3c.dom.Document; +import org.w3c.dom.Node; +import org.w3c.dom.NodeList; +import org.xml.sax.SAXException; + +/*** + * Abstract Field type for support of string values with custom sort order. 
+ */ +public abstract class AbstractEnumField extends PrimitiveFieldType { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + protected EnumMapping enumMapping; + + @Override + protected void init(IndexSchema schema, Map args) { + super.init(schema, args); + enumMapping = new EnumMapping(schema, this, args); + } + + public EnumMapping getEnumMapping() { + return enumMapping; + } + + /** + * Models all the info contained in an enums config XML file + * @lucene.internal + */ + public static final class EnumMapping { + public static final String PARAM_ENUMS_CONFIG = "enumsConfig"; + public static final String PARAM_ENUM_NAME = "enumName"; + public static final Integer DEFAULT_VALUE = -1; + + public final Map enumStringToIntMap; + public final Map enumIntToStringMap; + + protected final String enumsConfigFile; + protected final String enumName; + + /** + * Takes in a FieldType and the initArgs Map used for that type, removing the keys + * that specify the enum. 
+ * + * @param schema for opening resources + * @param fieldType Used for logging or error messages + * @param args the init args to comsume the enum name + config file from + */ + public EnumMapping(IndexSchema schema, FieldType fieldType, Map args) { + final String ftName = fieldType.getTypeName(); + + // NOTE: ghosting member variables for most of constructor + final Map enumStringToIntMap = new HashMap<>(); + final Map enumIntToStringMap = new HashMap<>(); + + enumsConfigFile = args.get(PARAM_ENUMS_CONFIG); + if (enumsConfigFile == null) { + throw new SolrException(SolrException.ErrorCode.NOT_FOUND, + ftName + ": No enums config file was configured."); + } + enumName = args.get(PARAM_ENUM_NAME); + if (enumName == null) { + throw new SolrException(SolrException.ErrorCode.NOT_FOUND, + ftName + ": No enum name was configured."); + } + + InputStream is = null; + + try { + is = schema.getResourceLoader().openResource(enumsConfigFile); + final DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); + try { + final Document doc = dbf.newDocumentBuilder().parse(is); + final XPathFactory xpathFactory = XPathFactory.newInstance(); + final XPath xpath = xpathFactory.newXPath(); + final String xpathStr = String.format(Locale.ROOT, "/enumsConfig/enum[@name='%s']", enumName); + final NodeList nodes = (NodeList) xpath.evaluate(xpathStr, doc, XPathConstants.NODESET); + final int nodesLength = nodes.getLength(); + if (nodesLength == 0) { + String exceptionMessage = String.format + (Locale.ENGLISH, "%s: No enum configuration found for enum '%s' in %s.", + ftName, enumName, enumsConfigFile); + throw new SolrException(SolrException.ErrorCode.NOT_FOUND, exceptionMessage); + } + if (nodesLength > 1) { + if (log.isWarnEnabled()) + log.warn("{}: More than one enum configuration found for enum '{}' in {}. 
The last one was taken.", + ftName, enumName, enumsConfigFile); + } + final Node enumNode = nodes.item(nodesLength - 1); + final NodeList valueNodes = (NodeList) xpath.evaluate("value", enumNode, XPathConstants.NODESET); + for (int i = 0; i < valueNodes.getLength(); i++) { + final Node valueNode = valueNodes.item(i); + final String valueStr = valueNode.getTextContent(); + if ((valueStr == null) || (valueStr.length() == 0)) { + final String exceptionMessage = String.format + (Locale.ENGLISH, "%s: A value was defined with an no value in enum '%s' in %s.", + ftName, enumName, enumsConfigFile); + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, exceptionMessage); + } + if (enumStringToIntMap.containsKey(valueStr)) { + final String exceptionMessage = String.format + (Locale.ENGLISH, "%s: A duplicated definition was found for value '%s' in enum '%s' in %s.", + ftName, valueStr, enumName, enumsConfigFile); + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, exceptionMessage); + } + enumIntToStringMap.put(i, valueStr); + enumStringToIntMap.put(valueStr, i); + } + } + catch (ParserConfigurationException | XPathExpressionException | SAXException e) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + ftName + ": Error parsing enums config.", e); + } + } + catch (IOException e) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + ftName + ": Error while opening enums config.", e); + } finally { + try { + if (is != null) { + is.close(); + } + } + catch (IOException e) { + e.printStackTrace(); + } + } + + if ((enumStringToIntMap.size() == 0) || (enumIntToStringMap.size() == 0)) { + String exceptionMessage = String.format + (Locale.ENGLISH, "%s: Invalid configuration was defined for enum '%s' in %s.", + ftName, enumName, enumsConfigFile); + throw new SolrException(SolrException.ErrorCode.NOT_FOUND, exceptionMessage); + } + + this.enumStringToIntMap = Collections.unmodifiableMap(enumStringToIntMap); + this.enumIntToStringMap = 
Collections.unmodifiableMap(enumIntToStringMap); + + args.remove(PARAM_ENUMS_CONFIG); + args.remove(PARAM_ENUM_NAME); + } + + + + /** + * Converting the (internal) integer value (indicating the sort order) to string (displayed) value + * @param intVal integer value + * @return string value + */ + public String intValueToStringValue(Integer intVal) { + if (intVal == null) + return null; + + final String enumString = enumIntToStringMap.get(intVal); + if (enumString != null) + return enumString; + // can't find matching enum name - return DEFAULT_VALUE.toString() + return DEFAULT_VALUE.toString(); + } + + /** + * Converting the string (displayed) value (internal) to integer value (indicating the sort order) + * @param stringVal string value + * @return integer value + */ + public Integer stringValueToIntValue(String stringVal) { + if (stringVal == null) + return null; + + Integer intValue; + final Integer enumInt = enumStringToIntMap.get(stringVal); + if (enumInt != null) //enum int found for string + return enumInt; + + //enum int not found for string + intValue = tryParseInt(stringVal); + if (intValue == null) //not Integer + intValue = DEFAULT_VALUE; + final String enumString = enumIntToStringMap.get(intValue); + if (enumString != null) //has matching string + return intValue; + + return DEFAULT_VALUE; + } + + private static Integer tryParseInt(String valueStr) { + Integer intValue = null; + try { + intValue = Integer.parseInt(valueStr); + } + catch (NumberFormatException e) { + } + return intValue; + } + } + + @Override + public EnumFieldValue toObject(IndexableField f) { + Integer intValue = null; + String stringValue = null; + final Number val = f.numericValue(); + if (val != null) { + intValue = val.intValue(); + stringValue = enumMapping.intValueToStringValue(intValue); + } + return new EnumFieldValue(intValue, stringValue); + } + + @Override + public SortField getSortField(SchemaField field, boolean top) { + field.checkSortability(); + final Object 
missingValue = Integer.MIN_VALUE; + SortField sf = new SortField(field.getName(), SortField.Type.INT, top); + sf.setMissingValue(missingValue); + return sf; + } + + @Override + public ValueSource getValueSource(SchemaField field, QParser qparser) { + field.checkFieldCacheSource(); + return new EnumFieldSource(field.getName(), enumMapping.enumIntToStringMap, enumMapping.enumStringToIntMap); + } + + @Override + public void write(TextResponseWriter writer, String name, IndexableField f) throws IOException { + final Number val = f.numericValue(); + if (val == null) { + writer.writeNull(name); + return; + } + + final String readableValue = enumMapping.intValueToStringValue(val.intValue()); + writer.writeStr(name, readableValue, true); + } + + @Override + public boolean isTokenized() { + return false; + } + + @Override + public NumberType getNumberType() { + return NumberType.INTEGER; + } + + @Override + public String readableToIndexed(String val) { + if (val == null) + return null; + + final BytesRefBuilder bytes = new BytesRefBuilder(); + readableToIndexed(val, bytes); + return bytes.get().utf8ToString(); + } + + @Override + public String toInternal(String val) { + return readableToIndexed(val); + } + + @Override + public String toExternal(IndexableField f) { + final Number val = f.numericValue(); + if (val == null) + return null; + + return enumMapping.intValueToStringValue(val.intValue()); + } +} diff --git a/solr/core/src/java/org/apache/solr/schema/EnumField.java b/solr/core/src/java/org/apache/solr/schema/EnumField.java index a60cd807421..8eec185bfb4 100644 --- a/solr/core/src/java/org/apache/solr/schema/EnumField.java +++ b/solr/core/src/java/org/apache/solr/schema/EnumField.java @@ -16,21 +16,11 @@ */ package org.apache.solr.schema; -import java.io.IOException; -import java.io.InputStream; import java.lang.invoke.MethodHandles; import java.util.ArrayList; import java.util.Collections; -import java.util.HashMap; import java.util.List; import java.util.Locale; 
-import java.util.Map; -import javax.xml.parsers.DocumentBuilderFactory; -import javax.xml.parsers.ParserConfigurationException; -import javax.xml.xpath.XPath; -import javax.xml.xpath.XPathConstants; -import javax.xml.xpath.XPathExpressionException; -import javax.xml.xpath.XPathFactory; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.SortedSetDocValuesField; @@ -40,153 +30,28 @@ import org.apache.solr.legacy.LegacyIntField; import org.apache.solr.legacy.LegacyNumericRangeQuery; import org.apache.solr.legacy.LegacyNumericType; import org.apache.solr.legacy.LegacyNumericUtils; -import org.apache.lucene.queries.function.ValueSource; -import org.apache.lucene.queries.function.valuesource.EnumFieldSource; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.SortField; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.CharsRef; import org.apache.lucene.util.CharsRefBuilder; import org.apache.solr.common.EnumFieldValue; import org.apache.solr.common.SolrException; -import org.apache.solr.response.TextResponseWriter; import org.apache.solr.search.QParser; import org.apache.solr.uninverting.UninvertingReader.Type; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.w3c.dom.Document; -import org.w3c.dom.Node; -import org.w3c.dom.NodeList; -import org.xml.sax.SAXException; -/*** +/** * Field type for support of string values with custom sort order. + * @deprecated use {@link EnumFieldType} instead. 
*/ -public class EnumField extends PrimitiveFieldType { +@Deprecated +public class EnumField extends AbstractEnumField { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); - protected static final String PARAM_ENUMS_CONFIG = "enumsConfig"; - protected static final String PARAM_ENUM_NAME = "enumName"; - protected static final Integer DEFAULT_VALUE = -1; protected static final int DEFAULT_PRECISION_STEP = Integer.MAX_VALUE; - - protected Map enumStringToIntMap = new HashMap<>(); - protected Map enumIntToStringMap = new HashMap<>(); - - protected String enumsConfigFile; - protected String enumName; - - /** - * {@inheritDoc} - */ - @Override - protected void init(IndexSchema schema, Map args) { - super.init(schema, args); - enumsConfigFile = args.get(PARAM_ENUMS_CONFIG); - if (enumsConfigFile == null) { - throw new SolrException(SolrException.ErrorCode.NOT_FOUND, "No enums config file was configured."); - } - enumName = args.get(PARAM_ENUM_NAME); - if (enumName == null) { - throw new SolrException(SolrException.ErrorCode.NOT_FOUND, "No enum name was configured."); - } - - InputStream is = null; - - try { - is = schema.getResourceLoader().openResource(enumsConfigFile); - final DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); - try { - final Document doc = dbf.newDocumentBuilder().parse(is); - final XPathFactory xpathFactory = XPathFactory.newInstance(); - final XPath xpath = xpathFactory.newXPath(); - final String xpathStr = String.format(Locale.ROOT, "/enumsConfig/enum[@name='%s']", enumName); - final NodeList nodes = (NodeList) xpath.evaluate(xpathStr, doc, XPathConstants.NODESET); - final int nodesLength = nodes.getLength(); - if (nodesLength == 0) { - String exceptionMessage = String.format(Locale.ENGLISH, "No enum configuration found for enum '%s' in %s.", - enumName, enumsConfigFile); - throw new SolrException(SolrException.ErrorCode.NOT_FOUND, exceptionMessage); - } - if (nodesLength > 1) { - if 
(log.isWarnEnabled()) - log.warn("More than one enum configuration found for enum '{}' in {}. The last one was taken.", enumName, enumsConfigFile); - } - final Node enumNode = nodes.item(nodesLength - 1); - final NodeList valueNodes = (NodeList) xpath.evaluate("value", enumNode, XPathConstants.NODESET); - for (int i = 0; i < valueNodes.getLength(); i++) { - final Node valueNode = valueNodes.item(i); - final String valueStr = valueNode.getTextContent(); - if ((valueStr == null) || (valueStr.length() == 0)) { - final String exceptionMessage = String.format(Locale.ENGLISH, "A value was defined with an no value in enum '%s' in %s.", - enumName, enumsConfigFile); - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, exceptionMessage); - } - if (enumStringToIntMap.containsKey(valueStr)) { - final String exceptionMessage = String.format(Locale.ENGLISH, "A duplicated definition was found for value '%s' in enum '%s' in %s.", - valueStr, enumName, enumsConfigFile); - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, exceptionMessage); - } - enumIntToStringMap.put(i, valueStr); - enumStringToIntMap.put(valueStr, i); - } - } - catch (ParserConfigurationException | XPathExpressionException | SAXException e) { - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Error parsing enums config.", e); - } - } - catch (IOException e) { - throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Error while opening enums config.", e); - } finally { - try { - if (is != null) { - is.close(); - } - } - catch (IOException e) { - e.printStackTrace(); - } - } - - if ((enumStringToIntMap.size() == 0) || (enumIntToStringMap.size() == 0)) { - String exceptionMessage = String.format(Locale.ENGLISH, "Invalid configuration was defined for enum '%s' in %s.", - enumName, enumsConfigFile); - throw new SolrException(SolrException.ErrorCode.NOT_FOUND, exceptionMessage); - } - - args.remove(PARAM_ENUMS_CONFIG); - args.remove(PARAM_ENUM_NAME); - } - - - /** - * 
{@inheritDoc} - */ - @Override - public EnumFieldValue toObject(IndexableField f) { - Integer intValue = null; - String stringValue = null; - final Number val = f.numericValue(); - if (val != null) { - intValue = val.intValue(); - stringValue = intValueToStringValue(intValue); - } - return new EnumFieldValue(intValue, stringValue); - } - - /** - * {@inheritDoc} - */ - @Override - public SortField getSortField(SchemaField field, boolean top) { - field.checkSortability(); - final Object missingValue = Integer.MIN_VALUE; - SortField sf = new SortField(field.getName(), SortField.Type.INT, top); - sf.setMissingValue(missingValue); - return sf; - } @Override public Type getUninversionType(SchemaField sf) { @@ -197,53 +62,10 @@ public class EnumField extends PrimitiveFieldType { } } - /** - * {@inheritDoc} - */ - @Override - public ValueSource getValueSource(SchemaField field, QParser qparser) { - field.checkFieldCacheSource(); - return new EnumFieldSource(field.getName(), enumIntToStringMap, enumStringToIntMap); - } - - /** - * {@inheritDoc} - */ - @Override - public void write(TextResponseWriter writer, String name, IndexableField f) throws IOException { - final Number val = f.numericValue(); - if (val == null) { - writer.writeNull(name); - return; - } - - final String readableValue = intValueToStringValue(val.intValue()); - writer.writeStr(name, readableValue, true); - } - - /** - * {@inheritDoc} - */ - @Override - public boolean isTokenized() { - return false; - } - - /** - * {@inheritDoc} - */ - @Override - public NumberType getNumberType() { - return NumberType.INTEGER; - } - - /** - * {@inheritDoc} - */ @Override public Query getRangeQuery(QParser parser, SchemaField field, String min, String max, boolean minInclusive, boolean maxInclusive) { - Integer minValue = stringValueToIntValue(min); - Integer maxValue = stringValueToIntValue(max); + Integer minValue = enumMapping.stringValueToIntValue(min); + Integer maxValue = enumMapping.stringValueToIntValue(max); if 
(field.multiValued() && field.hasDocValues() && !field.indexed()) { // for the multi-valued dv-case, the default rangeimpl over toInternal is correct @@ -277,90 +99,42 @@ public class EnumField extends PrimitiveFieldType { return query; } - /** - * {@inheritDoc} - */ - @Override - public String readableToIndexed(String val) { - if (val == null) - return null; - - final BytesRefBuilder bytes = new BytesRefBuilder(); - readableToIndexed(val, bytes); - return bytes.get().utf8ToString(); - } - - /** - * {@inheritDoc} - */ @Override public void readableToIndexed(CharSequence val, BytesRefBuilder result) { final String s = val.toString(); if (s == null) return; - final Integer intValue = stringValueToIntValue(s); + final Integer intValue = enumMapping.stringValueToIntValue(s); LegacyNumericUtils.intToPrefixCoded(intValue, 0, result); } - /** - * {@inheritDoc} - */ - @Override - public String toInternal(String val) { - return readableToIndexed(val); - } - - /** - * {@inheritDoc} - */ - @Override - public String toExternal(IndexableField f) { - final Number val = f.numericValue(); - if (val == null) - return null; - - return intValueToStringValue(val.intValue()); - } - - /** - * {@inheritDoc} - */ @Override public String indexedToReadable(String indexedForm) { if (indexedForm == null) return null; final BytesRef bytesRef = new BytesRef(indexedForm); final Integer intValue = LegacyNumericUtils.prefixCodedToInt(bytesRef); - return intValueToStringValue(intValue); + return enumMapping.intValueToStringValue(intValue); } - /** - * {@inheritDoc} - */ @Override public CharsRef indexedToReadable(BytesRef input, CharsRefBuilder output) { final Integer intValue = LegacyNumericUtils.prefixCodedToInt(input); - final String stringValue = intValueToStringValue(intValue); + final String stringValue = enumMapping.intValueToStringValue(intValue); output.grow(stringValue.length()); output.setLength(stringValue.length()); stringValue.getChars(0, output.length(), output.chars(), 0); return 
output.get(); } - /** - * {@inheritDoc} - */ @Override public EnumFieldValue toObject(SchemaField sf, BytesRef term) { final Integer intValue = LegacyNumericUtils.prefixCodedToInt(term); - final String stringValue = intValueToStringValue(intValue); + final String stringValue = enumMapping.intValueToStringValue(intValue); return new EnumFieldValue(intValue, stringValue); } - /** - * {@inheritDoc} - */ @Override public String storedToIndexed(IndexableField f) { final Number val = f.numericValue(); @@ -371,9 +145,6 @@ public class EnumField extends PrimitiveFieldType { return bytes.get().utf8ToString(); } - /** - * {@inheritDoc} - */ @Override public IndexableField createField(SchemaField field, Object value) { final boolean indexed = field.indexed(); @@ -385,8 +156,8 @@ public class EnumField extends PrimitiveFieldType { log.trace("Ignoring unindexed/unstored field: " + field); return null; } - final Integer intValue = stringValueToIntValue(value.toString()); - if (intValue == null || intValue.equals(DEFAULT_VALUE)) { + final Integer intValue = enumMapping.stringValueToIntValue(value.toString()); + if (intValue == null || intValue.equals(EnumMapping.DEFAULT_VALUE)) { String exceptionMessage = String.format(Locale.ENGLISH, "Unknown value for enum field: %s, value: %s", field.getName(), value.toString()); throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, exceptionMessage); @@ -408,9 +179,6 @@ public class EnumField extends PrimitiveFieldType { return new LegacyIntField(field.getName(), intValue.intValue(), newType); } - /** - * {@inheritDoc} - */ @Override public List createFields(SchemaField sf, Object value) { if (sf.hasDocValues()) { @@ -420,7 +188,7 @@ public class EnumField extends PrimitiveFieldType { if (sf.multiValued()) { BytesRefBuilder bytes = new BytesRefBuilder(); - readableToIndexed(stringValueToIntValue(value.toString()).toString(), bytes); + readableToIndexed(enumMapping.stringValueToIntValue(value.toString()).toString(), bytes); 
fields.add(new SortedSetDocValuesField(sf.getName(), bytes.toBytesRef())); } else { final long bits = field.numericValue().intValue(); @@ -431,57 +199,4 @@ public class EnumField extends PrimitiveFieldType { return Collections.singletonList(createField(sf, value)); } } - - /** - * Converting the (internal) integer value (indicating the sort order) to string (displayed) value - * @param intVal integer value - * @return string value - */ - public String intValueToStringValue(Integer intVal) { - if (intVal == null) - return null; - - final String enumString = enumIntToStringMap.get(intVal); - if (enumString != null) - return enumString; - // can't find matching enum name - return DEFAULT_VALUE.toString() - return DEFAULT_VALUE.toString(); - } - - /** - * Converting the string (displayed) value (internal) to integer value (indicating the sort order) - * @param stringVal string value - * @return integer value - */ - public Integer stringValueToIntValue(String stringVal) { - if (stringVal == null) - return null; - - Integer intValue; - final Integer enumInt = enumStringToIntMap.get(stringVal); - if (enumInt != null) //enum int found for string - return enumInt; - - //enum int not found for string - intValue = tryParseInt(stringVal); - if (intValue == null) //not Integer - intValue = DEFAULT_VALUE; - final String enumString = enumIntToStringMap.get(intValue); - if (enumString != null) //has matching string - return intValue; - - return DEFAULT_VALUE; - } - - private static Integer tryParseInt(String valueStr) { - Integer intValue = null; - try { - intValue = Integer.parseInt(valueStr); - } - catch (NumberFormatException e) { - } - return intValue; - } - } - diff --git a/solr/core/src/java/org/apache/solr/schema/EnumFieldType.java b/solr/core/src/java/org/apache/solr/schema/EnumFieldType.java new file mode 100644 index 00000000000..4bda8237b21 --- /dev/null +++ b/solr/core/src/java/org/apache/solr/schema/EnumFieldType.java @@ -0,0 +1,213 @@ +/* + * Licensed to the Apache 
Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.solr.schema; + +import java.lang.invoke.MethodHandles; +import java.util.ArrayList; +import java.util.List; +import java.util.Locale; + +import org.apache.lucene.document.Field; +import org.apache.lucene.document.NumericDocValuesField; +import org.apache.lucene.document.SortedNumericDocValuesField; +import org.apache.lucene.index.IndexableField; +import org.apache.lucene.queries.function.ValueSource; +import org.apache.lucene.queries.function.valuesource.MultiValuedIntFieldSource; +import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.SortedNumericSelector; +import org.apache.lucene.search.TermRangeQuery; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; +import org.apache.lucene.util.CharsRef; +import org.apache.lucene.util.CharsRefBuilder; +import org.apache.lucene.util.NumericUtils; +import org.apache.solr.common.EnumFieldValue; +import org.apache.solr.common.SolrException; +import org.apache.solr.search.QParser; +import org.apache.solr.uninverting.UninvertingReader.Type; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Field type for support of 
string values with custom sort order. + */ +public class EnumFieldType extends AbstractEnumField { + private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + + @Override + public Type getUninversionType(SchemaField sf) { + return null; + } + + @Override + public Query getRangeQuery(QParser parser, SchemaField field, String min, String max, boolean minInclusive, boolean maxInclusive) { + Integer minValue = enumMapping.stringValueToIntValue(min); + Integer maxValue = enumMapping.stringValueToIntValue(max); + + if (field.indexed()) { + BytesRef minBytes = null; + if (min != null) { + byte[] bytes = new byte[Integer.BYTES]; + NumericUtils.intToSortableBytes(minValue, bytes, 0); + minBytes = new BytesRef(bytes); + } + BytesRef maxBytes = null; + if (max != null) { + byte[] bytes = new byte[Integer.BYTES]; + NumericUtils.intToSortableBytes(maxValue, bytes, 0); + maxBytes = new BytesRef(bytes); + } + return new TermRangeQuery(field.getName(), minBytes, maxBytes, minInclusive, maxInclusive); + + } else { + long lowerValue = Long.MIN_VALUE; + long upperValue = Long.MAX_VALUE; + if (minValue != null) { + lowerValue = minValue.longValue(); + if (minInclusive == false) { + ++lowerValue; + } + } + if (maxValue != null) { + upperValue = maxValue.longValue(); + if (maxInclusive == false) { + --upperValue; + } + } + if (field.multiValued()) { + return new ConstantScoreQuery(SortedNumericDocValuesField.newSlowRangeQuery + (field.getName(), lowerValue, upperValue)); + } else { + return new ConstantScoreQuery(NumericDocValuesField.newSlowRangeQuery + (field.getName(), lowerValue, upperValue)); + } + } + } + + @Override + public void readableToIndexed(CharSequence val, BytesRefBuilder result) { + final String s = val.toString(); + if (s == null) + return; + + result.grow(Integer.BYTES); + result.setLength(Integer.BYTES); + final Integer intValue = enumMapping.stringValueToIntValue(s); + NumericUtils.intToSortableBytes(intValue, result.bytes(), 
0); + } + + @Override + public String indexedToReadable(String indexedForm) { + if (indexedForm == null) + return null; + final BytesRef bytesRef = new BytesRef(indexedForm); + final Integer intValue = NumericUtils.sortableBytesToInt(bytesRef.bytes, 0); + return enumMapping.intValueToStringValue(intValue); + } + + @Override + public CharsRef indexedToReadable(BytesRef input, CharsRefBuilder output) { + final Integer intValue = NumericUtils.sortableBytesToInt(input.bytes, 0); + final String stringValue = enumMapping.intValueToStringValue(intValue); + output.grow(stringValue.length()); + output.setLength(stringValue.length()); + stringValue.getChars(0, output.length(), output.chars(), 0); + return output.get(); + } + + @Override + public EnumFieldValue toObject(SchemaField sf, BytesRef term) { + final Integer intValue = NumericUtils.sortableBytesToInt(term.bytes, 0); + final String stringValue = enumMapping.intValueToStringValue(intValue); + return new EnumFieldValue(intValue, stringValue); + } + + @Override + public String storedToIndexed(IndexableField f) { + final Number val = f.numericValue(); + if (val == null) + return null; + final BytesRefBuilder bytes = new BytesRefBuilder(); + bytes.grow(Integer.BYTES); + bytes.setLength(Integer.BYTES); + NumericUtils.intToSortableBytes(val.intValue(), bytes.bytes(), 0); + return bytes.get().utf8ToString(); + } + + @Override + public IndexableField createField(SchemaField field, Object value) { + final Integer intValue = enumMapping.stringValueToIntValue(value.toString()); + if (intValue == null || intValue.equals(EnumMapping.DEFAULT_VALUE)) { + String exceptionMessage = String.format(Locale.ENGLISH, "Unknown value for enum field: %s, value: %s", + field.getName(), value.toString()); + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, exceptionMessage); + } + + org.apache.lucene.document.FieldType newType = new org.apache.lucene.document.FieldType(); + newType.setTokenized(false); + 
newType.setStored(field.stored()); + newType.setOmitNorms(field.omitNorms()); + newType.setIndexOptions(field.indexOptions()); + newType.setStoreTermVectors(field.storeTermVector()); + newType.setStoreTermVectorOffsets(field.storeTermOffsets()); + newType.setStoreTermVectorPositions(field.storeTermPositions()); + newType.setStoreTermVectorPayloads(field.storeTermPayloads()); + + byte[] bytes = new byte[Integer.BYTES]; + NumericUtils.intToSortableBytes(intValue, bytes, 0); + return new Field(field.getName(), bytes, newType) { + @Override public Number numericValue() { + return NumericUtils.sortableBytesToInt(((BytesRef)fieldsData).bytes, 0); + } + }; + } + + @Override + public List createFields(SchemaField sf, Object value) { + if ( ! sf.hasDocValues()) { + throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, + getClass().getSimpleName() + " requires docValues=\"true\"."); + } + final IndexableField field = createField(sf, value); + final List fields = new ArrayList<>(); + fields.add(field); + final long longValue = field.numericValue().longValue(); + if (sf.multiValued()) { + fields.add(new SortedNumericDocValuesField(sf.getName(), longValue)); + } else { + fields.add(new NumericDocValuesField(sf.getName(), longValue)); + } + return fields; + } + + @Override + public final ValueSource getSingleValueSource(MultiValueSelector choice, SchemaField field, QParser parser) { + if ( ! 
field.multiValued()) { // trivial base case + return getValueSource(field, parser); // single value matches any selector + } + SortedNumericSelector.Type selectorType = choice.getSortedNumericSelectorType(); + if (null == selectorType) { + throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, + choice.toString() + " is not a supported option for picking a single value" + + " from the multivalued field: " + field.getName() + + " (type: " + this.getTypeName() + ")"); + } + return new MultiValuedIntFieldSource(field.getName(), selectorType); + } +} diff --git a/solr/core/src/java/org/apache/solr/search/SolrDocumentFetcher.java b/solr/core/src/java/org/apache/solr/search/SolrDocumentFetcher.java index 267d4ebd560..8db8f6dd84c 100644 --- a/solr/core/src/java/org/apache/solr/search/SolrDocumentFetcher.java +++ b/solr/core/src/java/org/apache/solr/search/SolrDocumentFetcher.java @@ -56,7 +56,7 @@ import org.apache.lucene.util.NumericUtils; import org.apache.solr.common.SolrDocumentBase; import org.apache.solr.core.SolrConfig; import org.apache.solr.schema.BoolField; -import org.apache.solr.schema.EnumField; +import org.apache.solr.schema.AbstractEnumField; import org.apache.solr.schema.NumberType; import org.apache.solr.schema.SchemaField; import org.apache.solr.schema.TrieDateField; @@ -465,8 +465,8 @@ public class SolrDocumentFetcher { newVal = Double.longBitsToDouble(val); } else if (schemaField.getType() instanceof TrieDateField) { newVal = new Date(val); - } else if (schemaField.getType() instanceof EnumField) { - newVal = ((EnumField) schemaField.getType()).intValueToStringValue(val.intValue()); + } else if (schemaField.getType() instanceof AbstractEnumField) { + newVal = ((AbstractEnumField)schemaField.getType()).getEnumMapping().intValueToStringValue(val.intValue()); } } doc.addField(fieldName, newVal); @@ -501,7 +501,7 @@ public class SolrDocumentFetcher { break; case SORTED_NUMERIC: final SortedNumericDocValues numericDv = 
leafReader.getSortedNumericDocValues(fieldName); - NumberType type = schemaField.getType().getNumberType(); + final NumberType type = schemaField.getType().getNumberType(); if (numericDv != null) { if (numericDv.advance(localId) == localId) { final List outValues = new ArrayList(numericDv.docValueCount()); @@ -509,7 +509,12 @@ public class SolrDocumentFetcher { long number = numericDv.nextValue(); switch (type) { case INTEGER: - outValues.add((int)number); + final int raw = (int)number; + if (schemaField.getType() instanceof AbstractEnumField) { + outValues.add(((AbstractEnumField)schemaField.getType()).getEnumMapping().intValueToStringValue(raw)); + } else { + outValues.add(raw); + } break; case LONG: outValues.add(number); diff --git a/solr/core/src/test-files/solr/collection1/conf/bad-schema-enums.xml b/solr/core/src/test-files/solr/collection1/conf/bad-schema-enums.xml new file mode 100644 index 00000000000..675f45efab5 --- /dev/null +++ b/solr/core/src/test-files/solr/collection1/conf/bad-schema-enums.xml @@ -0,0 +1,34 @@ + + + + + + + + + + + + id + + + + + + diff --git a/solr/core/src/test-files/solr/collection1/conf/enumsConfig.xml b/solr/core/src/test-files/solr/collection1/conf/enumsConfig.xml index 726c8297de7..9bfbfc35447 100644 --- a/solr/core/src/test-files/solr/collection1/conf/enumsConfig.xml +++ b/solr/core/src/test-files/solr/collection1/conf/enumsConfig.xml @@ -27,7 +27,26 @@ Low Medium High + + x4 + x5 + x6 + x7 + x8 + x9 + x10 Critical + + x12 + x13 + x14 + x15 + x16 + x17 + x18 diff --git a/solr/core/src/test-files/solr/collection1/conf/schema-enums.xml b/solr/core/src/test-files/solr/collection1/conf/schema-enums.xml index 85b4ffaac71..29bede8ee40 100644 --- a/solr/core/src/test-files/solr/collection1/conf/schema-enums.xml +++ b/solr/core/src/test-files/solr/collection1/conf/schema-enums.xml @@ -18,9 +18,9 @@ - - - + + + id @@ -32,7 +32,7 @@ - + diff --git a/solr/core/src/test-files/solr/collection1/conf/schema-non-stored-docvalues.xml 
b/solr/core/src/test-files/solr/collection1/conf/schema-non-stored-docvalues.xml index 4e631becdf2..6477bd52754 100644 --- a/solr/core/src/test-files/solr/collection1/conf/schema-non-stored-docvalues.xml +++ b/solr/core/src/test-files/solr/collection1/conf/schema-non-stored-docvalues.xml @@ -26,7 +26,7 @@ - + diff --git a/solr/core/src/test-files/solr/collection1/conf/schema-sorts.xml b/solr/core/src/test-files/solr/collection1/conf/schema-sorts.xml index e2ba0f37d82..70edcb221c2 100644 --- a/solr/core/src/test-files/solr/collection1/conf/schema-sorts.xml +++ b/solr/core/src/test-files/solr/collection1/conf/schema-sorts.xml @@ -109,10 +109,9 @@ NOTE: Tests expect every field in this schema to be sortable. - - - - + + + - - - + + + @@ -309,9 +307,8 @@ NOTE: Tests expect every field in this schema to be sortable. sortMissingLast="true"/> - - - - + + + diff --git a/solr/core/src/test-files/solr/collection1/conf/schema.xml b/solr/core/src/test-files/solr/collection1/conf/schema.xml index 4aaef4842dd..1f6146d8fa2 100644 --- a/solr/core/src/test-files/solr/collection1/conf/schema.xml +++ b/solr/core/src/test-files/solr/collection1/conf/schema.xml @@ -506,7 +506,7 @@ - + diff --git a/solr/core/src/test-files/solr/collection1/conf/schema11.xml b/solr/core/src/test-files/solr/collection1/conf/schema11.xml index 8b317b046f1..25b7e22bf7f 100644 --- a/solr/core/src/test-files/solr/collection1/conf/schema11.xml +++ b/solr/core/src/test-files/solr/collection1/conf/schema11.xml @@ -295,8 +295,8 @@ valued. --> - - + + - - + + - +