SOLR-8475: Some refactoring to SolrIndexSearcher

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1723910 13f79535-47bb-0310-9956-ffa450edef68
Shai Erera 2016-01-10 08:30:26 +00:00
parent 7986b8248f
commit 556202dfca
18 changed files with 1386 additions and 1274 deletions

View File

@ -98,7 +98,7 @@ org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant
org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header=true
org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header=true
org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases=true
org.eclipse.jdt.core.formatter.indent_empty_lines=true
org.eclipse.jdt.core.formatter.indent_empty_lines=false
org.eclipse.jdt.core.formatter.indent_statements_compare_to_block=true
org.eclipse.jdt.core.formatter.indent_statements_compare_to_body=true
org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases=true
@ -286,7 +286,7 @@ org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constan
org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration=do not insert
org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation=do not insert
org.eclipse.jdt.core.formatter.join_lines_in_comments=true
org.eclipse.jdt.core.formatter.join_wrapped_lines=true
org.eclipse.jdt.core.formatter.join_wrapped_lines=false
org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line=true
org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line=false
org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line=false

View File

@ -74,6 +74,9 @@ Upgrading from Solr 5.x
then please ensure that the solrconfig.xml explicitly uses the ClassicIndexSchemaFactory :
<schemaFactory class="ClassicIndexSchemaFactory"/> or your luceneMatchVersion in the solrconfig.xml is less than 6.0
* SolrIndexSearcher.QueryCommand and QueryResult were moved to their own classes. If you reference them
in your code, you should import them under o.a.s.search (or use your IDE's "Organize Imports").
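  For reference, the migration amounts to switching imports and dropping the outer class prefix; a minimal sketch, mirroring the changes made throughout this commit:

    // Solr 5.x: nested classes on SolrIndexSearcher
    import org.apache.solr.search.SolrIndexSearcher;
    SolrIndexSearcher.QueryCommand cmd = new SolrIndexSearcher.QueryCommand();
    SolrIndexSearcher.QueryResult result = new SolrIndexSearcher.QueryResult();

    // Solr 6.0: top-level classes in org.apache.solr.search
    import org.apache.solr.search.QueryCommand;
    import org.apache.solr.search.QueryResult;
    QueryCommand cmd = new QueryCommand();
    QueryResult result = new QueryResult();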
Detailed Change List
----------------------

View File

@ -81,7 +81,9 @@ import org.apache.solr.search.DocSlice;
import org.apache.solr.search.Grouping;
import org.apache.solr.search.QParser;
import org.apache.solr.search.QParserPlugin;
import org.apache.solr.search.QueryCommand;
import org.apache.solr.search.QueryParsing;
import org.apache.solr.search.QueryResult;
import org.apache.solr.search.RankQuery;
import org.apache.solr.search.ReturnFields;
import org.apache.solr.search.SolrIndexSearcher;
@ -365,12 +367,12 @@ public class QueryComponent extends SearchComponent
CursorMarkParams.CURSOR_MARK_PARAM + " and " + CommonParams.TIME_ALLOWED);
}
SolrIndexSearcher.QueryCommand cmd = rb.getQueryCommand();
QueryCommand cmd = rb.getQueryCommand();
cmd.setTimeAllowed(timeAllowed);
req.getContext().put(SolrIndexSearcher.STATS_SOURCE, statsCache.get(req));
SolrIndexSearcher.QueryResult result = new SolrIndexSearcher.QueryResult();
QueryResult result = new QueryResult();
//
// grouping / field collapsing

View File

@ -32,7 +32,8 @@ import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.search.CursorMark;
import org.apache.solr.search.DocListAndSet;
import org.apache.solr.search.QParser;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.search.QueryCommand;
import org.apache.solr.search.QueryResult;
import org.apache.solr.search.SortSpec;
import org.apache.solr.search.RankQuery;
import org.apache.solr.search.grouping.GroupingSpecification;
@ -419,8 +420,8 @@ public class ResponseBuilder
* Creates a SolrIndexSearcher.QueryCommand from this
* ResponseBuilder. TimeAllowed is left unset.
*/
public SolrIndexSearcher.QueryCommand getQueryCommand() {
SolrIndexSearcher.QueryCommand cmd = new SolrIndexSearcher.QueryCommand();
public QueryCommand getQueryCommand() {
QueryCommand cmd = new QueryCommand();
cmd.setQuery(wrap(getQuery()))
.setFilterList(getFilters())
.setSort(getSortSpec().getSort())
@ -444,7 +445,7 @@ public class ResponseBuilder
/**
* Sets results from a SolrIndexSearcher.QueryResult.
*/
public void setResult(SolrIndexSearcher.QueryResult result) {
public void setResult(QueryResult result) {
setResults(result.getDocListAndSet());
if (result.isPartialResults()) {
rsp.getResponseHeader().add(SolrQueryResponse.RESPONSE_HEADER_PARTIAL_RESULTS_KEY, Boolean.TRUE);
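Taken together with getQueryCommand() above, the typical flow in a search component now reads roughly as follows (a sketch assembled from pieces of this commit, not the verbatim QueryComponent code):

    QueryCommand cmd = rb.getQueryCommand();   // built from the ResponseBuilder state
    cmd.setTimeAllowed(timeAllowed);           // optional; left unset by getQueryCommand()
    QueryResult result = new QueryResult();
    result = searcher.search(result, cmd);     // SolrIndexSearcher populates the result
    rb.setResult(result);                      // publishes the DocListAndSet and partialResults flag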

View File

@ -17,27 +17,38 @@
package org.apache.solr.response;
import org.apache.solr.internal.csv.CSVPrinter;
import org.apache.solr.internal.csv.CSVStrategy;
import java.io.CharArrayWriter;
import java.io.IOException;
import java.io.Writer;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import org.apache.lucene.index.IndexableField;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.util.DateUtil;
import org.apache.solr.util.FastWriter;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.internal.csv.CSVPrinter;
import org.apache.solr.internal.csv.CSVStrategy;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.schema.FieldType;
import org.apache.solr.schema.SchemaField;
import org.apache.solr.schema.StrField;
import org.apache.solr.search.DocList;
import org.apache.solr.search.ReturnFields;
import org.apache.solr.util.FastWriter;
import java.io.CharArrayWriter;
import java.io.IOException;
import java.io.Writer;
import java.util.*;
import com.google.common.collect.Iterables;
import com.google.common.collect.Sets;
/**
*
@ -249,12 +260,11 @@ class CSVWriter extends TextResponseWriter {
}
} else {
// get the list of fields from the index
Collection<String> all = req.getSearcher().getFieldNames();
if(fields==null) {
fields = all;
}
else {
fields.addAll(all);
Iterable<String> all = req.getSearcher().getFieldNames();
if (fields == null) {
fields = Sets.newHashSet(all);
} else {
Iterables.addAll(fields, all);
}
}
if (returnFields.wantsScore()) {

View File

@ -90,7 +90,7 @@ public class ExportQParserPlugin extends QParserPlugin {
}
public TopDocsCollector getTopDocsCollector(int len,
SolrIndexSearcher.QueryCommand cmd,
QueryCommand cmd,
IndexSearcher searcher) throws IOException {
int leafCount = searcher.getTopReaderContext().leaves().size();
FixedBitSet[] sets = new FixedBitSet[leafCount];
@ -181,4 +181,4 @@ public class ExportQParserPlugin extends QParserPlugin {
}
}
}
}

View File

@ -83,8 +83,8 @@ public class Grouping {
private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private final SolrIndexSearcher searcher;
private final SolrIndexSearcher.QueryResult qr;
private final SolrIndexSearcher.QueryCommand cmd;
private final QueryResult qr;
private final QueryCommand cmd;
private final List<Command> commands = new ArrayList<>();
private final boolean main;
private final boolean cacheSecondPassSearch;
@ -124,8 +124,8 @@ public class Grouping {
* the cache is not used in the second pass search.
*/
public Grouping(SolrIndexSearcher searcher,
SolrIndexSearcher.QueryResult qr,
SolrIndexSearcher.QueryCommand cmd,
QueryResult qr,
QueryCommand cmd,
boolean cacheSecondPassSearch,
int maxDocsPercentageToCache,
boolean main) {

View File

@ -0,0 +1,211 @@
package org.apache.solr.search;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Sort;
/**
* A query request command to avoid having to change the method signatures if we want to pass additional information
* to the searcher.
*/
public class QueryCommand {
private Query query;
private List<Query> filterList;
private DocSet filter;
private Sort sort;
private int offset;
private int len;
private int supersetMaxDoc;
private int flags;
private long timeAllowed = -1;
private CursorMark cursorMark;
public CursorMark getCursorMark() {
return cursorMark;
}
public QueryCommand setCursorMark(CursorMark cursorMark) {
this.cursorMark = cursorMark;
if (null != cursorMark) {
// If we're using a cursor then we can't allow queryResult caching because the
// cache keys don't know anything about the collector used.
//
// in theory, we could enhance the cache keys to be aware of the searchAfter
// FieldDoc but then there would still be complexity around things like the cache
// window size that would need to be worked out
//
// we *can* however allow the use of checking the filterCache for non-score based
// sorts, because that still runs our paging collector over the entire DocSet
this.flags |= (SolrIndexSearcher.NO_CHECK_QCACHE | SolrIndexSearcher.NO_SET_QCACHE);
}
return this;
}
public Query getQuery() {
return query;
}
public QueryCommand setQuery(Query query) {
this.query = query;
return this;
}
public List<Query> getFilterList() {
return filterList;
}
/**
* @throws IllegalArgumentException
* if filter is not null.
*/
public QueryCommand setFilterList(List<Query> filterList) {
if (filter != null) {
throw new IllegalArgumentException("Either filter or filterList may be set in the QueryCommand, but not both.");
}
this.filterList = filterList;
return this;
}
/**
* A simple setter to build a filterList from a query
*
* @throws IllegalArgumentException
* if filter is not null.
*/
public QueryCommand setFilterList(Query f) {
if (filter != null) {
throw new IllegalArgumentException("Either filter or filterList may be set in the QueryCommand, but not both.");
}
filterList = null;
if (f != null) {
filterList = new ArrayList<>(2);
filterList.add(f);
}
return this;
}
public DocSet getFilter() {
return filter;
}
/**
* @throws IllegalArgumentException
* if filterList is not null.
*/
public QueryCommand setFilter(DocSet filter) {
if (filterList != null) {
throw new IllegalArgumentException("Either filter or filterList may be set in the QueryCommand, but not both.");
}
this.filter = filter;
return this;
}
public Sort getSort() {
return sort;
}
public QueryCommand setSort(Sort sort) {
this.sort = sort;
return this;
}
public int getOffset() {
return offset;
}
public QueryCommand setOffset(int offset) {
this.offset = offset;
return this;
}
public int getLen() {
return len;
}
public QueryCommand setLen(int len) {
this.len = len;
return this;
}
public int getSupersetMaxDoc() {
return supersetMaxDoc;
}
public QueryCommand setSupersetMaxDoc(int supersetMaxDoc) {
this.supersetMaxDoc = supersetMaxDoc;
return this;
}
public int getFlags() {
return flags;
}
public QueryCommand replaceFlags(int flags) {
this.flags = flags;
return this;
}
public QueryCommand setFlags(int flags) {
this.flags |= flags;
return this;
}
public QueryCommand clearFlags(int flags) {
this.flags &= ~flags;
return this;
}
public long getTimeAllowed() {
return timeAllowed;
}
public QueryCommand setTimeAllowed(long timeAllowed) {
this.timeAllowed = timeAllowed;
return this;
}
public boolean isNeedDocSet() {
return (flags & SolrIndexSearcher.GET_DOCSET) != 0;
}
public QueryCommand setNeedDocSet(boolean needDocSet) {
if (needDocSet) {
return setFlags(SolrIndexSearcher.GET_DOCSET);
} else {
return clearFlags(SolrIndexSearcher.GET_DOCSET);
}
}
public boolean getTerminateEarly() {
return (flags & SolrIndexSearcher.TERMINATE_EARLY) != 0;
}
public QueryCommand setTerminateEarly(boolean segmentTerminateEarly) {
if (segmentTerminateEarly) {
return setFlags(SolrIndexSearcher.TERMINATE_EARLY);
} else {
return clearFlags(SolrIndexSearcher.TERMINATE_EARLY);
}
}
}
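As an illustration, a minimal sketch of building and running a command with the fluent setters (assumes a SolrIndexSearcher named searcher is in scope, as in the SolrPluginUtilsTest change further down):

    import org.apache.lucene.search.MatchAllDocsQuery;
    import org.apache.solr.search.QueryCommand;
    import org.apache.solr.search.QueryResult;

    QueryCommand cmd = new QueryCommand()
        .setQuery(new MatchAllDocsQuery())
        .setLen(10)                  // number of documents to return
        .setNeedDocSet(true)         // sets the SolrIndexSearcher.GET_DOCSET flag
        .setTimeAllowed(500);        // in milliseconds; the default -1 means no limit
    QueryResult result = searcher.search(new QueryResult(), cmd);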

View File

@ -0,0 +1,77 @@
package org.apache.solr.search;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* The result of a search.
*/
public class QueryResult {
private boolean partialResults;
private DocListAndSet docListAndSet;
private CursorMark nextCursorMark;
public Object groupedResults; // TODO: currently for testing
public DocList getDocList() {
return docListAndSet.docList;
}
public void setDocList(DocList list) {
if (docListAndSet == null) {
docListAndSet = new DocListAndSet();
}
docListAndSet.docList = list;
}
public DocSet getDocSet() {
return docListAndSet.docSet;
}
public void setDocSet(DocSet set) {
if (docListAndSet == null) {
docListAndSet = new DocListAndSet();
}
docListAndSet.docSet = set;
}
public boolean isPartialResults() {
return partialResults;
}
public void setPartialResults(boolean partialResults) {
this.partialResults = partialResults;
}
public void setDocListAndSet(DocListAndSet listSet) {
docListAndSet = listSet;
}
public DocListAndSet getDocListAndSet() {
return docListAndSet;
}
public void setNextCursorMark(CursorMark next) {
this.nextCursorMark = next;
}
public CursorMark getNextCursorMark() {
return nextCursorMark;
}
}
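And, for completeness, a short sketch of reading a result back after a search (hypothetical variable names; the accessors are the ones defined above):

    DocList docs = result.getDocList();            // the requested page of documents
    DocSet matches = result.getDocSet();           // populated when GET_DOCSET was requested
    if (result.isPartialResults()) {
      // timeAllowed expired before collection finished; docs may be incomplete
    }
    CursorMark next = result.getNextCursorMark();  // null unless cursor paging was used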

View File

@ -30,7 +30,7 @@ import java.io.IOException;
public abstract class RankQuery extends ExtendedQueryBase {
public abstract TopDocsCollector getTopDocsCollector(int len, SolrIndexSearcher.QueryCommand cmd, IndexSearcher searcher) throws IOException;
public abstract TopDocsCollector getTopDocsCollector(int len, QueryCommand cmd, IndexSearcher searcher) throws IOException;
public abstract MergeStrategy getMergeStrategy();
public abstract RankQuery wrap(Query mainQuery);

View File

@ -138,7 +138,7 @@ public class ReRankQParserPlugin extends QParserPlugin {
return null;
}
public TopDocsCollector getTopDocsCollector(int len, SolrIndexSearcher.QueryCommand cmd, IndexSearcher searcher) throws IOException {
public TopDocsCollector getTopDocsCollector(int len, QueryCommand cmd, IndexSearcher searcher) throws IOException {
if(this.boostedPriority == null) {
SolrRequestInfo info = SolrRequestInfo.getRequestInfo();
@ -234,7 +234,7 @@ public class ReRankQParserPlugin extends QParserPlugin {
int length,
Query reRankQuery,
double reRankWeight,
SolrIndexSearcher.QueryCommand cmd,
QueryCommand cmd,
IndexSearcher searcher,
Map<BytesRef, Integer> boostedPriority) throws IOException {
super(null);
@ -415,4 +415,4 @@ public class ReRankQParserPlugin extends QParserPlugin {
return -Float.compare(score1, score2);
}
}
}
}

View File

@ -24,18 +24,16 @@ import java.util.Map;
/**
* Primary API for dealing with Solr's internal caches.
*
*
*/
public interface SolrCache<K,V> extends SolrInfoMBean {
/**
* The initialization routine. Instance specific arguments are passed in
* The initialization routine. Instance specific arguments are passed in
* the <code>args</code> map.
* <p>
* The persistence object will exist across different lifetimes of similar caches.
* For example, all filter caches will share the same persistence object, sometimes
* at the same time (it must be threadsafe). If null is passed, then the cache
* at the same time (it must be thread-safe). If null is passed, then the cache
* implementation should create and return a new persistence object. If not null,
* the passed in object should be returned again.
* <p>
@ -48,7 +46,7 @@ public interface SolrCache<K,V> extends SolrInfoMBean {
* object may be of any type desired by the cache implementation.
* <p>
* The {@link CacheRegenerator} is what the cache uses during auto-warming to
* renenerate an item in the new cache from an entry in the old cache.
* regenerate an item in the new cache from an entry in the old cache.
*
*/
public Object init(Map args, Object persistence, CacheRegenerator regenerator);
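For illustration, a hedged sketch of an init implementation that follows the persistence contract described above (hypothetical cache class; the regenerator and persistence fields are assumptions):

    public Object init(Map args, Object persistence, CacheRegenerator regenerator) {
      this.regenerator = regenerator;    // kept for use during auto-warming
      if (persistence == null) {
        // first cache of this kind: create the shared, thread-safe persistence object
        persistence = new java.util.concurrent.ConcurrentHashMap<String, Object>();
      }
      this.persistence = persistence;
      return persistence;                // the same object is returned on subsequent inits
    }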

View File

@ -41,6 +41,8 @@ import org.apache.solr.schema.SchemaField;
import org.apache.solr.search.BitDocSet;
import org.apache.solr.search.DocSet;
import org.apache.solr.search.DocSetCollector;
import org.apache.solr.search.QueryCommand;
import org.apache.solr.search.QueryResult;
import org.apache.solr.search.QueryUtils;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.search.SolrIndexSearcher.ProcessedFilter;
@ -58,14 +60,14 @@ public class CommandHandler {
public static class Builder {
private SolrIndexSearcher.QueryCommand queryCommand;
private QueryCommand queryCommand;
private List<Command> commands = new ArrayList<>();
private SolrIndexSearcher searcher;
private boolean needDocSet = false;
private boolean truncateGroups = false;
private boolean includeHitCount = false;
public Builder setQueryCommand(SolrIndexSearcher.QueryCommand queryCommand) {
public Builder setQueryCommand(QueryCommand queryCommand) {
this.queryCommand = queryCommand;
this.needDocSet = (queryCommand.getFlags() & SolrIndexSearcher.GET_DOCSET) != 0;
return this;
@ -83,7 +85,7 @@ public class CommandHandler {
/**
* Sets whether to compute a {@link DocSet}.
* May override the value set by {@link #setQueryCommand(org.apache.solr.search.SolrIndexSearcher.QueryCommand)}.
* May override the value set by {@link #setQueryCommand(org.apache.solr.search.QueryCommand)}.
*
* @param needDocSet Whether to compute a {@link DocSet}
* @return this
@ -115,7 +117,7 @@ public class CommandHandler {
private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private final SolrIndexSearcher.QueryCommand queryCommand;
private final QueryCommand queryCommand;
private final List<Command> commands;
private final SolrIndexSearcher searcher;
private final boolean needDocset;
@ -126,7 +128,7 @@ public class CommandHandler {
private DocSet docSet;
private CommandHandler(SolrIndexSearcher.QueryCommand queryCommand,
private CommandHandler(QueryCommand queryCommand,
List<Command> commands,
SolrIndexSearcher searcher,
boolean needDocset,
@ -172,7 +174,7 @@ public class CommandHandler {
final AbstractAllGroupHeadsCollector allGroupHeadsCollector;
if (fieldType.getNumericType() != null) {
ValueSource vs = fieldType.getValueSource(sf, null);
allGroupHeadsCollector = new FunctionAllGroupHeadsCollector(vs, new HashMap<Object,Object>(), firstCommand.getSortWithinGroup());
allGroupHeadsCollector = new FunctionAllGroupHeadsCollector(vs, new HashMap(), firstCommand.getSortWithinGroup());
} else {
allGroupHeadsCollector = TermAllGroupHeadsCollector.create(firstCommand.getKey(), firstCommand.getSortWithinGroup());
}
@ -188,7 +190,6 @@ public class CommandHandler {
private DocSet computeDocSet(Query query, ProcessedFilter filter, List<Collector> collectors) throws IOException {
int maxDoc = searcher.maxDoc();
final Collector collector;
final DocSetCollector docSetCollector = new DocSetCollector(maxDoc);
List<Collector> allCollectors = new ArrayList<>(collectors);
allCollectors.add(docSetCollector);
@ -197,7 +198,7 @@ public class CommandHandler {
}
@SuppressWarnings("unchecked")
public NamedList processResult(SolrIndexSearcher.QueryResult queryResult, ShardResultTransformer transformer) throws IOException {
public NamedList processResult(QueryResult queryResult, ShardResultTransformer transformer) throws IOException {
if (docSet != null) {
queryResult.setDocSet(docSet);
}

View File

@ -24,8 +24,8 @@ import org.apache.solr.handler.component.ResponseBuilder;
import org.apache.solr.handler.component.ShardRequest;
import org.apache.solr.handler.component.ShardResponse;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.search.QueryCommand;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.search.SolrIndexSearcher.QueryCommand;
import org.apache.solr.util.plugin.PluginInfoInitialized;
/**

View File

@ -107,7 +107,7 @@ public class TestCloudPivotFacet extends AbstractFullDistribZkTestBase {
sanityCheckAssertNumerics();
waitForThingsToLevelOut(30000); // TODO: why whould we have to wait?
waitForThingsToLevelOut(30000); // TODO: why would we have to wait?
//
handle.clear();
handle.put("QTime", SKIPVAL);
@ -349,7 +349,7 @@ public class TestCloudPivotFacet extends AbstractFullDistribZkTestBase {
/**
* Compare top level stats in response with stats from pivot constraint
*/
private void assertPivotStats(String message, PivotField constraint, QueryResponse response) throws SolrServerException {
private void assertPivotStats(String message, PivotField constraint, QueryResponse response) {
if (null == constraint.getFieldStatsInfo()) {
// no stats for this pivot, nothing to check
@ -678,8 +678,7 @@ public class TestCloudPivotFacet extends AbstractFullDistribZkTestBase {
/**
* Asserts the number of docs found in the response
*/
private void assertNumFound(String msg, int expected, QueryResponse response)
throws SolrServerException {
private void assertNumFound(String msg, int expected, QueryResponse response) {
countNumFoundChecks++;

View File

@ -133,7 +133,7 @@ public class TestRankQueryPlugin extends QParserPlugin {
this.mergeStrategy = mergeStrategy;
}
public TopDocsCollector getTopDocsCollector(int len, SolrIndexSearcher.QueryCommand cmd, IndexSearcher searcher) {
public TopDocsCollector getTopDocsCollector(int len, QueryCommand cmd, IndexSearcher searcher) {
if(collector == 0)
return new TestCollector(null);
else

View File

@ -20,6 +20,8 @@ package org.apache.solr.util;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.search.QParser;
import org.apache.solr.search.QueryCommand;
import org.apache.solr.search.QueryResult;
import org.apache.solr.util.SolrPluginUtils.DisjunctionMaxQueryParser;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.search.DocList;
@ -70,8 +72,8 @@ public class SolrPluginUtilsTest extends SolrTestCaseJ4 {
RefCounted<SolrIndexSearcher> holder = h.getCore().getSearcher();
try {
SolrIndexSearcher srchr = holder.get();
SolrIndexSearcher.QueryResult qr = new SolrIndexSearcher.QueryResult();
SolrIndexSearcher.QueryCommand cmd = new SolrIndexSearcher.QueryCommand();
QueryResult qr = new QueryResult();
QueryCommand cmd = new QueryCommand();
cmd.setQuery(new MatchAllDocsQuery());
cmd.setLen(10);
qr = srchr.search(qr, cmd);