mirror of https://github.com/apache/lucene.git
LUCENE-5666: clear nocommits and fix precommit
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene5666@1594505 13f79535-47bb-0310-9956-ffa450edef68
parent 69d59acc31
commit 2078b86ba7
@@ -65,6 +65,18 @@ API Changes
   as tokens anymore, and now iterates cells on-demand during indexing instead of
   building a collection. RPT now has more setters. (David Smiley)
 
+* LUCENE-5666: Change uninverted access (sorting, faceting, grouping, etc)
+  to use the DocValues API instead of FieldCache. For FieldCache functionality,
+  use UninvertingReader in lucene/misc (or implement your own FilterReader).
+  UninvertingReader is more efficient: supports multi-valued numeric fields,
+  detects when a multi-valued field is single-valued, reuses caches
+  of compatible types (e.g. SORTED also supports BINARY and SORTED_SET access
+  without insanity). "Insanity" is no longer possible unless you explicitly want it.
+  Rename FieldCache* and DocTermOrds* classes in the search package to DocValues*.
+  Move SortedSetSortField to core and add SortedSetFieldSource to queries/, which
+  takes the same selectors. Add helper methods to DocValues.java that are better
+  suited for search code (never return null, etc). (Mike McCandless, Robert Muir)
+
 Documentation
 
 * LUCENE-5392: Add/improve analysis package documentation to reflect

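To illustrate the migration path described in the CHANGES entry above, here is a minimal sketch of sorting on an indexed-but-not-DocValues field by wrapping the reader with UninvertingReader from lucene/misc. The field name and Type mapping are illustrative only, and the sketch assumes the static UninvertingReader.wrap(DirectoryReader, Map) entry point on this branch:

    import java.util.Collections;
    import java.util.Map;

    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.MatchAllDocsQuery;
    import org.apache.lucene.search.Sort;
    import org.apache.lucene.search.SortField;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.uninverting.UninvertingReader;

    public class UninvertingSortExample {
      static void sortLegacyIndex(Directory dir) throws Exception {
        // "popularity" is a hypothetical field that was indexed without doc values.
        Map<String, UninvertingReader.Type> mapping =
            Collections.singletonMap("popularity", UninvertingReader.Type.INTEGER);
        // Wrap the reader so uninverted values are exposed through the DocValues API.
        DirectoryReader reader = UninvertingReader.wrap(DirectoryReader.open(dir), mapping);
        try {
          IndexSearcher searcher = new IndexSearcher(reader);
          Sort sort = new Sort(new SortField("popularity", SortField.Type.INT));
          searcher.search(new MatchAllDocsQuery(), 10, sort);
        } finally {
          reader.close();
        }
      }
    }
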
@@ -85,7 +85,7 @@ public class TestCollationDocValuesField extends LuceneTestCase {
     RandomIndexWriter iw = new RandomIndexWriter(random(), dir);
     Document doc = new Document();
     Field field = newField("field", "", StringField.TYPE_STORED);
-    Collator collator = Collator.getInstance(); // uses -Dtests.locale
+    Collator collator = Collator.getInstance(Locale.getDefault()); // uses -Dtests.locale
     if (random().nextBoolean()) {
       collator.setStrength(Collator.PRIMARY);
     }

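A note on the Collator change above: under the Lucene test framework, -Dtests.locale is applied by setting the JVM default locale, so passing Locale.getDefault() explicitly picks up the same randomized locale while (presumably) satisfying the precommit forbidden-apis check on locale-sensitive no-arg methods. A small sketch of the equivalence, not part of this commit:

    import java.text.Collator;
    import java.util.Locale;

    public class CollatorDefaultLocale {
      public static void main(String[] args) {
        // Both collators resolve to the same locale (the JVM default, which the test
        // framework sets from -Dtests.locale); only the explicit form avoids the
        // no-arg default-locale method flagged at precommit.
        Collator implicitDefault = Collator.getInstance();
        Collator explicitDefault = Collator.getInstance(Locale.getDefault());
        System.out.println(implicitDefault.compare("a", "b") == explicitDefault.compare("a", "b"));
      }
    }
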
@@ -31,7 +31,6 @@
  "/>
 
  <property name="forbidden-rue-excludes" value="
-   org/apache/lucene/search/FieldCache$CacheEntry.class
    org/apache/lucene/util/RamUsageEstimator.class
    org/apache/lucene/search/CachingWrapperFilter.class
  "/>

@@ -300,7 +300,7 @@ public class BlockGroupingCollector extends SimpleCollector {
    * This is normally not a problem, as you can obtain the
    * value just like you obtain other values for each
    * matching document (eg, via stored fields, via
-   * FieldCache, etc.)
+   * DocValues, etc.)
    *
    * @param withinGroupSort The {@link Sort} used to sort
    *   documents within each group. Passing null is

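As a reminder of what "via DocValues" means in the reworked javadoc above, a per-document value can be read segment by segment with the 4.x-era reader API used on this branch; the field name here is illustrative, not from this commit:

    import org.apache.lucene.index.AtomicReaderContext;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.index.NumericDocValues;

    public class DocValuesAccessExample {
      static void printFirstValues(IndexReader reader) throws Exception {
        for (AtomicReaderContext leaf : reader.leaves()) {
          // Returns null if this segment has no doc values for the field.
          NumericDocValues values = leaf.reader().getNumericDocValues("price");
          if (values != null) {
            System.out.println(values.get(0)); // value for local docID 0 in this segment
          }
        }
      }
    }
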
@@ -80,8 +80,7 @@ field fall into a single group.</p>
 <p>Known limitations:</p>
 <ul>
   <li> For the two-pass grouping search, the group field must be a
-    single-valued indexed field (or indexed as a {@link org.apache.lucene.document.SortedDocValuesField}).
-    {@link org.apache.lucene.search.FieldCache} is used to load the {@link org.apache.lucene.index.SortedDocValues} for this field.
+    indexed as a {@link org.apache.lucene.document.SortedDocValuesField}).
   <li> Although Solr support grouping by function and this module has abstraction of what a group is, there are currently only
     implementations for grouping based on terms.
   <li> Sharding is not directly supported, though is not too

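In practice the limitation above means the group-by field is added to each document twice, once as an indexed term for querying and once as doc values for the grouping module; a minimal sketch with an illustrative field name:

    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.Field;
    import org.apache.lucene.document.SortedDocValuesField;
    import org.apache.lucene.document.StringField;
    import org.apache.lucene.util.BytesRef;

    public class GroupFieldIndexingExample {
      static Document makeDoc(String author) {
        Document doc = new Document();
        // Indexed term so the group field can be queried/filtered.
        doc.add(new StringField("author", author, Field.Store.YES));
        // Doc values for the grouping module, which now reads group values via DocValues.
        doc.add(new SortedDocValuesField("author", new BytesRef(author)));
        return doc;
      }
    }
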
@@ -46,7 +46,7 @@ public class TermFirstPassGroupingCollector extends AbstractFirstPassGroupingCol
    *
    * @param groupField The field used to group
    *   documents. This field must be single-valued and
-   *   indexed (FieldCache is used to access its value
+   *   indexed (DocValues is used to access its value
    *   per-document).
    * @param groupSort The {@link Sort} used to sort the
    *   groups. The top sorted document within each group

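A sketch of the first-pass collector whose javadoc changed above, run against an index whose group field carries SortedDocValues as in the earlier indexing sketch; the query and field name are illustrative, and the constructor and getTopGroups signatures are assumed from the 4.x grouping API:

    import java.util.Collection;

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.Sort;
    import org.apache.lucene.search.TermQuery;
    import org.apache.lucene.search.grouping.SearchGroup;
    import org.apache.lucene.search.grouping.term.TermFirstPassGroupingCollector;
    import org.apache.lucene.util.BytesRef;

    public class FirstPassGroupingExample {
      static Collection<SearchGroup<BytesRef>> topAuthorGroups(IndexSearcher searcher) throws Exception {
        TermFirstPassGroupingCollector firstPass =
            new TermFirstPassGroupingCollector("author", Sort.RELEVANCE, 10);
        searcher.search(new TermQuery(new Term("body", "lucene")), firstPass);
        // May return null when nothing matched; feed the result into the second-pass collector.
        return firstPass.getTopGroups(0, true);
      }
    }
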
@@ -16,6 +16,6 @@
 -->
 <html>
 <body>
-Support for grouping by indexed terms via {@link org.apache.lucene.search.FieldCache}.
+Support for grouping by indexed terms via {@link org.apache.lucene.index.DocValues}.
 </body>
 </html>

@@ -32,6 +32,10 @@
    org/apache/lucene/misc/IndexMergeTool.class
  "/>
 
+ <property name="forbidden-rue-excludes" value="
+   org/apache/lucene/uninverting/FieldCache$CacheEntry.class
+ "/>
+
  <import file="../module-build.xml"/>
 
  <target name="install-cpptasks" unless="cpptasks.uptodate" depends="ivy-availability-check,ivy-fail,ivy-configure">

@@ -40,6 +40,7 @@ import org.apache.lucene.index.FilterDirectoryReader;
 import org.apache.lucene.index.NumericDocValues;
 import org.apache.lucene.index.SortedDocValues;
 import org.apache.lucene.index.SortedSetDocValues;
+import org.apache.lucene.uninverting.FieldCache.CacheEntry;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.NumericUtils;

@@ -309,4 +310,17 @@ public class UninvertingReader extends FilterAtomicReader {
   public String toString() {
     return "Uninverting(" + in.toString() + ")";
   }
+
+  /**
+   * Return information about the backing cache
+   * @lucene.internal
+   */
+  public static String[] getUninvertedStats() {
+    CacheEntry[] entries = FieldCache.DEFAULT.getCacheEntries();
+    String[] info = new String[entries.length];
+    for (int i = 0; i < entries.length; i++) {
+      info[i] = entries[i].toString();
+    }
+    return info;
+  }
 }

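For reference, the getUninvertedStats() helper added above can be used for ad-hoc diagnostics of what has been uninverted; a minimal sketch:

    import org.apache.lucene.uninverting.UninvertingReader;

    public class UninvertedStatsDump {
      public static void main(String[] args) {
        // One line per cache entry, as produced by CacheEntry.toString() in the new helper.
        for (String entry : UninvertingReader.getUninvertedStats()) {
          System.out.println(entry);
        }
      }
    }
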
@@ -19,28 +19,19 @@ package org.apache.solr.search;
 
 import java.net.URL;
 
+import org.apache.lucene.uninverting.UninvertingReader;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
 
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.core.SolrInfoMBean;
 
-/*
-import org.apache.lucene.search.FieldCache;
-import org.apache.lucene.search.FieldCache.CacheEntry;
-import org.apache.lucene.util.FieldCacheSanityChecker;
-import org.apache.lucene.util.FieldCacheSanityChecker.Insanity;
-*/
-// nocommit: maybe provide something useful here instead.
-
 /**
  * A SolrInfoMBean that provides introspection of the Solr FieldCache
  *
  */
 public class SolrFieldCacheMBean implements SolrInfoMBean {
 
-  //protected FieldCacheSanityChecker checker = new FieldCacheSanityChecker();
-
   @Override
   public String getName() { return this.getClass().getName(); }
   @Override

@@ -62,21 +53,11 @@ public class SolrFieldCacheMBean implements SolrInfoMBean {
   @Override
   public NamedList getStatistics() {
     NamedList stats = new SimpleOrderedMap();
-    /*
-    CacheEntry[] entries = FieldCache.DEFAULT.getCacheEntries();
+    String[] entries = UninvertingReader.getUninvertedStats();
     stats.add("entries_count", entries.length);
     for (int i = 0; i < entries.length; i++) {
-      CacheEntry e = entries[i];
-      stats.add("entry#" + i, e.toString());
+      stats.add("entry#" + i, entries[i]);
     }
 
-    Insanity[] insanity = checker.check(entries);
-
-    stats.add("insanity_count", insanity.length);
-    for (int i = 0; i < insanity.length; i++) {
-
-      stats.add("insanity#" + i, insanity[i].toString());
-    }*/
-
     return stats;
   }

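The reworked getStatistics() above now reports the entries from UninvertingReader's cache; a hypothetical direct caller reading the NamedList it returns might look like the following (in Solr these values normally surface through the admin stats/MBean handlers rather than by calling the bean directly):

    import org.apache.solr.common.util.NamedList;
    import org.apache.solr.search.SolrFieldCacheMBean;

    public class FieldCacheStatsPeek {
      static void print(SolrFieldCacheMBean mbean) {
        NamedList stats = mbean.getStatistics();
        System.out.println("entries_count = " + stats.get("entries_count"));
        for (int i = 0; i < stats.size(); i++) {
          System.out.println(stats.getName(i) + " = " + stats.getVal(i));
        }
      }
    }
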
@@ -417,10 +417,6 @@ public class TestFunctionQuery extends SolrTestCaseJ4 {
         ,"*//doc[1]/float[.='120.0']"
         ,"*//doc[2]/float[.='121.0']"
         );
-
-
-    // nocommit: split test if needed
-    // FieldCache.DEFAULT.purgeAllCaches(); // hide FC insanity
   }
 
   /**