Merged /lucene/dev/trunk:r1441770-1442810

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene4547@1442821 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Robert Muir 2013-02-06 00:58:30 +00:00
commit 6b8e374c44
57 changed files with 1009 additions and 1448 deletions

View File

@ -61,6 +61,11 @@
<artifactId>lucene-memory</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>lucene-queries</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>
<build>
<sourceDirectory>${module-path}/src/java</sourceDirectory>

View File

@ -94,6 +94,12 @@ New Features
* LUCENE-4723: Add AnalyzerFactoryTask to benchmark, and enable analyzer
creation via the resulting factories using NewAnalyzerTask. (Steve Rowe)
* LUCENE-4728: Unknown and not explicitly mapped queries are now rewritten
against the highlighting IndexReader to obtain primitive queries before
discarding the query entirely. WeightedSpanTermExtractor now builds a
MemoryIndex only once even if multiple fields are highlighted.
(Simon Willnauer)
API Changes
* LUCENE-4709: FacetResultNode no longer has a residue field. (Shai Erera)
@ -133,10 +139,18 @@ Bug Fixes
* LUCENE-4739: Fixed bugs that prevented FSTs more than ~1.1GB from
being saved and loaded (Adrien Grand, Mike McCandless)
Documentation
* LUCENE-4718: Fixed documentation of oal.queryparser.classic.
(Hayden Muhl via Adrien Grand)
Build
* LUCENE-4636: Upgrade ivy to 2.3.0 (Shawn Heisey via Robert Muir)
* LUCENE-4570: Use the Policeman Forbidden API checker, released separately
from Lucene and downloaded via Ivy. (Uwe Schindler, Robert Muir)
======================= Lucene 4.1.0 =======================
Changes in backwards compatibility policy

View File

@ -28,6 +28,8 @@ import org.apache.uima.resource.ResourceInitializationException;
import java.io.IOException;
import java.io.Reader;
import java.util.HashMap;
import java.util.Map;
/**
* Abstract base implementation of a {@link Tokenizer} which is able to analyze the given input with a
@ -39,10 +41,10 @@ public abstract class BaseUIMATokenizer extends Tokenizer {
protected final AnalysisEngine ae;
protected final CAS cas;
protected BaseUIMATokenizer(Reader reader, String descriptorPath) {
protected BaseUIMATokenizer(Reader reader, String descriptorPath, Map<String, Object> configurationParameters) {
super(reader);
try {
ae = AEProviderFactory.getInstance().getAEProvider(descriptorPath).getAE();
ae = AEProviderFactory.getInstance().getAEProvider(null, descriptorPath, configurationParameters).getAE();
cas = ae.newCAS();
} catch (ResourceInitializationException e) {
throw new RuntimeException(e);

View File

@ -26,6 +26,7 @@ import org.apache.uima.cas.text.AnnotationFS;
import java.io.IOException;
import java.io.Reader;
import java.util.Map;
/**
* a {@link Tokenizer} which creates tokens from UIMA Annotations
@ -40,8 +41,8 @@ public final class UIMAAnnotationsTokenizer extends BaseUIMATokenizer {
private int finalOffset = 0;
public UIMAAnnotationsTokenizer(String descriptorPath, String tokenType, Reader input) {
super(input, descriptorPath);
public UIMAAnnotationsTokenizer(String descriptorPath, String tokenType, Map<String, Object> configurationParameters, Reader input) {
super(input, descriptorPath, configurationParameters);
this.tokenTypeString = tokenType;
this.termAttr = addAttribute(CharTermAttribute.class);
this.offsetAttr = addAttribute(OffsetAttribute.class);

View File

@ -22,6 +22,7 @@ import org.apache.lucene.analysis.util.TokenizerFactory;
import org.apache.lucene.analysis.uima.UIMAAnnotationsTokenizer;
import java.io.Reader;
import java.util.HashMap;
import java.util.Map;
/**
@ -31,19 +32,29 @@ public class UIMAAnnotationsTokenizerFactory extends TokenizerFactory {
private String descriptorPath;
private String tokenType;
private Map<String, Object> configurationParameters;
@Override
public void init(Map<String, String> args) {
super.init(args);
descriptorPath = args.get("descriptorPath");
tokenType = args.get("tokenType");
if (descriptorPath == null || tokenType == null) {
throw new IllegalArgumentException("Both descriptorPath and tokenType are mandatory");
configurationParameters = new HashMap<String, Object>();
for (String k : args.keySet()) {
if (k.equals("tokenType")) {
tokenType = args.get("tokenType");
} else if (k.equals("descriptorPath")) {
descriptorPath = args.get("descriptorPath");
} else {
configurationParameters.put(k, args.get(k));
}
}
if (descriptorPath == null || tokenType == null ) {
throw new IllegalArgumentException("descriptorPath and tokenType are mandatory");
}
}
@Override
public Tokenizer create(Reader input) {
return new UIMAAnnotationsTokenizer(descriptorPath, tokenType, input);
return new UIMAAnnotationsTokenizer(descriptorPath, tokenType, configurationParameters, input);
}
}

View File

@ -20,6 +20,7 @@ package org.apache.lucene.analysis.uima;
import org.apache.lucene.analysis.Analyzer;
import java.io.Reader;
import java.util.Map;
/**
* An {@link Analyzer} which use the {@link UIMAAnnotationsTokenizer} for creating tokens
@ -28,15 +29,17 @@ public final class UIMABaseAnalyzer extends Analyzer {
private final String descriptorPath;
private final String tokenType;
private final Map<String, Object> configurationParameters;
public UIMABaseAnalyzer(String descriptorPath, String tokenType) {
public UIMABaseAnalyzer(String descriptorPath, String tokenType, Map<String, Object> configurationParameters) {
this.descriptorPath = descriptorPath;
this.tokenType = tokenType;
this.configurationParameters = configurationParameters;
}
@Override
protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
return new TokenStreamComponents(new UIMAAnnotationsTokenizer(descriptorPath, tokenType, reader));
return new TokenStreamComponents(new UIMAAnnotationsTokenizer(descriptorPath, tokenType, configurationParameters, reader));
}
}

View File

@ -20,6 +20,7 @@ package org.apache.lucene.analysis.uima;
import org.apache.lucene.analysis.Analyzer;
import java.io.Reader;
import java.util.Map;
/**
* {@link Analyzer} which uses the {@link UIMATypeAwareAnnotationsTokenizer} for the tokenization phase
@ -28,15 +29,17 @@ public final class UIMATypeAwareAnalyzer extends Analyzer {
private final String descriptorPath;
private final String tokenType;
private final String featurePath;
private final Map<String, Object> configurationParameters;
public UIMATypeAwareAnalyzer(String descriptorPath, String tokenType, String featurePath) {
public UIMATypeAwareAnalyzer(String descriptorPath, String tokenType, String featurePath, Map<String, Object> configurationParameters) {
this.descriptorPath = descriptorPath;
this.tokenType = tokenType;
this.featurePath = featurePath;
this.configurationParameters = configurationParameters;
}
@Override
protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
return new TokenStreamComponents(new UIMATypeAwareAnnotationsTokenizer(descriptorPath, tokenType, featurePath, reader));
return new TokenStreamComponents(new UIMATypeAwareAnnotationsTokenizer(descriptorPath, tokenType, featurePath, configurationParameters, reader));
}
}

View File

@ -29,6 +29,7 @@ import org.apache.uima.cas.text.AnnotationFS;
import java.io.IOException;
import java.io.Reader;
import java.util.Map;
/**
* A {@link Tokenizer} which creates token from UIMA Annotations filling also their {@link TypeAttribute} according to
@ -50,8 +51,8 @@ public final class UIMATypeAwareAnnotationsTokenizer extends BaseUIMATokenizer {
private int finalOffset = 0;
public UIMATypeAwareAnnotationsTokenizer(String descriptorPath, String tokenType, String typeAttributeFeaturePath, Reader input) {
super(input, descriptorPath);
public UIMATypeAwareAnnotationsTokenizer(String descriptorPath, String tokenType, String typeAttributeFeaturePath, Map<String, Object> configurationParameters, Reader input) {
super(input, descriptorPath, configurationParameters);
this.tokenTypeString = tokenType;
this.termAttr = addAttribute(CharTermAttribute.class);
this.typeAttr = addAttribute(TypeAttribute.class);

View File

@ -18,10 +18,10 @@ package org.apache.lucene.analysis.uima;
*/
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.uima.UIMATypeAwareAnnotationsTokenizer;
import org.apache.lucene.analysis.util.TokenizerFactory;
import java.io.Reader;
import java.util.HashMap;
import java.util.Map;
/**
@ -32,13 +32,23 @@ public class UIMATypeAwareAnnotationsTokenizerFactory extends TokenizerFactory {
private String descriptorPath;
private String tokenType;
private String featurePath;
private Map<String, Object> configurationParameters;
@Override
public void init(Map<String, String> args) {
super.init(args);
descriptorPath = args.get("descriptorPath");
tokenType = args.get("tokenType");
featurePath = args.get("featurePath");
configurationParameters = new HashMap<String, Object>();
for (String k : args.keySet()) {
if (k.equals("featurePath")) {
featurePath = args.get("featurePath");
} else if (k.equals("tokenType")) {
tokenType = args.get("tokenType");
} else if (k.equals("descriptorPath")) {
descriptorPath = args.get("descriptorPath");
} else {
configurationParameters.put(k, args.get(k));
}
}
if (descriptorPath == null || tokenType == null || featurePath == null) {
throw new IllegalArgumentException("descriptorPath, tokenType, and featurePath are mandatory");
}
@ -46,6 +56,6 @@ public class UIMATypeAwareAnnotationsTokenizerFactory extends TokenizerFactory {
@Override
public Tokenizer create(Reader input) {
return new UIMATypeAwareAnnotationsTokenizer(descriptorPath, tokenType, featurePath, input);
return new UIMATypeAwareAnnotationsTokenizer(descriptorPath, tokenType, featurePath, configurationParameters, input);
}
}

View File

@ -20,7 +20,7 @@
<primitive>true</primitive>
<annotatorImplementationName>org.apache.lucene.analysis.uima.an.SampleEntityAnnotator</annotatorImplementationName>
<analysisEngineMetaData>
<name>DummyPoSTagger</name>
<name>EntityAnnotator</name>
<description/>
<version>1.0</version>
<vendor>ASF</vendor>

View File

@ -20,9 +20,28 @@
<primitive>true</primitive>
<annotatorImplementationName>org.apache.lucene.analysis.uima.an.SampleWSTokenizerAnnotator</annotatorImplementationName>
<analysisEngineMetaData>
<name>DummyPoSTagger</name>
<name>WSTokenizer</name>
<version>1.0</version>
<vendor>ASF</vendor>
<configurationParameters>
<configurationParameter>
<name>line-end</name>
<description>
the string used as line end
</description>
<type>String</type>
<multiValued>false</multiValued>
<mandatory>false</mandatory>
</configurationParameter>
</configurationParameters>
<configurationParameterSettings>
<nameValuePair>
<name>line-end</name>
<value>
<string>\n</string>
</value>
</nameValuePair>
</configurationParameterSettings>
<typeSystemDescription>
<types>
<typeDescription>

View File

@ -36,6 +36,8 @@ import org.junit.Before;
import org.junit.Test;
import java.io.StringReader;
import java.util.HashMap;
import java.util.Map;
/**
* Testcase for {@link UIMABaseAnalyzer}
@ -48,7 +50,7 @@ public class UIMABaseAnalyzerTest extends BaseTokenStreamTestCase {
@Before
public void setUp() throws Exception {
super.setUp();
analyzer = new UIMABaseAnalyzer("/uima/AggregateSentenceAE.xml", "org.apache.uima.TokenAnnotation");
analyzer = new UIMABaseAnalyzer("/uima/AggregateSentenceAE.xml", "org.apache.uima.TokenAnnotation", null);
}
@Override
@ -120,7 +122,15 @@ public class UIMABaseAnalyzerTest extends BaseTokenStreamTestCase {
@Test
public void testRandomStrings() throws Exception {
checkRandomData(random(), new UIMABaseAnalyzer("/uima/TestAggregateSentenceAE.xml", "org.apache.lucene.uima.ts.TokenAnnotation"),
checkRandomData(random(), new UIMABaseAnalyzer("/uima/TestAggregateSentenceAE.xml", "org.apache.lucene.uima.ts.TokenAnnotation", null),
100 * RANDOM_MULTIPLIER);
}
@Test
public void testRandomStringsWithConfigurationParameters() throws Exception {
Map<String, Object> cp = new HashMap<String, Object>();
cp.put("line-end", "\r");
checkRandomData(random(), new UIMABaseAnalyzer("/uima/TestWSTokenizerAE.xml", "org.apache.lucene.uima.ts.TokenAnnotation", cp),
100 * RANDOM_MULTIPLIER);
}

View File

@ -37,7 +37,7 @@ public class UIMATypeAwareAnalyzerTest extends BaseTokenStreamTestCase {
public void setUp() throws Exception {
super.setUp();
analyzer = new UIMATypeAwareAnalyzer("/uima/AggregateSentenceAE.xml",
"org.apache.uima.TokenAnnotation", "posTag");
"org.apache.uima.TokenAnnotation", "posTag", null);
}
@Override
@ -63,7 +63,7 @@ public class UIMATypeAwareAnalyzerTest extends BaseTokenStreamTestCase {
@Test
public void testRandomStrings() throws Exception {
checkRandomData(random(), new UIMATypeAwareAnalyzer("/uima/TestAggregateSentenceAE.xml",
"org.apache.lucene.uima.ts.TokenAnnotation", "pos"), 100 * RANDOM_MULTIPLIER);
"org.apache.lucene.uima.ts.TokenAnnotation", "pos", null), 100 * RANDOM_MULTIPLIER);
}
}

View File

@ -17,11 +17,13 @@ package org.apache.lucene.analysis.uima.an;
* limitations under the License.
*/
import org.apache.uima.UimaContext;
import org.apache.uima.analysis_component.JCasAnnotator_ImplBase;
import org.apache.uima.analysis_engine.AnalysisEngineProcessException;
import org.apache.uima.cas.Type;
import org.apache.uima.cas.text.AnnotationFS;
import org.apache.uima.jcas.JCas;
import org.apache.uima.resource.ResourceInitializationException;
/**
* Dummy implementation of a UIMA based whitespace tokenizer
@ -30,15 +32,21 @@ public class SampleWSTokenizerAnnotator extends JCasAnnotator_ImplBase {
private final static String TOKEN_TYPE = "org.apache.lucene.uima.ts.TokenAnnotation";
private final static String SENTENCE_TYPE = "org.apache.lucene.uima.ts.SentenceAnnotation";
private static final String CR = "\n";
private String lineEnd;
private static final String WHITESPACE = " ";
@Override
public void initialize(UimaContext aContext) throws ResourceInitializationException {
super.initialize(aContext);
lineEnd = String.valueOf(aContext.getConfigParameterValue("line-end"));
}
@Override
public void process(JCas jCas) throws AnalysisEngineProcessException {
Type sentenceType = jCas.getCas().getTypeSystem().getType(SENTENCE_TYPE);
Type tokenType = jCas.getCas().getTypeSystem().getType(TOKEN_TYPE);
int i = 0;
for (String sentenceString : jCas.getDocumentText().split(CR)) {
for (String sentenceString : jCas.getDocumentText().split(lineEnd)) {
// add the sentence
AnnotationFS sentenceAnnotation = jCas.getCas().createAnnotation(sentenceType, i, sentenceString.length());
jCas.addFsToIndexes(sentenceAnnotation);

View File

@ -147,6 +147,7 @@
<pathelement path="${analyzers-common.jar}"/>
<pathelement path="${queryparser.jar}"/>
<pathelement path="${facet.jar}"/>
<pathelement path="${queries.jar}"/>
<fileset dir="${common.dir}/analysis/icu/lib"/>
<path refid="base.classpath"/>
<fileset dir="lib"/>

View File

@ -157,28 +157,34 @@
<license-check-macro dir="${basedir}" licensedir="${common.dir}/licenses" />
</target>
<target name="check-forbidden-apis" depends="compile-tools,compile-test,load-custom-tasks,-check-forbidden-jdk-apis,-check-forbidden-test-apis,-check-system-out" description="Check forbidden API calls in compiled class files"/>
<target name="check-forbidden-apis" depends="compile-tools,compile-test,install-forbidden-apis,-forbidden-apis-classpath,-check-forbidden-jdk-apis,-check-forbidden-test-apis,-check-system-out" description="Check forbidden API calls in compiled class files"/>
<!-- TODO: Make the forbidden API checks per module! -->
<target name="-forbidden-apis-classpath">
<path id="forbidden-apis.classpath">
<fileset dir="${basedir}" includes="**/lib/*.jar"/>
<dirset dir="${basedir}/build" includes="**/classes/*"/>
</path>
</target>
<target name="-check-forbidden-jdk-apis">
<forbidden-apis>
<apiFileSet dir="${custom-tasks.dir}/forbiddenApis">
<include name="jdk.txt" />
<include name="jdk-deprecated.txt" />
<include name="executors.txt" />
</apiFileSet>
<forbidden-apis internalRuntimeForbidden="true" classpathref="forbidden-apis.classpath">
<bundledSignatures name="jdk-unsafe-${javac.target}"/>
<bundledSignatures name="jdk-deprecated-${javac.target}"/>
<signaturesFileSet file="${common.dir}/tools/forbiddenApis/executors.txt"/>
<fileset dir="${basedir}/build" includes="**/*.class" />
</forbidden-apis>
</target>
<target name="-check-forbidden-test-apis">
<forbidden-apis apiFile="${custom-tasks.dir}/forbiddenApis/tests.txt">
<forbidden-apis signaturesFile="${common.dir}/tools/forbiddenApis/tests.txt" classpathref="forbidden-apis.classpath">
<classpath refid="junit-path"/>
<fileset dir="${basedir}/build" includes="**/classes/test/**/*.class,test-framework/**/*.class" />
</forbidden-apis>
</target>
<target name="-check-system-out">
<forbidden-apis apiFile="${custom-tasks.dir}/forbiddenApis/system-out.txt">
<forbidden-apis bundledSignatures="jdk-system-out" classpathref="forbidden-apis.classpath">
<fileset dir="${basedir}/build">
<include name="**/classes/java/**/*.class"/>
<!-- this is basically tests -->

View File

@ -1905,6 +1905,14 @@ ${tests-output}/junit4-*.suites - per-JVM executed suites
<property name="groovy.loaded" value="true"/>
</target>
<!-- Forbidden API Task -->
<target name="install-forbidden-apis" unless="forbidden-apis.loaded" depends="ivy-availability-check,ivy-configure">
<ivy:cachepath organisation="de.thetaphi" module="forbiddenapis" revision="1.0"
inline="true" conf="default" transitive="true" pathid="forbidden-apis.classpath"/>
<taskdef name="forbidden-apis" classname="de.thetaphi.forbiddenapis.AntTask" classpathref="forbidden-apis.classpath"/>
<property name="forbidden-apis.loaded" value="true"/>
</target>
<!-- PEGDOWN macro: Before using depend on the target "resolve-pegdown" -->
<target name="resolve-pegdown" unless="pegdown.loaded" depends="ivy-availability-check,ivy-configure">

View File

@ -29,6 +29,8 @@ import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.DocTermOrds;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
@ -89,21 +91,22 @@ public interface FieldCache {
Object value;
}
/**
* Hack: When thrown from a Parser (NUMERIC_UTILS_* ones), this stops
* processing terms and returns the current FieldCache
* array.
* @lucene.internal
*/
public static final class StopFillCacheException extends RuntimeException {
}
/**
* Marker interface as super-interface to all parsers. It
* is used to specify a custom parser to {@link
* SortField#SortField(String, FieldCache.Parser)}.
*/
public interface Parser {
/**
* Pulls a {@link TermsEnum} from the given {@link Terms}. This method allows certain parsers
* to filter the actual TermsEnum before the field cache is filled.
*
* @param terms the {@link Terms} instance to create the {@link TermsEnum} from.
* @return a possibly filtered {@link TermsEnum} instance, this method must not return <code>null</code>.
* @throws IOException if an {@link IOException} occurs
*/
public TermsEnum termsEnum(Terms terms) throws IOException;
}
/** Interface to parse bytes from document fields.
@ -171,6 +174,10 @@ public interface FieldCache {
public String toString() {
return FieldCache.class.getName()+".DEFAULT_BYTE_PARSER";
}
@Override
public TermsEnum termsEnum(Terms terms) throws IOException {
return terms.iterator(null);
}
};
/** The default parser for short values, which are encoded by {@link Short#toString(short)} */
@ -187,6 +194,11 @@ public interface FieldCache {
public String toString() {
return FieldCache.class.getName()+".DEFAULT_SHORT_PARSER";
}
@Override
public TermsEnum termsEnum(Terms terms) throws IOException {
return terms.iterator(null);
}
};
/** The default parser for int values, which are encoded by {@link Integer#toString(int)} */
@ -199,6 +211,12 @@ public interface FieldCache {
// directly from byte[]
return Integer.parseInt(term.utf8ToString());
}
@Override
public TermsEnum termsEnum(Terms terms) throws IOException {
return terms.iterator(null);
}
@Override
public String toString() {
return FieldCache.class.getName()+".DEFAULT_INT_PARSER";
@ -215,6 +233,12 @@ public interface FieldCache {
// directly from byte[]
return Float.parseFloat(term.utf8ToString());
}
@Override
public TermsEnum termsEnum(Terms terms) throws IOException {
return terms.iterator(null);
}
@Override
public String toString() {
return FieldCache.class.getName()+".DEFAULT_FLOAT_PARSER";
@ -231,6 +255,12 @@ public interface FieldCache {
// directly from byte[]
return Long.parseLong(term.utf8ToString());
}
@Override
public TermsEnum termsEnum(Terms terms) throws IOException {
return terms.iterator(null);
}
@Override
public String toString() {
return FieldCache.class.getName()+".DEFAULT_LONG_PARSER";
@ -247,6 +277,12 @@ public interface FieldCache {
// directly from byte[]
return Double.parseDouble(term.utf8ToString());
}
@Override
public TermsEnum termsEnum(Terms terms) throws IOException {
return terms.iterator(null);
}
@Override
public String toString() {
return FieldCache.class.getName()+".DEFAULT_DOUBLE_PARSER";
@ -260,10 +296,14 @@ public interface FieldCache {
public static final IntParser NUMERIC_UTILS_INT_PARSER=new IntParser(){
@Override
public int parseInt(BytesRef term) {
if (NumericUtils.getPrefixCodedIntShift(term) > 0)
throw new StopFillCacheException();
return NumericUtils.prefixCodedToInt(term);
}
@Override
public TermsEnum termsEnum(Terms terms) throws IOException {
return NumericUtils.filterPrefixCodedInts(terms.iterator(null));
}
@Override
public String toString() {
return FieldCache.class.getName()+".NUMERIC_UTILS_INT_PARSER";
@ -277,14 +317,17 @@ public interface FieldCache {
public static final FloatParser NUMERIC_UTILS_FLOAT_PARSER=new FloatParser(){
@Override
public float parseFloat(BytesRef term) {
if (NumericUtils.getPrefixCodedIntShift(term) > 0)
throw new StopFillCacheException();
return NumericUtils.sortableIntToFloat(NumericUtils.prefixCodedToInt(term));
}
@Override
public String toString() {
return FieldCache.class.getName()+".NUMERIC_UTILS_FLOAT_PARSER";
}
@Override
public TermsEnum termsEnum(Terms terms) throws IOException {
return NumericUtils.filterPrefixCodedInts(terms.iterator(null));
}
};
/**
@ -294,14 +337,17 @@ public interface FieldCache {
public static final LongParser NUMERIC_UTILS_LONG_PARSER = new LongParser(){
@Override
public long parseLong(BytesRef term) {
if (NumericUtils.getPrefixCodedLongShift(term) > 0)
throw new StopFillCacheException();
return NumericUtils.prefixCodedToLong(term);
}
@Override
public String toString() {
return FieldCache.class.getName()+".NUMERIC_UTILS_LONG_PARSER";
}
@Override
public TermsEnum termsEnum(Terms terms) throws IOException {
return NumericUtils.filterPrefixCodedLongs(terms.iterator(null));
}
};
/**
@ -311,14 +357,17 @@ public interface FieldCache {
public static final DoubleParser NUMERIC_UTILS_DOUBLE_PARSER = new DoubleParser(){
@Override
public double parseDouble(BytesRef term) {
if (NumericUtils.getPrefixCodedLongShift(term) > 0)
throw new StopFillCacheException();
return NumericUtils.sortableLongToDouble(NumericUtils.prefixCodedToLong(term));
}
@Override
public String toString() {
return FieldCache.class.getName()+".NUMERIC_UTILS_DOUBLE_PARSER";
}
@Override
public TermsEnum termsEnum(Terms terms) throws IOException {
return NumericUtils.filterPrefixCodedLongs(terms.iterator(null));
}
};
@ -634,7 +683,7 @@ public interface FieldCache {
return b.toString();
}
}
/**
* EXPERT: Generates an array of CacheEntry objects representing all items
* currently in the FieldCache.

View File

@ -298,7 +298,7 @@ class FieldCacheImpl implements FieldCache {
}
}
final TermsEnum termsEnum = terms.iterator(null);
final TermsEnum termsEnum = termsEnum(terms);
DocsEnum docs = null;
FixedBitSet docsWithField = null;
@ -307,11 +307,7 @@ class FieldCacheImpl implements FieldCache {
if (term == null) {
break;
}
try {
visitTerm(term);
} catch (StopFillCacheException stop) {
break;
}
visitTerm(term);
docs = termsEnum.docs(null, docs, DocsEnum.FLAG_NONE);
while (true) {
final int docID = docs.nextDoc();
@ -331,6 +327,7 @@ class FieldCacheImpl implements FieldCache {
}
}
protected abstract TermsEnum termsEnum(Terms terms) throws IOException;
protected abstract void visitTerm(BytesRef term);
protected abstract void visitDoc(int docID);
}
@ -425,6 +422,11 @@ class FieldCacheImpl implements FieldCache {
public void visitDoc(int docID) {
values[docID] = currentValue;
}
@Override
protected TermsEnum termsEnum(Terms terms) throws IOException {
return parser.termsEnum(terms);
}
};
u.uninvert(reader, key.field, setDocsWithField);
@ -505,6 +507,11 @@ class FieldCacheImpl implements FieldCache {
public void visitDoc(int docID) {
values[docID] = currentValue;
}
@Override
protected TermsEnum termsEnum(Terms terms) throws IOException {
return parser.termsEnum(terms);
}
};
u.uninvert(reader, key.field, setDocsWithField);
@ -610,6 +617,11 @@ class FieldCacheImpl implements FieldCache {
public void visitDoc(int docID) {
values[docID] = currentValue;
}
@Override
protected TermsEnum termsEnum(Terms terms) throws IOException {
return parser.termsEnum(terms);
}
};
u.uninvert(reader, key.field, setDocsWithField);
@ -779,6 +791,11 @@ class FieldCacheImpl implements FieldCache {
public void visitDoc(int docID) {
values[docID] = currentValue;
}
@Override
protected TermsEnum termsEnum(Terms terms) throws IOException {
return parser.termsEnum(terms);
}
};
u.uninvert(reader, key.field, setDocsWithField);
@ -877,6 +894,11 @@ class FieldCacheImpl implements FieldCache {
public void visitDoc(int docID) {
values[docID] = currentValue;
}
@Override
protected TermsEnum termsEnum(Terms terms) throws IOException {
return parser.termsEnum(terms);
}
};
u.uninvert(reader, key.field, setDocsWithField);
@ -975,6 +997,11 @@ class FieldCacheImpl implements FieldCache {
public void visitDoc(int docID) {
values[docID] = currentValue;
}
@Override
protected TermsEnum termsEnum(Terms terms) throws IOException {
return parser.termsEnum(terms);
}
};
u.uninvert(reader, key.field, setDocsWithField);

View File

@ -22,6 +22,8 @@ import org.apache.lucene.document.DoubleField; // javadocs
import org.apache.lucene.document.FloatField; // javadocs
import org.apache.lucene.document.IntField; // javadocs
import org.apache.lucene.document.LongField; // javadocs
import org.apache.lucene.index.FilteredTermsEnum;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.NumericRangeFilter;
import org.apache.lucene.search.NumericRangeQuery; // for javadocs
@ -456,4 +458,41 @@ public final class NumericUtils {
}
/**
* Filters the given {@link TermsEnum} by accepting only prefix coded 64 bit
* terms with a shift value of <tt>0</tt>.
*
* @param termsEnum
* the terms enum to filter
* @return a filtered {@link TermsEnum} that only returns prefix coded 64 bit
* terms with a shift value of <tt>0</tt>.
*/
public static TermsEnum filterPrefixCodedLongs(TermsEnum termsEnum) {
return new FilteredTermsEnum(termsEnum, false) {
@Override
protected AcceptStatus accept(BytesRef term) {
return NumericUtils.getPrefixCodedLongShift(term) == 0 ? AcceptStatus.YES : AcceptStatus.END;
}
};
}
/**
* Filters the given {@link TermsEnum} by accepting only prefix coded 32 bit
* terms with a shift value of <tt>0</tt>.
*
* @param termsEnum
* the terms enum to filter
* @return a filtered {@link TermsEnum} that only returns prefix coded 32 bit
* terms with a shift value of <tt>0</tt>.
*/
public static TermsEnum filterPrefixCodedInts(TermsEnum termsEnum) {
return new FilteredTermsEnum(termsEnum, false) {
@Override
protected AcceptStatus accept(BytesRef term) {
return NumericUtils.getPrefixCodedIntShift(term) == 0 ? AcceptStatus.YES : AcceptStatus.END;
}
};
}
}

View File

@ -20,6 +20,8 @@ package org.apache.lucene.search;
import java.io.IOException;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
@ -94,6 +96,11 @@ final class JustCompileSearch {
public long parseLong(BytesRef string) {
throw new UnsupportedOperationException(UNSUPPORTED_MSG);
}
@Override
public TermsEnum termsEnum(Terms terms) {
throw new UnsupportedOperationException(UNSUPPORTED_MSG);
}
}
@ -103,6 +110,11 @@ final class JustCompileSearch {
public double parseDouble(BytesRef term) {
throw new UnsupportedOperationException(UNSUPPORTED_MSG);
}
@Override
public TermsEnum termsEnum(Terms terms) {
throw new UnsupportedOperationException(UNSUPPORTED_MSG);
}
}

View File

@ -51,6 +51,8 @@ import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.StorableField;
import org.apache.lucene.index.StoredDocument;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.FieldValueHitQueue.Entry;
import org.apache.lucene.store.Directory;
@ -625,8 +627,13 @@ public class TestSort extends LuceneTestCase {
public final int parseInt(final BytesRef term) {
return (term.bytes[term.offset]-'A') * 123456;
}
}), SortField.FIELD_DOC);
assertMatches(full, queryA, sort, "JIHGFEDCBA");
@Override
public TermsEnum termsEnum(Terms terms) throws IOException {
return terms.iterator(null);
}
}), SortField.FIELD_DOC );
assertMatches (full, queryA, sort, "JIHGFEDCBA");
assertSaneFieldCaches(getTestName() + " IntParser");
fc.purgeAllCaches();
@ -635,8 +642,12 @@ public class TestSort extends LuceneTestCase {
public final float parseFloat(final BytesRef term) {
return (float) Math.sqrt( term.bytes[term.offset]);
}
}), SortField.FIELD_DOC);
assertMatches(full, queryA, sort, "JIHGFEDCBA");
@Override
public TermsEnum termsEnum(Terms terms) throws IOException {
return terms.iterator(null);
}
}), SortField.FIELD_DOC );
assertMatches (full, queryA, sort, "JIHGFEDCBA");
assertSaneFieldCaches(getTestName() + " FloatParser");
fc.purgeAllCaches();
@ -645,8 +656,13 @@ public class TestSort extends LuceneTestCase {
public final long parseLong(final BytesRef term) {
return (term.bytes[term.offset]-'A') * 1234567890L;
}
}), SortField.FIELD_DOC);
assertMatches(full, queryA, sort, "JIHGFEDCBA");
@Override
public TermsEnum termsEnum(Terms terms) throws IOException {
return terms.iterator(null);
}
}), SortField.FIELD_DOC );
assertMatches (full, queryA, sort, "JIHGFEDCBA");
assertSaneFieldCaches(getTestName() + " LongParser");
fc.purgeAllCaches();
@ -655,8 +671,12 @@ public class TestSort extends LuceneTestCase {
public final double parseDouble(final BytesRef term) {
return Math.pow( term.bytes[term.offset], (term.bytes[term.offset]-'A'));
}
}), SortField.FIELD_DOC);
assertMatches(full, queryA, sort, "JIHGFEDCBA");
@Override
public TermsEnum termsEnum(Terms terms) throws IOException {
return terms.iterator(null);
}
}), SortField.FIELD_DOC );
assertMatches (full, queryA, sort, "JIHGFEDCBA");
assertSaneFieldCaches(getTestName() + " DoubleParser");
fc.purgeAllCaches();
@ -665,8 +685,13 @@ public class TestSort extends LuceneTestCase {
public final byte parseByte(final BytesRef term) {
return (byte) (term.bytes[term.offset]-'A');
}
}), SortField.FIELD_DOC);
assertMatches(full, queryA, sort, "JIHGFEDCBA");
@Override
public TermsEnum termsEnum(Terms terms) throws IOException {
return terms.iterator(null);
}
}), SortField.FIELD_DOC );
assertMatches (full, queryA, sort, "JIHGFEDCBA");
assertSaneFieldCaches(getTestName() + " ByteParser");
fc.purgeAllCaches();
@ -675,8 +700,12 @@ public class TestSort extends LuceneTestCase {
public final short parseShort(final BytesRef term) {
return (short) (term.bytes[term.offset]-'A');
}
}), SortField.FIELD_DOC);
assertMatches(full, queryA, sort, "JIHGFEDCBA");
@Override
public TermsEnum termsEnum(Terms terms) throws IOException {
return terms.iterator(null);
}
}), SortField.FIELD_DOC );
assertMatches (full, queryA, sort, "JIHGFEDCBA");
assertSaneFieldCaches(getTestName() + " ShortParser");
fc.purgeAllCaches();
}
@ -752,6 +781,11 @@ public class TestSort extends LuceneTestCase {
public final int parseInt(final BytesRef term) {
return (term.bytes[term.offset]-'A') * 123456;
}
@Override
public TermsEnum termsEnum(Terms terms) throws IOException {
return terms.iterator(null);
}
};
@Override

View File

@ -27,6 +27,7 @@
<path id="classpath">
<pathelement path="${memory.jar}"/>
<pathelement path="${queries.jar}"/>
<path refid="base.classpath"/>
</path>

View File

@ -187,11 +187,9 @@ public class Highlighter
ArrayList<TextFragment> docFrags = new ArrayList<TextFragment>();
StringBuilder newText=new StringBuilder();
CharTermAttribute termAtt = tokenStream.addAttribute(CharTermAttribute.class);
OffsetAttribute offsetAtt = tokenStream.addAttribute(OffsetAttribute.class);
tokenStream.addAttribute(PositionIncrementAttribute.class);
tokenStream.reset();
CharTermAttribute termAtt = tokenStream.addAttribute(CharTermAttribute.class);
OffsetAttribute offsetAtt = tokenStream.addAttribute(OffsetAttribute.class);
tokenStream.reset();
TextFragment currentFrag = new TextFragment(newText,newText.length(), docFrags.size());
if (fragmentScorer instanceof QueryScorer) {

View File

@ -18,7 +18,7 @@ package org.apache.lucene.search.highlight;
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
@ -29,11 +29,20 @@ import java.util.TreeSet;
import org.apache.lucene.analysis.CachingTokenFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.FilterAtomicReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermContext;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.memory.MemoryIndex;
import org.apache.lucene.queries.CommonTermsQuery;
import org.apache.lucene.search.*;
import org.apache.lucene.search.spans.FieldMaskingSpanQuery;
import org.apache.lucene.search.spans.SpanFirstQuery;
@ -44,6 +53,8 @@ import org.apache.lucene.search.spans.SpanQuery;
import org.apache.lucene.search.spans.SpanTermQuery;
import org.apache.lucene.search.spans.Spans;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.IOUtils;
/**
* Class used to extract {@link WeightedSpanTerm}s from a {@link Query} based on whether
@ -53,12 +64,13 @@ public class WeightedSpanTermExtractor {
private String fieldName;
private TokenStream tokenStream;
private Map<String,AtomicReaderContext> readers = new HashMap<String,AtomicReaderContext>(10);
private String defaultField;
private boolean expandMultiTermQuery;
private boolean cachedTokenStream;
private boolean wrapToCaching = true;
private int maxDocCharsToAnalyze;
private AtomicReader reader = null;
public WeightedSpanTermExtractor() {
}
@ -69,18 +81,6 @@ public class WeightedSpanTermExtractor {
}
}
private void closeReaders() {
Collection<AtomicReaderContext> ctxSet = readers.values();
for (final AtomicReaderContext ctx : ctxSet) {
try {
ctx.reader().close();
} catch (IOException e) {
// alert?
}
}
}
/**
* Fills a <code>Map</code> with <@link WeightedSpanTerm>s using the terms from the supplied <code>Query</code>.
*
@ -146,21 +146,14 @@ public class WeightedSpanTermExtractor {
if (q != null) {
extract(q, terms);
}
} else if (query instanceof CommonTermsQuery) {
// specialized since rewriting would change the result query
// this query is TermContext sensitive.
extractWeightedTerms(terms, query);
} else if (query instanceof DisjunctionMaxQuery) {
for (Iterator<Query> iterator = ((DisjunctionMaxQuery) query).iterator(); iterator.hasNext();) {
extract(iterator.next(), terms);
}
} else if (query instanceof MultiTermQuery && expandMultiTermQuery) {
MultiTermQuery mtq = ((MultiTermQuery)query);
if(mtq.getRewriteMethod() != MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE) {
mtq = (MultiTermQuery) mtq.clone();
mtq.setRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
query = mtq;
}
if (mtq.getField() != null) {
IndexReader ir = getLeafContextForField(mtq.getField()).reader();
extract(query.rewrite(ir), terms);
}
} else if (query instanceof MultiPhraseQuery) {
final MultiPhraseQuery mpq = (MultiPhraseQuery) query;
final List<Term[]> termArrays = mpq.getTermArrays();
@ -210,12 +203,30 @@ public class WeightedSpanTermExtractor {
sp.setBoost(query.getBoost());
extractWeightedSpanTerms(terms, sp);
}
} else {
Query origQuery = query;
if (query instanceof MultiTermQuery) {
if (!expandMultiTermQuery) {
return;
}
MultiTermQuery copy = (MultiTermQuery) query.clone();
copy.setRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
origQuery = copy;
}
final IndexReader reader = getLeafContext().reader();
Query rewritten = origQuery.rewrite(reader);
if (rewritten != origQuery) {
// only rewrite once and then flatten again - the rewritten query could have a speacial treatment
// if this method is overwritten in a subclass or above in the next recursion
extract(rewritten, terms);
}
}
extractUnknownQuery(query, terms);
}
protected void extractUnknownQuery(Query query,
Map<String, WeightedSpanTerm> terms) throws IOException {
// for sub-classing to extract custom queries
}
@ -249,7 +260,7 @@ public class WeightedSpanTermExtractor {
final boolean mustRewriteQuery = mustRewriteQuery(spanQuery);
if (mustRewriteQuery) {
for (final String field : fieldNames) {
final SpanQuery rewrittenQuery = (SpanQuery) spanQuery.rewrite(getLeafContextForField(field).reader());
final SpanQuery rewrittenQuery = (SpanQuery) spanQuery.rewrite(getLeafContext().reader());
queries.put(field, rewrittenQuery);
rewrittenQuery.extractTerms(nonWeightedTerms);
}
@ -266,7 +277,7 @@ public class WeightedSpanTermExtractor {
} else {
q = spanQuery;
}
AtomicReaderContext context = getLeafContextForField(field);
AtomicReaderContext context = getLeafContext();
Map<Term,TermContext> termContexts = new HashMap<Term,TermContext>();
TreeSet<Term> extractedTerms = new TreeSet<Term>();
q.extractTerms(extractedTerms);
@ -338,23 +349,79 @@ public class WeightedSpanTermExtractor {
return rv;
}
protected AtomicReaderContext getLeafContextForField(String field) throws IOException {
if(wrapToCaching && !cachedTokenStream && !(tokenStream instanceof CachingTokenFilter)) {
tokenStream = new CachingTokenFilter(new OffsetLimitTokenFilter(tokenStream, maxDocCharsToAnalyze));
cachedTokenStream = true;
}
AtomicReaderContext context = readers.get(field);
if (context == null) {
MemoryIndex indexer = new MemoryIndex();
indexer.addField(field, new OffsetLimitTokenFilter(tokenStream, maxDocCharsToAnalyze));
protected AtomicReaderContext getLeafContext() throws IOException {
if (reader == null) {
if(wrapToCaching && !(tokenStream instanceof CachingTokenFilter)) {
assert !cachedTokenStream;
tokenStream = new CachingTokenFilter(new OffsetLimitTokenFilter(tokenStream, maxDocCharsToAnalyze));
cachedTokenStream = true;
}
final MemoryIndex indexer = new MemoryIndex(true);
indexer.addField(DelegatingAtomicReader.FIELD_NAME, tokenStream);
tokenStream.reset();
IndexSearcher searcher = indexer.createSearcher();
final IndexSearcher searcher = indexer.createSearcher();
// MEM index has only atomic ctx
context = (AtomicReaderContext) searcher.getTopReaderContext();
readers.put(field, context);
reader = new DelegatingAtomicReader(((AtomicReaderContext)searcher.getTopReaderContext()).reader());
}
return reader.getContext();
}
/*
* This reader will just delegate every call to a single field in the wrapped
* AtomicReader. This way we only need to build this field once rather than
* N-Times
*/
static final class DelegatingAtomicReader extends FilterAtomicReader {
private static final String FIELD_NAME = "shadowed_field";
DelegatingAtomicReader(AtomicReader in) {
super(in);
}
@Override
public FieldInfos getFieldInfos() {
throw new UnsupportedOperationException();
}
return context;
@Override
public Fields fields() throws IOException {
return new FilterFields(super.fields()) {
@Override
public Terms terms(String field) throws IOException {
return super.terms(DelegatingAtomicReader.FIELD_NAME);
}
@Override
public Iterator<String> iterator() {
return Collections.singletonList(DelegatingAtomicReader.FIELD_NAME).iterator();
}
@Override
public int size() {
return 1;
}
};
}
@Override
public NumericDocValues getNumericDocValues(String field) throws IOException {
return super.getNumericDocValues(FIELD_NAME);
}
@Override
public BinaryDocValues getBinaryDocValues(String field) throws IOException {
return super.getBinaryDocValues(FIELD_NAME);
}
@Override
public SortedDocValues getSortedDocValues(String field) throws IOException {
return super.getSortedDocValues(FIELD_NAME);
}
@Override
public NumericDocValues getNormValues(String field) throws IOException {
return super.getNormValues(FIELD_NAME);
}
}
/**
@ -401,7 +468,7 @@ public class WeightedSpanTermExtractor {
try {
extract(query, terms);
} finally {
closeReaders();
IOUtils.close(reader);
}
return terms;
@ -449,8 +516,7 @@ public class WeightedSpanTermExtractor {
weightedSpanTerm.weight *= idf;
}
} finally {
closeReaders();
IOUtils.close(reader);
}
return terms;

View File

@ -28,9 +28,12 @@ import java.util.Set;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.CommonTermsQuery;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.DisjunctionMaxQuery;
import org.apache.lucene.search.FilteredQuery;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.Query;
@ -92,8 +95,7 @@ public class FieldQuery {
if( !clause.isProhibited() )
flatten( clause.getQuery(), reader, flatQueries );
}
}
else if( sourceQuery instanceof DisjunctionMaxQuery ){
} else if( sourceQuery instanceof DisjunctionMaxQuery ){
DisjunctionMaxQuery dmq = (DisjunctionMaxQuery)sourceQuery;
for( Query query : dmq ){
flatten( query, reader, flatQueries );
@ -103,12 +105,6 @@ public class FieldQuery {
if( !flatQueries.contains( sourceQuery ) )
flatQueries.add( sourceQuery );
}
else if (sourceQuery instanceof MultiTermQuery && reader != null) {
MultiTermQuery copy = (MultiTermQuery) sourceQuery.clone();
copy.setRewriteMethod(new MultiTermQuery.TopTermsScoringBooleanQueryRewrite(MAX_MTQ_TERMS));
BooleanQuery mtqTerms = (BooleanQuery) copy.rewrite(reader);
flatten(mtqTerms, reader, flatQueries);
}
else if( sourceQuery instanceof PhraseQuery ){
if( !flatQueries.contains( sourceQuery ) ){
PhraseQuery pq = (PhraseQuery)sourceQuery;
@ -118,6 +114,31 @@ public class FieldQuery {
flatQueries.add( new TermQuery( pq.getTerms()[0] ) );
}
}
} else if (sourceQuery instanceof ConstantScoreQuery) {
final Query q = ((ConstantScoreQuery) sourceQuery).getQuery();
if (q != null) {
flatten(q, reader, flatQueries);
}
} else if (sourceQuery instanceof FilteredQuery) {
final Query q = ((FilteredQuery) sourceQuery).getQuery();
if (q != null) {
flatten(q, reader, flatQueries);
}
} else if (reader != null){
Query query = sourceQuery;
if (sourceQuery instanceof MultiTermQuery) {
MultiTermQuery copy = (MultiTermQuery) sourceQuery.clone();
copy.setRewriteMethod(new MultiTermQuery.TopTermsScoringBooleanQueryRewrite(MAX_MTQ_TERMS));
query = copy;
}
Query rewritten = query.rewrite(reader);
if (rewritten != query) {
// only rewrite once and then flatten again - the rewritten query could have a speacial treatment
// if this method is overwritten in a subclass.
flatten(rewritten, reader, flatQueries);
}
// if the query is already rewritten we discard it
}
// else discard queries
}

View File

@ -46,6 +46,7 @@ import org.apache.lucene.index.StoredDocument;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.CommonTermsQuery;
import org.apache.lucene.search.*;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.highlight.SynonymTokenizer.TestHighlightRunner;
@ -114,6 +115,87 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
}
}
public void testHighlightingCommonTermsQuery() throws Exception {
Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true);
CommonTermsQuery query = new CommonTermsQuery(Occur.MUST, Occur.SHOULD, 3);
query.add(new Term(FIELD_NAME, "this"));
query.add(new Term(FIELD_NAME, "long"));
query.add(new Term(FIELD_NAME, "very"));
searcher = new IndexSearcher(reader);
TopDocs hits = searcher.search(query, 10);
assertEquals(2, hits.totalHits);
QueryScorer scorer = new QueryScorer(query, FIELD_NAME);
Highlighter highlighter = new Highlighter(scorer);
StoredDocument doc = searcher.doc(hits.scoreDocs[0].doc);
String storedField = doc.get(FIELD_NAME);
TokenStream stream = TokenSources.getAnyTokenStream(searcher
.getIndexReader(), hits.scoreDocs[0].doc, FIELD_NAME, doc, analyzer);
Fragmenter fragmenter = new SimpleSpanFragmenter(scorer);
highlighter.setTextFragmenter(fragmenter);
String fragment = highlighter.getBestFragment(stream, storedField);
assertEquals("Hello <B>this</B> is a piece of text that is <B>very</B> <B>long</B> and contains too much preamble and the meat is really here which says kennedy has been shot", fragment);
doc = searcher.doc(hits.scoreDocs[1].doc);
storedField = doc.get(FIELD_NAME);
stream = TokenSources.getAnyTokenStream(searcher
.getIndexReader(), hits.scoreDocs[1].doc, FIELD_NAME, doc, analyzer);
highlighter.setTextFragmenter(new SimpleSpanFragmenter(scorer));
fragment = highlighter.getBestFragment(stream, storedField);
assertEquals("<B>This</B> piece of text refers to Kennedy at the beginning then has a longer piece of text that is <B>very</B>", fragment);
}
public void testHighlightUnknowQueryAfterRewrite() throws IOException, InvalidTokenOffsetsException {
Query query = new Query() {
@Override
public Query rewrite(IndexReader reader) throws IOException {
CommonTermsQuery query = new CommonTermsQuery(Occur.MUST, Occur.SHOULD, 3);
query.add(new Term(FIELD_NAME, "this"));
query.add(new Term(FIELD_NAME, "long"));
query.add(new Term(FIELD_NAME, "very"));
return query;
}
@Override
public String toString(String field) {
return null;
}
};
Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.SIMPLE, true);
searcher = new IndexSearcher(reader);
TopDocs hits = searcher.search(query, 10);
assertEquals(2, hits.totalHits);
QueryScorer scorer = new QueryScorer(query, FIELD_NAME);
Highlighter highlighter = new Highlighter(scorer);
StoredDocument doc = searcher.doc(hits.scoreDocs[0].doc);
String storedField = doc.get(FIELD_NAME);
TokenStream stream = TokenSources.getAnyTokenStream(searcher
.getIndexReader(), hits.scoreDocs[0].doc, FIELD_NAME, doc, analyzer);
Fragmenter fragmenter = new SimpleSpanFragmenter(scorer);
highlighter.setTextFragmenter(fragmenter);
String fragment = highlighter.getBestFragment(stream, storedField);
assertEquals("Hello <B>this</B> is a piece of text that is <B>very</B> <B>long</B> and contains too much preamble and the meat is really here which says kennedy has been shot", fragment);
doc = searcher.doc(hits.scoreDocs[1].doc);
storedField = doc.get(FIELD_NAME);
stream = TokenSources.getAnyTokenStream(searcher
.getIndexReader(), hits.scoreDocs[1].doc, FIELD_NAME, doc, analyzer);
highlighter.setTextFragmenter(new SimpleSpanFragmenter(scorer));
fragment = highlighter.getBestFragment(stream, storedField);
assertEquals("<B>This</B> piece of text refers to Kennedy at the beginning then has a longer piece of text that is <B>very</B>", fragment);
}
public void testHighlightingWithDefaultField() throws Exception {
String s1 = "I call our world Flatland, not because we call it so,";
@ -150,7 +232,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
"Query in a named field does not result in highlighting when that field isn't in the query",
s1, highlightField(q, FIELD_NAME, s1));
}
/**
* This method intended for use with <tt>testHighlightingWithDefaultField()</tt>
*/
@ -603,7 +685,7 @@ public class HighlighterTest extends BaseTokenStreamTestCase implements Formatte
// Not sure we can assert anything here - just running to check we dont
// throw any exceptions
}
public void testSpanHighlighting() throws Exception {
Query query1 = new SpanNearQuery(new SpanQuery[] {
new SpanTermQuery(new Term(FIELD_NAME, "wordx")),

View File

@ -18,6 +18,8 @@ package org.apache.lucene.search.vectorhighlight;
import java.io.IOException;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.analysis.MockTokenFilter;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
@ -26,7 +28,13 @@ import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.CommonTermsQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.highlight.SimpleSpanFragmenter;
import org.apache.lucene.search.highlight.TokenSources;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
@ -62,4 +70,47 @@ public class FastVectorHighlighterTest extends LuceneTestCase {
writer.close();
dir.close();
}
public void testCommonTermsQueryHighlightTest() throws IOException {
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(random(), MockTokenizer.SIMPLE, true, MockTokenFilter.ENGLISH_STOPSET, true)));
FieldType type = new FieldType(TextField.TYPE_STORED);
type.setStoreTermVectorOffsets(true);
type.setStoreTermVectorPositions(true);
type.setStoreTermVectors(true);
type.freeze();
String[] texts = {
"Hello this is a piece of text that is very long and contains too much preamble and the meat is really here which says kennedy has been shot",
"This piece of text refers to Kennedy at the beginning then has a longer piece of text that is very long in the middle and finally ends with another reference to Kennedy",
"JFK has been shot", "John Kennedy has been shot",
"This text has a typo in referring to Keneddy",
"wordx wordy wordz wordx wordy wordx worda wordb wordy wordc", "y z x y z a b", "lets is a the lets is a the lets is a the lets" };
for (int i = 0; i < texts.length; i++) {
Document doc = new Document();
Field field = new Field("field", texts[i], type);
doc.add(field);
writer.addDocument(doc);
}
CommonTermsQuery query = new CommonTermsQuery(Occur.MUST, Occur.SHOULD, 2);
query.add(new Term("field", "text"));
query.add(new Term("field", "long"));
query.add(new Term("field", "very"));
FastVectorHighlighter highlighter = new FastVectorHighlighter();
IndexReader reader = DirectoryReader.open(writer, true);
IndexSearcher searcher = new IndexSearcher(reader);
TopDocs hits = searcher.search(query, 10);
assertEquals(2, hits.totalHits);
FieldQuery fieldQuery = highlighter.getFieldQuery(query, reader);
String[] bestFragments = highlighter.getBestFragments(fieldQuery, reader, hits.scoreDocs[0].doc, "field", 1000, 1);
assertEquals("This piece of <b>text</b> refers to Kennedy at the beginning then has a longer piece of <b>text</b> that is <b>very</b> <b>long</b> in the middle and finally ends with another reference to Kennedy", bestFragments[0]);
fieldQuery = highlighter.getFieldQuery(query, reader);
bestFragments = highlighter.getBestFragments(fieldQuery, reader, hits.scoreDocs[1].doc, "field", 1000, 1);
assertEquals("Hello this is a piece of <b>text</b> that is <b>very</b> <b>long</b> and contains too much preamble and the meat is really here which says kennedy has been shot", bestFragments[0]);
reader.close();
writer.close();
dir.close();
}
}

View File

@ -23,8 +23,13 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.FilteredQuery;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.Query;
@ -35,6 +40,7 @@ import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.search.WildcardQuery;
import org.apache.lucene.search.vectorhighlight.FieldQuery.QueryPhraseMap;
import org.apache.lucene.search.vectorhighlight.FieldTermStack.TermInfo;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
public class FieldQueryTest extends AbstractTestCase {
@ -905,4 +911,40 @@ public class FieldQueryTest extends AbstractTestCase {
assertNotNull (fq.searchPhrase(F, phraseCandidate));
}
public void testStopRewrite() throws Exception {
Query q = new Query() {
@Override
public String toString(String field) {
return "DummyQuery";
}
};
make1d1fIndex( "a" );
assertNotNull(reader);
new FieldQuery(q, reader, true, true );
}
public void testFlattenFilteredQuery() throws Exception {
Query query = new FilteredQuery(pqF( "A" ), new Filter() {
@Override
public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs)
throws IOException {
return null;
}
});
FieldQuery fq = new FieldQuery( query, true, true );
Set<Query> flatQueries = new HashSet<Query>();
fq.flatten( query, reader, flatQueries );
assertCollectionQueries( flatQueries, tq( "A" ) );
}
public void testFlattenConstantScoreQuery() throws Exception {
Query query = new ConstantScoreQuery(pqF( "A" ));
FieldQuery fq = new FieldQuery( query, true, true );
Set<Query> flatQueries = new HashSet<Query>();
fq.flatten( query, reader, flatQueries );
assertCollectionQueries( flatQueries, tq( "A" ) );
}
}

View File

@ -1 +0,0 @@
dd6ba5c392d4102458494e29f54f70ac534ec2a2

View File

@ -1,29 +0,0 @@
Copyright (c) 2000-2011 INRIA, France Telecom
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the copyright holders nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
THE POSSIBILITY OF SUCH DAMAGE.

View File

@ -1,2 +0,0 @@
ASM - Lightweight Java Bytecode Manipulation Framework
Copyright © 1999-2012, OW2 Consortium

View File

@ -465,7 +465,9 @@ public class MemoryIndex {
throw new RuntimeException(e);
} finally {
try {
if (stream != null) stream.close();
if (stream != null) {
stream.close();
}
} catch (IOException e2) {
throw new RuntimeException(e2);
}

View File

@ -162,7 +162,7 @@ PARSER_END(QueryParser)
| <CARAT: "^" > : Boost
| <QUOTED: "\"" (<_QUOTED_CHAR>)* "\"">
| <TERM: <_TERM_START_CHAR> (<_TERM_CHAR>)* >
| <FUZZY_SLOP: "~" ( (<_NUM_CHAR>)+ ( "." (<_NUM_CHAR>)+ )? )? >
| <FUZZY_SLOP: "~" ((<_NUM_CHAR>)+ (( "." (<_NUM_CHAR>)+ )? (<_TERM_CHAR>)*) | (<_TERM_CHAR>)*) >
| <PREFIXTERM: ("*") | ( <_TERM_START_CHAR> (<_TERM_CHAR>)* "*" ) >
| <WILDTERM: (<_TERM_START_CHAR> | [ "*", "?" ]) (<_TERM_CHAR> | ( [ "*", "?" ] ))* >
| <REGEXPTERM: "/" (~[ "/" ] | "\\/" )* "/" >

View File

@ -1070,22 +1070,29 @@ public abstract class QueryParserBase implements CommonQueryParserConfiguration
} else if (regexp) {
q = getRegexpQuery(qfield, term.image.substring(1, term.image.length()-1));
} else if (fuzzy) {
float fms = fuzzyMinSim;
try {
fms = Float.valueOf(fuzzySlop.image.substring(1)).floatValue();
} catch (Exception ignored) { }
if(fms < 0.0f){
throw new ParseException("Minimum similarity for a FuzzyQuery has to be between 0.0f and 1.0f !");
} else if (fms >= 1.0f && fms != (int) fms) {
throw new ParseException("Fractional edit distances are not allowed!");
}
q = getFuzzyQuery(qfield, termImage, fms);
q = handleBareFuzzy(qfield, fuzzySlop, termImage);
} else {
q = getFieldQuery(qfield, termImage, false);
}
return q;
}
Query handleBareFuzzy(String qfield, Token fuzzySlop, String termImage)
throws ParseException {
Query q;
float fms = fuzzyMinSim;
try {
fms = Float.valueOf(fuzzySlop.image.substring(1)).floatValue();
} catch (Exception ignored) { }
if(fms < 0.0f){
throw new ParseException("Minimum similarity for a FuzzyQuery has to be between 0.0f and 1.0f !");
} else if (fms >= 1.0f && fms != (int) fms) {
throw new ParseException("Fractional edit distances are not allowed!");
}
q = getFuzzyQuery(qfield, termImage, fms);
return q;
}
// extracted from the .jj grammar
Query handleQuotedTerm(String qfield, Token term, Token fuzzySlop) throws ParseException {
int s = phraseSlop; // default

View File

@ -47,7 +47,7 @@ private int jjMoveStringLiteralDfa0_2()
case 41:
return jjStopAtPos(0, 15);
case 42:
return jjStartNfaWithStates_2(0, 17, 43);
return jjStartNfaWithStates_2(0, 17, 49);
case 43:
return jjStartNfaWithStates_2(0, 11, 15);
case 45:
@ -87,7 +87,7 @@ static final long[] jjbitVec4 = {
private int jjMoveNfa_2(int startState, int curPos)
{
int startsAt = 0;
jjnewStateCnt = 43;
jjnewStateCnt = 49;
int i = 1;
jjstateSet[0] = startState;
int kind = 0x7fffffff;
@ -102,12 +102,20 @@ private int jjMoveNfa_2(int startState, int curPos)
{
switch(jjstateSet[--i])
{
case 49:
case 33:
if ((0xfbff7cf8ffffd9ffL & l) == 0L)
break;
if (kind > 23)
kind = 23;
jjCheckNAddTwoStates(33, 34);
break;
case 0:
if ((0xfbff54f8ffffd9ffL & l) != 0L)
{
if (kind > 23)
kind = 23;
jjCheckNAddTwoStates(27, 28);
jjCheckNAddTwoStates(33, 34);
}
else if ((0x100002600L & l) != 0L)
{
@ -139,14 +147,6 @@ private int jjMoveNfa_2(int startState, int curPos)
if (curChar == 38)
jjstateSet[jjnewStateCnt++] = 4;
break;
case 43:
case 27:
if ((0xfbff7cf8ffffd9ffL & l) == 0L)
break;
if (kind > 23)
kind = 23;
jjCheckNAddTwoStates(27, 28);
break;
case 4:
if (curChar == 38 && kind > 8)
kind = 8;
@ -187,7 +187,7 @@ private int jjMoveNfa_2(int startState, int curPos)
break;
if (kind > 21)
kind = 21;
jjAddStates(11, 12);
jjCheckNAddStates(11, 14);
break;
case 23:
if (curChar == 46)
@ -198,62 +198,86 @@ private int jjMoveNfa_2(int startState, int curPos)
break;
if (kind > 21)
kind = 21;
jjCheckNAdd(24);
jjCheckNAddStates(15, 17);
break;
case 25:
if ((0x7bff78f8ffffd9ffL & l) == 0L)
break;
if (kind > 21)
kind = 21;
jjCheckNAddTwoStates(25, 26);
break;
case 27:
if (kind > 21)
kind = 21;
jjCheckNAddTwoStates(25, 26);
break;
case 28:
if ((0x7bff78f8ffffd9ffL & l) == 0L)
break;
if (kind > 21)
kind = 21;
jjCheckNAddTwoStates(28, 29);
break;
case 30:
if (kind > 21)
kind = 21;
jjCheckNAddTwoStates(28, 29);
break;
case 31:
if (curChar == 42 && kind > 22)
kind = 22;
break;
case 26:
case 32:
if ((0xfbff54f8ffffd9ffL & l) == 0L)
break;
if (kind > 23)
kind = 23;
jjCheckNAddTwoStates(27, 28);
jjCheckNAddTwoStates(33, 34);
break;
case 29:
case 35:
if (kind > 23)
kind = 23;
jjCheckNAddTwoStates(27, 28);
jjCheckNAddTwoStates(33, 34);
break;
case 30:
case 32:
case 36:
case 38:
if (curChar == 47)
jjCheckNAddStates(0, 2);
break;
case 31:
case 37:
if ((0xffff7fffffffffffL & l) != 0L)
jjCheckNAddStates(0, 2);
break;
case 34:
case 40:
if (curChar == 47 && kind > 24)
kind = 24;
break;
case 35:
case 41:
if ((0x7bff50f8ffffd9ffL & l) == 0L)
break;
if (kind > 20)
kind = 20;
jjCheckNAddStates(6, 10);
break;
case 36:
case 42:
if ((0x7bff78f8ffffd9ffL & l) == 0L)
break;
if (kind > 20)
kind = 20;
jjCheckNAddTwoStates(36, 37);
jjCheckNAddTwoStates(42, 43);
break;
case 38:
case 44:
if (kind > 20)
kind = 20;
jjCheckNAddTwoStates(36, 37);
jjCheckNAddTwoStates(42, 43);
break;
case 39:
case 45:
if ((0x7bff78f8ffffd9ffL & l) != 0L)
jjCheckNAddStates(13, 15);
jjCheckNAddStates(18, 20);
break;
case 41:
jjCheckNAddStates(13, 15);
case 47:
jjCheckNAddStates(18, 20);
break;
default : break;
}
@ -266,6 +290,16 @@ private int jjMoveNfa_2(int startState, int curPos)
{
switch(jjstateSet[--i])
{
case 49:
if ((0x97ffffff87ffffffL & l) != 0L)
{
if (kind > 23)
kind = 23;
jjCheckNAddTwoStates(33, 34);
}
else if (curChar == 92)
jjCheckNAddTwoStates(35, 35);
break;
case 0:
if ((0x97ffffff87ffffffL & l) != 0L)
{
@ -274,18 +308,18 @@ private int jjMoveNfa_2(int startState, int curPos)
jjCheckNAddStates(6, 10);
}
else if (curChar == 92)
jjCheckNAddStates(16, 18);
jjCheckNAddStates(21, 23);
else if (curChar == 126)
{
if (kind > 21)
kind = 21;
jjstateSet[jjnewStateCnt++] = 22;
jjCheckNAddStates(24, 26);
}
if ((0x97ffffff87ffffffL & l) != 0L)
{
if (kind > 23)
kind = 23;
jjCheckNAddTwoStates(27, 28);
jjCheckNAddTwoStates(33, 34);
}
if (curChar == 78)
jjstateSet[jjnewStateCnt++] = 11;
@ -296,16 +330,6 @@ private int jjMoveNfa_2(int startState, int curPos)
else if (curChar == 65)
jjstateSet[jjnewStateCnt++] = 2;
break;
case 43:
if ((0x97ffffff87ffffffL & l) != 0L)
{
if (kind > 23)
kind = 23;
jjCheckNAddTwoStates(27, 28);
}
else if (curChar == 92)
jjCheckNAddTwoStates(29, 29);
break;
case 1:
if (curChar == 68 && kind > 8)
kind = 8;
@ -362,75 +386,107 @@ private int jjMoveNfa_2(int startState, int curPos)
break;
if (kind > 21)
kind = 21;
jjstateSet[jjnewStateCnt++] = 22;
jjCheckNAddStates(24, 26);
break;
case 25:
if ((0x97ffffff87ffffffL & l) == 0L)
break;
if (kind > 21)
kind = 21;
jjCheckNAddTwoStates(25, 26);
break;
case 26:
if ((0x97ffffff87ffffffL & l) == 0L)
break;
if (kind > 23)
kind = 23;
jjCheckNAddTwoStates(27, 28);
if (curChar == 92)
jjAddStates(27, 28);
break;
case 27:
if (kind > 21)
kind = 21;
jjCheckNAddTwoStates(25, 26);
break;
case 28:
if ((0x97ffffff87ffffffL & l) == 0L)
break;
if (kind > 21)
kind = 21;
jjCheckNAddTwoStates(28, 29);
break;
case 29:
if (curChar == 92)
jjAddStates(29, 30);
break;
case 30:
if (kind > 21)
kind = 21;
jjCheckNAddTwoStates(28, 29);
break;
case 32:
if ((0x97ffffff87ffffffL & l) == 0L)
break;
if (kind > 23)
kind = 23;
jjCheckNAddTwoStates(27, 28);
break;
case 28:
if (curChar == 92)
jjCheckNAddTwoStates(29, 29);
break;
case 29:
if (kind > 23)
kind = 23;
jjCheckNAddTwoStates(27, 28);
break;
case 31:
jjAddStates(0, 2);
jjCheckNAddTwoStates(33, 34);
break;
case 33:
if ((0x97ffffff87ffffffL & l) == 0L)
break;
if (kind > 23)
kind = 23;
jjCheckNAddTwoStates(33, 34);
break;
case 34:
if (curChar == 92)
jjstateSet[jjnewStateCnt++] = 32;
jjCheckNAddTwoStates(35, 35);
break;
case 35:
if (kind > 23)
kind = 23;
jjCheckNAddTwoStates(33, 34);
break;
case 37:
jjAddStates(0, 2);
break;
case 39:
if (curChar == 92)
jjstateSet[jjnewStateCnt++] = 38;
break;
case 41:
if ((0x97ffffff87ffffffL & l) == 0L)
break;
if (kind > 20)
kind = 20;
jjCheckNAddStates(6, 10);
break;
case 36:
case 42:
if ((0x97ffffff87ffffffL & l) == 0L)
break;
if (kind > 20)
kind = 20;
jjCheckNAddTwoStates(36, 37);
jjCheckNAddTwoStates(42, 43);
break;
case 37:
case 43:
if (curChar == 92)
jjCheckNAddTwoStates(38, 38);
jjCheckNAddTwoStates(44, 44);
break;
case 38:
case 44:
if (kind > 20)
kind = 20;
jjCheckNAddTwoStates(36, 37);
jjCheckNAddTwoStates(42, 43);
break;
case 39:
case 45:
if ((0x97ffffff87ffffffL & l) != 0L)
jjCheckNAddStates(13, 15);
jjCheckNAddStates(18, 20);
break;
case 40:
case 46:
if (curChar == 92)
jjCheckNAddTwoStates(41, 41);
jjCheckNAddTwoStates(47, 47);
break;
case 41:
jjCheckNAddStates(13, 15);
case 47:
jjCheckNAddStates(18, 20);
break;
case 42:
case 48:
if (curChar == 92)
jjCheckNAddStates(16, 18);
jjCheckNAddStates(21, 23);
break;
default : break;
}
@ -447,6 +503,14 @@ private int jjMoveNfa_2(int startState, int curPos)
{
switch(jjstateSet[--i])
{
case 49:
case 33:
if (!jjCanMove_2(hiByte, i1, i2, l1, l2))
break;
if (kind > 23)
kind = 23;
jjCheckNAddTwoStates(33, 34);
break;
case 0:
if (jjCanMove_0(hiByte, i1, i2, l1, l2))
{
@ -457,7 +521,7 @@ private int jjMoveNfa_2(int startState, int curPos)
{
if (kind > 23)
kind = 23;
jjCheckNAddTwoStates(27, 28);
jjCheckNAddTwoStates(33, 34);
}
if (jjCanMove_2(hiByte, i1, i2, l1, l2))
{
@ -466,14 +530,6 @@ private int jjMoveNfa_2(int startState, int curPos)
jjCheckNAddStates(6, 10);
}
break;
case 43:
case 27:
if (!jjCanMove_2(hiByte, i1, i2, l1, l2))
break;
if (kind > 23)
kind = 23;
jjCheckNAddTwoStates(27, 28);
break;
case 15:
if (jjCanMove_0(hiByte, i1, i2, l1, l2) && kind > 13)
kind = 13;
@ -483,52 +539,80 @@ private int jjMoveNfa_2(int startState, int curPos)
if (jjCanMove_1(hiByte, i1, i2, l1, l2))
jjCheckNAddStates(3, 5);
break;
case 26:
case 25:
if (!jjCanMove_2(hiByte, i1, i2, l1, l2))
break;
if (kind > 21)
kind = 21;
jjCheckNAddTwoStates(25, 26);
break;
case 27:
if (!jjCanMove_1(hiByte, i1, i2, l1, l2))
break;
if (kind > 21)
kind = 21;
jjCheckNAddTwoStates(25, 26);
break;
case 28:
if (!jjCanMove_2(hiByte, i1, i2, l1, l2))
break;
if (kind > 21)
kind = 21;
jjCheckNAddTwoStates(28, 29);
break;
case 30:
if (!jjCanMove_1(hiByte, i1, i2, l1, l2))
break;
if (kind > 21)
kind = 21;
jjCheckNAddTwoStates(28, 29);
break;
case 32:
if (!jjCanMove_2(hiByte, i1, i2, l1, l2))
break;
if (kind > 23)
kind = 23;
jjCheckNAddTwoStates(27, 28);
jjCheckNAddTwoStates(33, 34);
break;
case 29:
case 35:
if (!jjCanMove_1(hiByte, i1, i2, l1, l2))
break;
if (kind > 23)
kind = 23;
jjCheckNAddTwoStates(27, 28);
jjCheckNAddTwoStates(33, 34);
break;
case 31:
case 37:
if (jjCanMove_1(hiByte, i1, i2, l1, l2))
jjAddStates(0, 2);
break;
case 35:
case 41:
if (!jjCanMove_2(hiByte, i1, i2, l1, l2))
break;
if (kind > 20)
kind = 20;
jjCheckNAddStates(6, 10);
break;
case 36:
case 42:
if (!jjCanMove_2(hiByte, i1, i2, l1, l2))
break;
if (kind > 20)
kind = 20;
jjCheckNAddTwoStates(36, 37);
jjCheckNAddTwoStates(42, 43);
break;
case 38:
case 44:
if (!jjCanMove_1(hiByte, i1, i2, l1, l2))
break;
if (kind > 20)
kind = 20;
jjCheckNAddTwoStates(36, 37);
jjCheckNAddTwoStates(42, 43);
break;
case 39:
case 45:
if (jjCanMove_2(hiByte, i1, i2, l1, l2))
jjCheckNAddStates(13, 15);
jjCheckNAddStates(18, 20);
break;
case 41:
case 47:
if (jjCanMove_1(hiByte, i1, i2, l1, l2))
jjCheckNAddStates(13, 15);
jjCheckNAddStates(18, 20);
break;
default : break;
}
@ -541,7 +625,7 @@ private int jjMoveNfa_2(int startState, int curPos)
kind = 0x7fffffff;
}
++curPos;
if ((i = jjnewStateCnt) == (startsAt = 43 - (jjnewStateCnt = startsAt)))
if ((i = jjnewStateCnt) == (startsAt = 49 - (jjnewStateCnt = startsAt)))
return curPos;
try { curChar = input_stream.readChar(); }
catch(java.io.IOException e) { return curPos; }
@ -574,7 +658,7 @@ private int jjMoveNfa_0(int startState, int curPos)
break;
if (kind > 27)
kind = 27;
jjAddStates(19, 20);
jjAddStates(31, 32);
break;
case 1:
if (curChar == 46)
@ -728,11 +812,11 @@ private int jjMoveNfa_1(int startState, int curPos)
break;
case 2:
if ((0xfffffffbffffffffL & l) != 0L)
jjCheckNAddStates(21, 23);
jjCheckNAddStates(33, 35);
break;
case 3:
if (curChar == 34)
jjCheckNAddStates(21, 23);
jjCheckNAddStates(33, 35);
break;
case 5:
if (curChar == 34 && kind > 31)
@ -765,7 +849,7 @@ private int jjMoveNfa_1(int startState, int curPos)
jjCheckNAdd(6);
break;
case 2:
jjAddStates(21, 23);
jjAddStates(33, 35);
break;
case 4:
if (curChar == 92)
@ -801,7 +885,7 @@ private int jjMoveNfa_1(int startState, int curPos)
break;
case 2:
if (jjCanMove_1(hiByte, i1, i2, l1, l2))
jjAddStates(21, 23);
jjAddStates(33, 35);
break;
case 6:
if (!jjCanMove_1(hiByte, i1, i2, l1, l2))
@ -828,8 +912,9 @@ private int jjMoveNfa_1(int startState, int curPos)
}
}
static final int[] jjnextStates = {
31, 33, 34, 17, 18, 20, 36, 39, 25, 40, 37, 22, 23, 39, 25, 40,
38, 41, 29, 0, 1, 2, 4, 5,
37, 39, 40, 17, 18, 20, 42, 45, 31, 46, 43, 22, 23, 25, 26, 24,
25, 26, 45, 31, 46, 44, 47, 35, 22, 28, 29, 27, 27, 30, 30, 0,
1, 2, 4, 5,
};
private static final boolean jjCanMove_0(int hiByte, int i1, int i2, long l1, long l2)
{
@ -893,8 +978,8 @@ static final long[] jjtoSkip = {
0x80L,
};
protected CharStream input_stream;
private final int[] jjrounds = new int[43];
private final int[] jjstateSet = new int[86];
private final int[] jjrounds = new int[49];
private final int[] jjstateSet = new int[98];
protected char curChar;
/** Constructor. */
public QueryParserTokenManager(CharStream stream){
@ -919,7 +1004,7 @@ private void ReInitRounds()
{
int i;
jjround = 0x80000001;
for (i = 43; i-- > 0;)
for (i = 49; i-- > 0;)
jjrounds[i] = 0x80000000;
}

View File

@ -167,11 +167,11 @@ enabling substantial customization to how a query is created.
If you want to find the document entitled "The Right Way" which contains the text "don't go this way", you can enter: </p>
<pre class="code">title:"The Right Way" AND text:go</pre>
<p>or</p>
<pre class="code">title:"Do it right" AND right</pre>
<pre class="code">title:"The Right Way" AND go</pre>
<p>Since text is the default field, the field indicator is not required.</p>
<p>Note: The field is only valid for the term that it directly precedes, so the query</p>
<pre class="code">title:Do it right</pre>
<p>Will only find "Do" in the title field. It will find "it" and "right" in the default field (in this case the text field). </p>
<pre class="code">title:The Right Way</pre>
<p>Will only find "The" in the title field. It will find "Right" and "Way" in the default field (in this case the text field). </p>
</div>

View File

@ -160,6 +160,28 @@ public class TestQueryParser extends QueryParserTestBase {
}
}
public void testFuzzySlopeExtendability() throws ParseException {
  QueryParser qp = new QueryParser(TEST_VERSION_CURRENT, "a", new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)) {
    @Override
    Query handleBareFuzzy(String qfield, Token fuzzySlop, String termImage)
        throws ParseException {
      // Custom extension: a term of the form "<value>~<width>€" is parsed as
      // the inclusive numeric range [value - width/2, value + width/2]
      // instead of a fuzzy query.
      // NOTE(review): this chunk had endsWith("") here, which is true for
      // every string; the '€' suffix literal was evidently lost to a
      // character-encoding mangle. Restored to match the "12.45~1€" query
      // exercised by the assertion below.
      if (fuzzySlop.image.endsWith("€")) {
        float fms = fuzzyMinSim;
        try {
          // fuzzySlop.image is "~<width>€": strip the leading '~' and the
          // trailing '€' to obtain the numeric width.
          fms = Float.valueOf(fuzzySlop.image.substring(1, fuzzySlop.image.length() - 1)).floatValue();
        } catch (Exception ignored) {
          // Malformed width: fall back to the default fuzzyMinSim.
        }
        float value = Float.parseFloat(termImage);
        return getRangeQuery(qfield, Float.toString(value - fms / 2.f),
            Float.toString(value + fms / 2.f), true, true);
      }
      // Anything else keeps the standard bare-fuzzy handling.
      return super.handleBareFuzzy(qfield, fuzzySlop, termImage);
    }
  };
  // "12.45~1€" => range [12.45 - 0.5, 12.45 + 0.5] = [11.95 TO 12.95].
  assertEquals(qp.parse("a:[11.95 TO 12.95]"), qp.parse("12.45~1€"));
}
@Override
public void testStarParsing() throws Exception {
final int[] type = new int[1];

View File

@ -721,9 +721,7 @@ public abstract class BaseTermVectorsFormatTestCase extends LuceneTestCase {
reader.close();
writer.close();
dir.close();
if (exception.get() != null) {
throw new RuntimeException("One thread threw an exception", exception.get());
}
assertNull("One thread threw an exception", exception.get());
}
}

View File

@ -25,7 +25,6 @@
<import file="../common-build.xml"/>
<path id="classpath">
<fileset dir="lib"/>
</path>
<!--

View File

@ -10,7 +10,6 @@
<taskdef resource="lucene-solr.antlib.xml">
<classpath>
<pathelement location="${custom-tasks.dir}/../build/tools/classes/java" />
<fileset dir="${custom-tasks.dir}/lib" includes="asm-debug-all-4.1.jar" />
</classpath>
</taskdef>
<property name="custom-tasks.loaded" value="true"/>

View File

@ -1,35 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# These methods and classes from commons-io should not be used by Solr classes (unsafe, no charset,...):
org.apache.commons.io.IOUtils#copy(java.io.InputStream,java.io.Writer)
org.apache.commons.io.IOUtils#copy(java.io.Reader,java.io.OutputStream)
org.apache.commons.io.IOUtils#readLines(java.io.InputStream)
org.apache.commons.io.IOUtils#toByteArray(java.io.Reader)
org.apache.commons.io.IOUtils#toByteArray(java.lang.String)
org.apache.commons.io.IOUtils#toCharArray(java.io.InputStream)
org.apache.commons.io.IOUtils#toInputStream(java.lang.CharSequence)
org.apache.commons.io.IOUtils#toInputStream(java.lang.String)
org.apache.commons.io.IOUtils#toString(byte[])
org.apache.commons.io.IOUtils#toString(java.io.InputStream)
org.apache.commons.io.IOUtils#toString(java.net.URI)
org.apache.commons.io.IOUtils#toString(java.net.URL)
org.apache.commons.io.IOUtils#write(byte[],java.io.Writer)
org.apache.commons.io.IOUtils#write(char[],java.io.OutputStream)
org.apache.commons.io.IOUtils#write(java.lang.CharSequence,java.io.OutputStream)
org.apache.commons.io.IOUtils#write(java.lang.StringBuffer,java.io.OutputStream)
org.apache.commons.io.IOUtils#write(java.lang.String,java.io.OutputStream)
org.apache.commons.io.IOUtils#writeLines(java.util.Collection,java.lang.String,java.io.OutputStream)

View File

@ -1,441 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# these are all deprecated in jdk 6: make them fail the build (java[x].* only)
# http://docs.oracle.com/javase/6/docs/api/deprecated-list.html
# TODO: would be great to find a simple way to autogen this
java.security.Certificate
java.rmi.server.LoaderHandler
java.rmi.registry.RegistryHandler
java.rmi.server.RemoteCall
java.rmi.server.Skeleton
javax.xml.bind.Validator
javax.accessibility.AccessibleResourceBundle
javax.management.DefaultLoaderRepository
javax.management.loading.DefaultLoaderRepository
javax.swing.text.DefaultTextUI
java.security.Identity
java.security.IdentityScope
java.io.LineNumberInputStream
java.rmi.server.LogStream
javax.swing.plaf.metal.MetalComboBoxUI$MetalComboPopup
java.rmi.server.Operation
javax.security.auth.Policy
java.security.Signer
javax.xml.soap.SOAPElementFactory
java.io.StringBufferInputStream
javax.swing.text.TableView$TableCell
java.rmi.RMISecurityException
java.rmi.ServerRuntimeException
java.rmi.server.SkeletonMismatchException
java.rmi.server.SkeletonNotFoundException
javax.jws.soap.InitParam
javax.jws.soap.SOAPMessageHandler
javax.jws.soap.SOAPMessageHandlers
javax.management.openmbean.OpenType#ALLOWED_CLASSNAMES
javax.management.monitor.Monitor#alreadyNotified
javax.swing.JViewport#backingStore
javax.swing.plaf.basic.BasicDesktopPaneUI#closeKey
java.awt.Frame#CROSSHAIR_CURSOR
javax.management.monitor.Monitor#dbgTag
java.awt.Frame#DEFAULT_CURSOR
javax.swing.JRootPane#defaultPressAction
javax.swing.JRootPane#defaultReleaseAction
javax.swing.plaf.basic.BasicSplitPaneUI#dividerResizeToggleKey
javax.swing.plaf.basic.BasicToolBarUI#downKey
javax.swing.plaf.basic.BasicTabbedPaneUI#downKey
javax.swing.plaf.basic.BasicSplitPaneUI#downKey
java.awt.Frame#E_RESIZE_CURSOR
javax.swing.plaf.basic.BasicSplitPaneUI#endKey
java.util.logging.Logger#global
java.awt.Frame#HAND_CURSOR
javax.swing.plaf.basic.BasicSplitPaneUI#homeKey
java.net.HttpURLConnection#HTTP_SERVER_ERROR
java.lang.SecurityManager#inCheck
javax.swing.plaf.basic.BasicSplitPaneUI#keyboardDownRightListener
javax.swing.plaf.basic.BasicSplitPaneUI#keyboardEndListener
javax.swing.plaf.basic.BasicSplitPaneUI#keyboardHomeListener
javax.swing.plaf.basic.BasicSplitPaneUI#keyboardResizeToggleListener
javax.swing.plaf.basic.BasicSplitPaneUI#keyboardUpLeftListener
javax.swing.plaf.basic.BasicToolBarUI#leftKey
javax.swing.plaf.basic.BasicTabbedPaneUI#leftKey
javax.swing.plaf.basic.BasicSplitPaneUI#leftKey
javax.swing.plaf.basic.BasicDesktopPaneUI#maximizeKey
javax.swing.plaf.basic.BasicDesktopPaneUI#minimizeKey
java.awt.Frame#MOVE_CURSOR
java.awt.Frame#N_RESIZE_CURSOR
javax.swing.plaf.basic.BasicDesktopPaneUI#navigateKey
javax.swing.plaf.basic.BasicDesktopPaneUI#navigateKey2
java.awt.Frame#NE_RESIZE_CURSOR
java.awt.Frame#NW_RESIZE_CURSOR
javax.swing.plaf.basic.BasicInternalFrameUI#openMenuKey
java.awt.datatransfer.DataFlavor#plainTextFlavor
javax.swing.text.html.FormView#RESET
javax.swing.table.TableColumn#resizedPostingDisableCount
javax.swing.plaf.basic.BasicToolBarUI#rightKey
javax.swing.plaf.basic.BasicTabbedPaneUI#rightKey
javax.swing.plaf.basic.BasicSplitPaneUI#rightKey
java.awt.Frame#S_RESIZE_CURSOR
java.awt.Frame#SE_RESIZE_CURSOR
javax.swing.text.html.FormView#SUBMIT
java.lang.Character$UnicodeBlock#SURROGATES_AREA
java.awt.Frame#SW_RESIZE_CURSOR
java.awt.Frame#TEXT_CURSOR
javax.swing.plaf.basic.BasicToolBarUI#upKey
javax.swing.plaf.basic.BasicTabbedPaneUI#upKey
javax.swing.plaf.basic.BasicSplitPaneUI#upKey
java.awt.Frame#W_RESIZE_CURSOR
java.awt.Frame#WAIT_CURSOR
java.awt.Component#action(java.awt.Event,java.lang.Object)
java.awt.List#addItem(java.lang.String)
java.awt.List#addItem(java.lang.String,int)
java.awt.CardLayout#addLayoutComponent(java.lang.String,java.awt.Component)
java.awt.BorderLayout#addLayoutComponent(java.lang.String,java.awt.Component)
java.awt.List#allowsMultipleSelections()
java.lang.ThreadGroup#allowThreadSuspension(boolean)
java.awt.TextArea#appendText(java.lang.String)
java.awt.Window#applyResourceBundle(java.util.ResourceBundle)
java.awt.Window#applyResourceBundle(java.lang.String)
java.awt.Component#bounds()
java.lang.SecurityManager#checkMulticast(java.net.InetAddress,byte)
java.lang.SecurityManager#classDepth(java.lang.String)
java.lang.SecurityManager#classLoaderDepth()
java.awt.List#clear()
java.awt.image.renderable.RenderContext#concetenateTransform(java.awt.geom.AffineTransform)
java.awt.Container#countComponents()
java.awt.Menu#countItems()
java.awt.Choice#countItems()
java.awt.List#countItems()
java.awt.MenuBar#countMenus()
java.lang.Thread#countStackFrames()
javax.xml.soap.SOAPElementFactory#create(javax.xml.soap.Name)
javax.xml.soap.SOAPElementFactory#create(java.lang.String)
javax.xml.soap.SOAPElementFactory#create(java.lang.String,java.lang.String,java.lang.String)
javax.swing.plaf.basic.BasicSplitPaneUI#createKeyboardDownRightListener()
javax.swing.plaf.basic.BasicSplitPaneUI#createKeyboardEndListener()
javax.swing.plaf.basic.BasicSplitPaneUI#createKeyboardHomeListener()
javax.swing.plaf.basic.BasicSplitPaneUI#createKeyboardResizeToggleListener()
javax.swing.plaf.basic.BasicSplitPaneUI#createKeyboardUpLeftListener()
javax.swing.JTable#createScrollPaneForTable(javax.swing.JTable)
javax.swing.text.TableView#createTableCell(javax.swing.text.Element)
javax.xml.bind.JAXBContext#createValidator()
java.lang.SecurityManager#currentClassLoader()
java.lang.SecurityManager#currentLoadedClass()
java.net.URLDecoder#decode(java.lang.String)
java.lang.ClassLoader#defineClass(byte[],int,int)
java.awt.List#delItem(int)
java.awt.List#delItems(int,int)
java.awt.Container#deliverEvent(java.awt.Event)
java.awt.Component#deliverEvent(java.awt.Event)
javax.management.MBeanServer#deserialize(javax.management.ObjectName,byte[])
javax.management.MBeanServer#deserialize(java.lang.String,byte[])
javax.management.MBeanServer#deserialize(java.lang.String,javax.management.ObjectName,byte[])
java.lang.Thread#destroy()
java.awt.MenuItem#disable()
java.awt.Component#disable()
javax.swing.JComponent#disable()
javax.swing.table.TableColumn#disableResizedPosting()
javax.swing.FocusManager#disableSwingFocusManager()
java.rmi.server.Skeleton#dispatch(java.rmi.Remote,java.rmi.server.RemoteCall,int,long)
java.rmi.server.RemoteCall#done()
java.rmi.server.RemoteRef#done(java.rmi.server.RemoteCall)
javax.swing.plaf.metal.MetalComboBoxUI#editablePropertyChanged(java.beans.PropertyChangeEvent)
java.awt.MenuItem#enable()
java.awt.Component#enable()
javax.swing.JComponent#enable()
java.awt.MenuItem#enable(boolean)
java.awt.Component#enable(boolean)
javax.swing.table.TableColumn#enableResizedPosting()
java.net.URLEncoder#encode(java.lang.String)
java.security.SignatureSpi#engineGetParameter(java.lang.String)
java.security.SignatureSpi#engineSetParameter(java.lang.String,java.lang.Object)
java.awt.datatransfer.DataFlavor#equals(java.lang.String)
java.rmi.server.RemoteCall#executeCall()
javax.swing.SwingUtilities#findFocusOwner(java.awt.Component)
java.security.Security#getAlgorithmProperty(java.lang.String,java.lang.String)
java.sql.CallableStatement#getBigDecimal(int,int)
java.sql.ResultSet#getBigDecimal(int,int)
java.sql.ResultSet#getBigDecimal(java.lang.String,int)
java.awt.Polygon#getBoundingBox()
java.lang.String#getBytes(int,int,byte[],int)
java.awt.Graphics#getClipRect()
javax.swing.JPopupMenu#getComponentAtIndex(int)
javax.swing.JMenuBar#getComponentAtIndex(int)
java.awt.CheckboxGroup#getCurrent()
java.awt.Frame#getCursorType()
java.sql.Time#getDate()
java.util.Date#getDate()
java.sql.Time#getDay()
java.util.Date#getDay()
java.net.URLConnection#getDefaultRequestProperty(java.lang.String)
java.rmi.server.LogStream#getDefaultStream()
javax.management.monitor.StringMonitorMBean#getDerivedGauge()
javax.management.monitor.StringMonitor#getDerivedGauge()
javax.management.monitor.GaugeMonitorMBean#getDerivedGauge()
javax.management.monitor.GaugeMonitor#getDerivedGauge()
javax.management.monitor.CounterMonitorMBean#getDerivedGauge()
javax.management.monitor.CounterMonitor#getDerivedGauge()
javax.management.monitor.StringMonitorMBean#getDerivedGaugeTimeStamp()
javax.management.monitor.StringMonitor#getDerivedGaugeTimeStamp()
javax.management.monitor.GaugeMonitorMBean#getDerivedGaugeTimeStamp()
javax.management.monitor.GaugeMonitor#getDerivedGaugeTimeStamp()
javax.management.monitor.CounterMonitorMBean#getDerivedGaugeTimeStamp()
javax.management.monitor.CounterMonitor#getDerivedGaugeTimeStamp()
javax.swing.plaf.basic.BasicSplitPaneUI#getDividerBorderSize()
javax.xml.bind.Validator#getEventHandler()
java.awt.Toolkit#getFontList()
javax.swing.text.LabelView#getFontMetrics()
java.awt.Toolkit#getFontMetrics(java.awt.Font)
java.awt.Toolkit#getFontPeer(java.lang.String,int)
java.sql.Date#getHours()
java.util.Date#getHours()
java.lang.SecurityManager#getInCheck()
java.rmi.server.RemoteCall#getInputStream()
javax.swing.KeyStroke#getKeyStroke(char,boolean)
javax.swing.AbstractButton#getLabel()
java.awt.Scrollbar#getLineIncrement()
java.lang.Runtime#getLocalizedInputStream(java.io.InputStream)
java.lang.Runtime#getLocalizedOutputStream(java.io.OutputStream)
java.sql.DriverManager#getLogStream()
java.awt.FontMetrics#getMaxDecent()
javax.swing.JInternalFrame#getMenuBar()
javax.swing.JRootPane#getMenuBar()
java.sql.Date#getMinutes()
java.util.Date#getMinutes()
java.sql.Time#getMonth()
java.util.Date#getMonth()
javax.swing.JComponent#getNextFocusableComponent()
javax.management.monitor.MonitorMBean#getObservedObject()
javax.management.monitor.Monitor#getObservedObject()
java.rmi.server.Operation#getOperation()
java.rmi.server.Skeleton#getOperations()
java.awt.ComponentOrientation#getOrientation(java.util.ResourceBundle)
java.rmi.server.LogStream#getOutputStream()
java.rmi.server.RemoteCall#getOutputStream()
java.awt.Scrollbar#getPageIncrement()
java.security.Signature#getParameter(java.lang.String)
java.awt.MenuComponent#getPeer()
java.awt.Component#getPeer()
java.awt.Font#getPeer()
javax.xml.bind.Validator#getProperty(java.lang.String)
java.rmi.server.RemoteCall#getResultStream(boolean)
java.sql.Date#getSeconds()
java.util.Date#getSeconds()
java.rmi.server.LoaderHandler#getSecurityContext(java.lang.ClassLoader)
java.rmi.server.RMIClassLoader#getSecurityContext(java.lang.ClassLoader)
javax.swing.JPasswordField#getText()
javax.swing.JPasswordField#getText(int,int)
javax.management.monitor.CounterMonitorMBean#getThreshold()
javax.management.monitor.CounterMonitor#getThreshold()
java.util.Date#getTimezoneOffset()
java.net.MulticastSocket#getTTL()
java.net.DatagramSocketImpl#getTTL()
java.sql.ResultSet#getUnicodeStream(int)
java.sql.ResultSet#getUnicodeStream(java.lang.String)
javax.swing.ScrollPaneLayout#getViewportBorderBounds(javax.swing.JScrollPane)
java.awt.Scrollbar#getVisible()
java.sql.Time#getYear()
java.util.Date#getYear()
java.awt.Component#gotFocus(java.awt.Event,java.lang.Object)
java.awt.Component#handleEvent(java.awt.Event)
java.awt.Dialog#hide()
java.awt.Window#hide()
java.awt.Component#hide()
java.lang.SecurityManager#inClass(java.lang.String)
java.lang.SecurityManager#inClassLoader()
javax.swing.text.html.HTMLEditorKit$InsertHTMLTextAction#insertAtBoundry(javax.swing.JEditorPane,javax.swing.text.html.HTMLDocument,int,javax.swing.text.Element,java.lang.String,javax.swing.text.html.HTML$Tag,javax.swing.text.html.HTML$Tag)
java.awt.TextArea#insertText(java.lang.String,int)
java.awt.Container#insets()
java.awt.Rectangle#inside(int,int)
java.awt.Component#inside(int,int)
java.awt.Polygon#inside(int,int)
java.rmi.server.RemoteRef#invoke(java.rmi.server.RemoteCall)
javax.swing.JViewport#isBackingStoreEnabled()
javax.swing.FocusManager#isFocusManagerEnabled()
java.awt.Component#isFocusTraversable()
java.lang.Character#isJavaLetter(char)
java.lang.Character#isJavaLetterOrDigit(char)
javax.swing.JComponent#isManagingFocus()
java.awt.List#isSelected(int)
java.lang.Character#isSpace(char)
java.rmi.dgc.VMID#isUnique()
javax.xml.bind.Unmarshaller#isValidating()
java.awt.Component#keyDown(java.awt.Event,int)
java.awt.Component#keyUp(java.awt.Event,int)
java.awt.Container#layout()
java.awt.ScrollPane#layout()
java.awt.Component#layout()
java.rmi.server.LoaderHandler#loadClass(java.lang.String)
java.rmi.server.RMIClassLoader#loadClass(java.lang.String)
java.rmi.server.LoaderHandler#loadClass(java.net.URL,java.lang.String)
java.awt.Container#locate(int,int)
java.awt.Component#locate(int,int)
java.awt.Component#location()
java.rmi.server.LogStream#log(java.lang.String)
java.awt.Component#lostFocus(java.awt.Event,java.lang.Object)
java.awt.TextArea#minimumSize()
java.awt.Container#minimumSize()
java.awt.List#minimumSize()
java.awt.TextField#minimumSize()
java.awt.Component#minimumSize()
java.awt.List#minimumSize(int)
java.awt.TextField#minimumSize(int)
java.awt.TextArea#minimumSize(int,int)
javax.swing.text.View#modelToView(int,java.awt.Shape)
java.awt.Component#mouseDown(java.awt.Event,int,int)
java.awt.Component#mouseDrag(java.awt.Event,int,int)
java.awt.Component#mouseEnter(java.awt.Event,int,int)
java.awt.Component#mouseExit(java.awt.Event,int,int)
java.awt.Component#mouseMove(java.awt.Event,int,int)
java.awt.Component#mouseUp(java.awt.Event,int,int)
java.awt.Rectangle#move(int,int)
java.awt.Component#move(int,int)
java.rmi.server.RemoteRef#newCall(java.rmi.server.RemoteObject,java.rmi.server.Operation[],int,long)
javax.xml.stream.XMLOutputFactory#newInstance(java.lang.String,java.lang.ClassLoader)
javax.xml.stream.XMLInputFactory#newInstance(java.lang.String,java.lang.ClassLoader)
javax.xml.stream.XMLEventFactory#newInstance(java.lang.String,java.lang.ClassLoader)
java.awt.Component#nextFocus()
java.awt.datatransfer.DataFlavor#normalizeMimeType(java.lang.String)
javax.activation.ActivationDataFlavor#normalizeMimeType(java.lang.String)
java.awt.datatransfer.DataFlavor#normalizeMimeTypeParameter(java.lang.String,java.lang.String)
javax.activation.ActivationDataFlavor#normalizeMimeTypeParameter(java.lang.String,java.lang.String)
java.util.Date#parse(java.lang.String)
java.rmi.server.LogStream#parseLevel(java.lang.String)
java.awt.MenuComponent#postEvent(java.awt.Event)
java.awt.Window#postEvent(java.awt.Event)
java.awt.Component#postEvent(java.awt.Event)
java.awt.MenuContainer#postEvent(java.awt.Event)
java.awt.image.renderable.RenderContext#preConcetenateTransform(java.awt.geom.AffineTransform)
java.awt.TextArea#preferredSize()
java.awt.Container#preferredSize()
java.awt.List#preferredSize()
java.awt.TextField#preferredSize()
java.awt.Component#preferredSize()
java.awt.List#preferredSize(int)
java.awt.TextField#preferredSize(int)
java.awt.TextArea#preferredSize(int,int)
java.io.DataInputStream#readLine()
java.io.ObjectInputStream#readLine()
java.rmi.registry.RegistryHandler#registryImpl(int)
java.rmi.registry.RegistryHandler#registryStub(java.lang.String,int)
java.rmi.server.RemoteCall#releaseInputStream()
java.rmi.server.RemoteCall#releaseOutputStream()
javax.swing.plaf.metal.MetalComboBoxUI#removeListeners()
java.awt.TextArea#replaceText(java.lang.String,int,int)
javax.swing.JComponent#requestDefaultFocus()
java.awt.Window#reshape(int,int,int,int)
java.awt.Rectangle#reshape(int,int,int,int)
java.awt.Component#reshape(int,int,int,int)
javax.swing.JComponent#reshape(int,int,int,int)
java.awt.Component#resize(java.awt.Dimension)
java.awt.Rectangle#resize(int,int)
java.awt.Component#resize(int,int)
java.lang.Thread#resume()
java.lang.ThreadGroup#resume()
java.lang.System#runFinalizersOnExit(boolean)
java.lang.Runtime#runFinalizersOnExit(boolean)
java.util.Properties#save(java.io.OutputStream,java.lang.String)
java.net.MulticastSocket#send(java.net.DatagramPacket,byte)
javax.swing.JViewport#setBackingStoreEnabled(boolean)
java.awt.CheckboxGroup#setCurrent(java.awt.Checkbox)
java.awt.Frame#setCursor(int)
java.sql.Time#setDate(int)
java.util.Date#setDate(int)
java.net.URLConnection#setDefaultRequestProperty(java.lang.String,java.lang.String)
java.rmi.server.LogStream#setDefaultStream(java.io.PrintStream)
java.awt.TextField#setEchoCharacter(char)
javax.xml.bind.Validator#setEventHandler(javax.xml.bind.ValidationEventHandler)
java.sql.Date#setHours(int)
java.util.Date#setHours(int)
javax.swing.AbstractButton#setLabel(java.lang.String)
java.awt.Scrollbar#setLineIncrement(int)
java.sql.DriverManager#setLogStream(java.io.PrintStream)
javax.management.ValueExp#setMBeanServer(javax.management.MBeanServer)
javax.swing.JInternalFrame#setMenuBar(javax.swing.JMenuBar)
javax.swing.JRootPane#setMenuBar(javax.swing.JMenuBar)
java.sql.Date#setMinutes(int)
java.util.Date#setMinutes(int)
java.awt.event.KeyEvent#setModifiers(int)
java.sql.Time#setMonth(int)
java.util.Date#setMonth(int)
java.awt.List#setMultipleSelections(boolean)
javax.swing.JComponent#setNextFocusableComponent(java.awt.Component)
javax.management.monitor.MonitorMBean#setObservedObject(javax.management.ObjectName)
javax.management.monitor.Monitor#setObservedObject(javax.management.ObjectName)
java.rmi.server.LogStream#setOutputStream(java.io.OutputStream)
java.awt.Scrollbar#setPageIncrement(int)
java.security.Signature#setParameter(java.lang.String,java.lang.Object)
javax.xml.bind.Validator#setProperty(java.lang.String,java.lang.Object)
java.rmi.server.RemoteStub#setRef(java.rmi.server.RemoteStub,java.rmi.server.RemoteRef)
java.sql.Date#setSeconds(int)
java.util.Date#setSeconds(int)
javax.management.monitor.CounterMonitorMBean#setThreshold(java.lang.Number)
javax.management.monitor.CounterMonitor#setThreshold(java.lang.Number)
java.net.MulticastSocket#setTTL(byte)
java.net.DatagramSocketImpl#setTTL(byte)
java.sql.PreparedStatement#setUnicodeStream(int,java.io.InputStream,int)
javax.sql.rowset.BaseRowSet#setUnicodeStream(int,java.io.InputStream,int)
java.net.URLStreamHandler#setURL(java.net.URL,java.lang.String,java.lang.String,int,java.lang.String,java.lang.String)
javax.xml.bind.Unmarshaller#setValidating(boolean)
java.sql.Time#setYear(int)
java.util.Date#setYear(int)
java.awt.Dialog#show()
java.awt.Window#show()
java.awt.Component#show()
java.awt.Component#show(boolean)
java.awt.Component#size()
javax.swing.JTable#sizeColumnsToFit(boolean)
java.lang.Thread#stop()
java.lang.ThreadGroup#stop()
java.lang.Thread#stop(java.lang.Throwable)
java.lang.Thread#suspend()
java.lang.ThreadGroup#suspend()
java.util.Date#toGMTString()
java.util.Date#toLocaleString()
java.rmi.server.LogStream#toString()
java.rmi.server.Operation#toString()
java.io.ByteArrayOutputStream#toString(int)
java.util.Date#UTC(int,int,int,int,int,int)
javax.xml.bind.Validator#validate(java.lang.Object)
javax.xml.bind.Validator#validateRoot(java.lang.Object)
javax.swing.text.View#viewToModel(float,float,java.awt.Shape)
java.rmi.server.LogStream#write(byte[],int,int)
java.rmi.server.LogStream#write(int)
java.io.ObjectOutputStream$PutField#write(java.io.ObjectOutput)
javax.management.AttributeValueExp#<init>()
java.sql.Date#<init>(int,int,int)
java.util.Date#<init>(int,int,int)
java.util.Date#<init>(int,int,int,int,int)
java.util.Date#<init>(int,int,int,int,int,int)
java.util.Date#<init>(java.lang.String)
java.awt.event.KeyEvent#<init>(java.awt.Component,int,long,int,int)
java.rmi.server.Operation#<init>(java.lang.String)
java.rmi.RMISecurityException#<init>(java.lang.String)
java.rmi.RMISecurityException#<init>(java.lang.String,java.lang.String)
java.rmi.ServerRuntimeException#<init>(java.lang.String,java.lang.Exception)
java.rmi.server.SkeletonMismatchException#<init>(java.lang.String)
java.net.Socket#<init>(java.net.InetAddress,int,boolean)
java.net.Socket#<init>(java.lang.String,int,boolean)
java.io.StreamTokenizer#<init>(java.io.InputStream)
java.lang.String#<init>(byte[],int)
java.lang.String#<init>(byte[],int,int,int)
java.sql.Time#<init>(int,int,int)
java.sql.Timestamp#<init>(int,int,int,int,int,int,int)

View File

@ -1,95 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# These methods and classes should not be used by Lucene classes (unsafe, no charset,...):
java.lang.String#<init>(byte[])
java.lang.String#<init>(byte[],int)
java.lang.String#<init>(byte[],int,int)
java.lang.String#<init>(byte[],int,int,int)
java.lang.String#getBytes()
java.lang.String#getBytes(int,int,byte[],int)
java.lang.String#toLowerCase()
java.lang.String#toUpperCase()
java.lang.String#format(java.lang.String,java.lang.Object[])
java.io.FileReader
java.io.FileWriter
java.io.ByteArrayOutputStream#toString()
java.io.InputStreamReader#<init>(java.io.InputStream)
java.io.OutputStreamWriter#<init>(java.io.OutputStream)
java.io.PrintStream#<init>(java.io.File)
java.io.PrintStream#<init>(java.io.OutputStream)
java.io.PrintStream#<init>(java.io.OutputStream,boolean)
java.io.PrintStream#<init>(java.lang.String)
java.io.PrintWriter#<init>(java.io.File)
java.io.PrintWriter#<init>(java.io.OutputStream)
java.io.PrintWriter#<init>(java.io.OutputStream,boolean)
java.io.PrintWriter#<init>(java.lang.String)
java.io.PrintWriter#format(java.lang.String,java.lang.Object[])
java.io.PrintWriter#printf(java.lang.String,java.lang.Object[])
java.nio.charset.Charset#displayName()
java.text.BreakIterator#getCharacterInstance()
java.text.BreakIterator#getLineInstance()
java.text.BreakIterator#getSentenceInstance()
java.text.BreakIterator#getWordInstance()
java.text.Collator#getInstance()
java.text.DateFormat#getTimeInstance()
java.text.DateFormat#getTimeInstance(int)
java.text.DateFormat#getDateInstance()
java.text.DateFormat#getDateInstance(int)
java.text.DateFormat#getDateTimeInstance()
java.text.DateFormat#getDateTimeInstance(int,int)
java.text.DateFormat#getInstance()
java.text.DateFormatSymbols#<init>()
java.text.DateFormatSymbols#getInstance()
java.text.DecimalFormat#<init>()
java.text.DecimalFormat#<init>(java.lang.String)
java.text.DecimalFormatSymbols#<init>()
java.text.DecimalFormatSymbols#getInstance()
java.text.MessageFormat#<init>(java.lang.String)
java.text.NumberFormat#getInstance()
java.text.NumberFormat#getNumberInstance()
java.text.NumberFormat#getIntegerInstance()
java.text.NumberFormat#getCurrencyInstance()
java.text.NumberFormat#getPercentInstance()
java.text.SimpleDateFormat#<init>()
java.text.SimpleDateFormat#<init>(java.lang.String)
java.util.Calendar#<init>()
java.util.Calendar#getInstance()
java.util.Calendar#getInstance(java.util.Locale)
java.util.Calendar#getInstance(java.util.TimeZone)
java.util.Currency#getSymbol()
java.util.GregorianCalendar#<init>()
java.util.GregorianCalendar#<init>(int,int,int)
java.util.GregorianCalendar#<init>(int,int,int,int,int)
java.util.GregorianCalendar#<init>(int,int,int,int,int,int)
java.util.GregorianCalendar#<init>(java.util.Locale)
java.util.GregorianCalendar#<init>(java.util.TimeZone)
java.util.Scanner#<init>(java.io.InputStream)
java.util.Scanner#<init>(java.io.File)
java.util.Scanner#<init>(java.nio.channels.ReadableByteChannel)
java.util.Formatter#<init>()
java.util.Formatter#<init>(java.lang.Appendable)
java.util.Formatter#<init>(java.io.File)
java.util.Formatter#<init>(java.io.File,java.lang.String)
java.util.Formatter#<init>(java.io.OutputStream)
java.util.Formatter#<init>(java.io.OutputStream,java.lang.String)
java.util.Formatter#<init>(java.io.PrintStream)
java.util.Formatter#<init>(java.lang.String)
java.util.Formatter#<init>(java.lang.String,java.lang.String)

View File

@ -1,22 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Don't allow System.out/System.err usage in non-test Lucene code
java.lang.System#out
java.lang.System#err
java.lang.Thread#dumpStack()
# eclipse autogenerated stubs!
java.lang.Throwable#printStackTrace()

View File

@ -18,12 +18,4 @@
-->
<ivy-module version="2.0">
<info organisation="org.apache.lucene" module="core-tools"/>
<dependencies>
<!--
We use the asm-debug-all library, as the asm-all has no generics.
It's not really debug, just not "stripped":
-->
<dependency org="org.ow2.asm" name="asm-debug-all" rev="4.1" transitive="false"/>
<exclude org="*" ext="*" matcher="regexp" type="${ivy.exclude.types}"/>
</dependencies>
</ivy-module>

View File

@ -18,7 +18,4 @@
<taskdef
name="licenses"
classname="org.apache.lucene.validation.LicenseCheckTask" />
<taskdef
name="forbidden-apis"
classname="org.apache.lucene.validation.ForbiddenApisCheckTask" />
</antlib>

View File

@ -1,498 +0,0 @@
package org.apache.lucene.validation;
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.objectweb.asm.ClassReader;
import org.objectweb.asm.Label;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.FieldVisitor;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.Type;
import org.objectweb.asm.commons.Method;

import org.apache.tools.ant.AntClassLoader;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.Task;
import org.apache.tools.ant.types.Path;
import org.apache.tools.ant.types.FileSet;
import org.apache.tools.ant.types.Reference;
import org.apache.tools.ant.types.Resource;
import org.apache.tools.ant.types.ResourceCollection;
import org.apache.tools.ant.types.resources.FileResource;
import org.apache.tools.ant.types.resources.Resources;
import org.apache.tools.ant.types.resources.StringResource;

import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.BufferedReader;
import java.io.Reader;
import java.io.File;
import java.io.StringReader;
import java.util.Arrays;
import java.util.Collections;
import java.util.Formatter;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Locale;
import java.util.Map;
import java.util.HashSet;
import java.util.Set;

/**
 * Task to check if a set of class files contains calls to forbidden APIs
 * from a given classpath and list of API signatures (either inline or as pointer to files).
 * In contrast to other ANT tasks, this tool does only visit the given classpath
 * and the system classloader. It uses the local classpath in preference to the system classpath
 * (which violates the spec).
 */
public class ForbiddenApisCheckTask extends Task {

  /** Class files to scan for violations (filled via nested resource collections). */
  private final Resources classFiles = new Resources();
  /** Signature definitions: files, file sets, or inline text. */
  private final Resources apiSignatures = new Resources();
  /** Optional classpath used to resolve signatures and related classes. */
  private Path classpath = null;
  /** If true, an unsupported JVM (one whose class files ASM cannot parse) fails the build. */
  private boolean failOnUnsupportedJava = false;

  /** Class loader used to resolve signature classes; only valid during {@link #execute()}. */
  ClassLoader loader = null;

  // Keys of the forbidden* maps are "internalClassName" (classes) or
  // "internalClassName\000member" (methods/fields); values are the original
  // signature strings, used verbatim in the error printout.
  final Map<String,ClassSignatureLookup> classesToCheck = new HashMap<String,ClassSignatureLookup>();
  final Map<String,ClassSignatureLookup> classpathClassCache = new HashMap<String,ClassSignatureLookup>();

  final Map<String,String> forbiddenFields = new HashMap<String,String>();
  final Map<String,String> forbiddenMethods = new HashMap<String,String>();
  final Map<String,String> forbiddenClasses = new HashMap<String,String>();

  /** Reads a class (binary name) from the given {@link ClassLoader}. Results are cached. */
  ClassSignatureLookup getClassFromClassLoader(final String clazz) throws BuildException {
    ClassSignatureLookup c = classpathClassCache.get(clazz);
    if (c == null) {
      try {
        final InputStream in = loader.getResourceAsStream(clazz.replace('.', '/') + ".class");
        if (in == null) {
          throw new BuildException("Loading of class " + clazz + " failed: Not found");
        }
        try {
          classpathClassCache.put(clazz, c = new ClassSignatureLookup(new ClassReader(in)));
        } finally {
          in.close();
        }
      } catch (IOException ioe) {
        throw new BuildException("Loading of class " + clazz + " failed.", ioe);
      }
    }
    return c;
  }

  /**
   * Adds the method signature to the list of disallowed methods. The Signature is checked
   * against the given ClassLoader.
   * <p>Accepted forms: {@code pkg.Class} (whole class), {@code pkg.Class#field},
   * or {@code pkg.Class#method(argTypes)} (return type is ignored).
   */
  private void addSignature(final String signature) throws BuildException {
    final String clazz, field;
    final Method method;
    int p = signature.indexOf('#');
    if (p >= 0) {
      clazz = signature.substring(0, p);
      final String s = signature.substring(p + 1);
      p = s.indexOf('(');
      if (p >= 0) {
        if (p == 0) {
          throw new BuildException("Invalid method signature (method name missing): " + signature);
        }
        // we ignore the return type, its just to match easier (so return type is void):
        try {
          method = Method.getMethod("void " + s, true);
        } catch (IllegalArgumentException iae) {
          throw new BuildException("Invalid method signature: " + signature);
        }
        field = null;
      } else {
        field = s;
        method = null;
      }
    } else {
      clazz = signature;
      method = null;
      field = null;
    }
    // check class & method/field signature, if it is really existent (in classpath), but we don't really load the class into JVM:
    final ClassSignatureLookup c = getClassFromClassLoader(clazz);
    if (method != null) {
      assert field == null;
      // list all methods with this signature:
      boolean found = false;
      for (final Method m : c.methods) {
        if (m.getName().equals(method.getName()) && Arrays.equals(m.getArgumentTypes(), method.getArgumentTypes())) {
          found = true;
          forbiddenMethods.put(c.reader.getClassName() + '\000' + m, signature);
          // don't break when found, as there may be more covariant overrides!
        }
      }
      if (!found) {
        throw new BuildException("No method found with following signature: " + signature);
      }
    } else if (field != null) {
      assert method == null;
      if (!c.fields.contains(field)) {
        throw new BuildException("No field found with following name: " + signature);
      }
      forbiddenFields.put(c.reader.getClassName() + '\000' + field, signature);
    } else {
      assert field == null && method == null;
      // only add the signature as class name
      forbiddenClasses.put(c.reader.getClassName(), signature);
    }
  }

  /** Reads a list of API signatures. Closes the Reader when done (on Exception, too)!
   * Empty lines and lines starting with {@code #} are ignored. */
  private void parseApiFile(Reader reader) throws IOException {
    final BufferedReader r = new BufferedReader(reader);
    try {
      String line;
      while ((line = r.readLine()) != null) {
        line = line.trim();
        if (line.length() == 0 || line.startsWith("#"))
          continue;
        addSignature(line);
      }
    } finally {
      r.close();
    }
  }

  /** Parses a class given as (FileSet) Resource */
  private ClassReader loadClassFromResource(final Resource res) throws BuildException {
    try {
      final InputStream stream = res.getInputStream();
      try {
        return new ClassReader(stream);
      } finally {
        stream.close();
      }
    } catch (IOException ioe) {
      throw new BuildException("IO problem while reading class file " + res, ioe);
    }
  }

  /** Parses a class given as Resource and checks for valid method invocations.
   * @return number of violations found in this class */
  private int checkClass(final ClassReader reader) {
    // single-element array so the anonymous visitor can mutate the counter:
    final int[] violations = new int[1];
    reader.accept(new ClassVisitor(Opcodes.ASM4) {
      final String className = Type.getObjectType(reader.getClassName()).getClassName();
      String source = null;

      @Override
      public void visitSource(String source, String debug) {
        this.source = source;
      }

      @Override
      public MethodVisitor visitMethod(int access, String name, String desc, String signature, String[] exceptions) {
        return new MethodVisitor(Opcodes.ASM4) {
          private int lineNo = -1;

          /** Looks up a referenced class, first among the checked classes, then on the classpath.
           * Returns {@code null} if the class cannot be resolved (lookup errors are ignored). */
          private ClassSignatureLookup lookupRelatedClass(String internalName) {
            ClassSignatureLookup c = classesToCheck.get(internalName);
            if (c == null) try {
              c = getClassFromClassLoader(internalName);
            } catch (BuildException be) {
              // we ignore lookup errors and simply ignore this related class
              c = null;
            }
            return c;
          }

          private boolean checkClassUse(String owner) {
            final String printout = forbiddenClasses.get(owner);
            if (printout != null) {
              log("Forbidden class use: " + printout, Project.MSG_ERR);
              return true;
            }
            return false;
          }

          /** Checks a method access, walking up superclasses and interfaces if the
           * method is not declared directly on the owner. */
          private boolean checkMethodAccess(String owner, Method method) {
            if (checkClassUse(owner)) {
              return true;
            }
            final String printout = forbiddenMethods.get(owner + '\000' + method);
            if (printout != null) {
              log("Forbidden method invocation: " + printout, Project.MSG_ERR);
              return true;
            }
            final ClassSignatureLookup c = lookupRelatedClass(owner);
            if (c != null && !c.methods.contains(method)) {
              final String superName = c.reader.getSuperName();
              if (superName != null && checkMethodAccess(superName, method)) {
                return true;
              }
              final String[] interfaces = c.reader.getInterfaces();
              if (interfaces != null) {
                for (String intf : interfaces) {
                  if (intf != null && checkMethodAccess(intf, method)) {
                    return true;
                  }
                }
              }
            }
            return false;
          }

          /** Checks a field access, walking up superclasses and interfaces if the
           * field is not declared directly on the owner. */
          private boolean checkFieldAccess(String owner, String field) {
            if (checkClassUse(owner)) {
              return true;
            }
            final String printout = forbiddenFields.get(owner + '\000' + field);
            if (printout != null) {
              log("Forbidden field access: " + printout, Project.MSG_ERR);
              return true;
            }
            final ClassSignatureLookup c = lookupRelatedClass(owner);
            if (c != null && !c.fields.contains(field)) {
              final String superName = c.reader.getSuperName();
              if (superName != null && checkFieldAccess(superName, field)) {
                return true;
              }
              final String[] interfaces = c.reader.getInterfaces();
              if (interfaces != null) {
                for (String intf : interfaces) {
                  if (intf != null && checkFieldAccess(intf, field)) {
                    return true;
                  }
                }
              }
            }
            return false;
          }

          @Override
          public void visitMethodInsn(int opcode, String owner, String name, String desc) {
            if (checkMethodAccess(owner, new Method(name, desc))) {
              violations[0]++;
              reportSourceAndLine();
            }
          }

          @Override
          public void visitFieldInsn(int opcode, String owner, String name, String desc) {
            if (checkFieldAccess(owner, name)) {
              violations[0]++;
              reportSourceAndLine();
            }
          }

          /** Logs the source file and line of the current violation (if debug info is available). */
          private void reportSourceAndLine() {
            final StringBuilder sb = new StringBuilder("  in ").append(className);
            if (source != null && lineNo >= 0) {
              new Formatter(sb, Locale.ROOT).format(" (%s:%d)", source, lineNo).flush();
            }
            log(sb.toString(), Project.MSG_ERR);
          }

          @Override
          public void visitLineNumber(int lineNo, Label start) {
            this.lineNo = lineNo;
          }
        };
      }
    }, ClassReader.SKIP_FRAMES);
    return violations[0];
  }

  /** Runs the check: loads signatures, scans all configured class files, and fails
   * the build if any forbidden API usage is found. */
  @Override
  public void execute() throws BuildException {
    AntClassLoader antLoader = null;
    try {
      if (classpath != null) {
        classpath.setProject(getProject());
        this.loader = antLoader = getProject().createClassLoader(ClassLoader.getSystemClassLoader(), classpath);
        // force that loading from this class loader is done first, then parent is asked.
        // This violates spec, but prevents classes in any system classpath to be used if a local one is available:
        antLoader.setParentFirst(false);
      } else {
        this.loader = ClassLoader.getSystemClassLoader();
      }
      classFiles.setProject(getProject());
      apiSignatures.setProject(getProject());
      final long start = System.currentTimeMillis();

      // check if we can load runtime classes (e.g. java.lang.String).
      // If this fails, we have a newer Java version than ASM supports:
      try {
        getClassFromClassLoader(String.class.getName());
      } catch (IllegalArgumentException iae) {
        final String msg = String.format(Locale.ROOT,
          "Your Java version (%s) is not supported by <%s/>. Please run the checks with a supported JDK!",
          System.getProperty("java.version"), getTaskName());
        if (failOnUnsupportedJava) {
          throw new BuildException(msg);
        } else {
          log("WARNING: " + msg, Project.MSG_WARN);
          return;
        }
      }

      try {
        @SuppressWarnings("unchecked")
        Iterator<Resource> iter = (Iterator<Resource>) apiSignatures.iterator();
        if (!iter.hasNext()) {
          throw new BuildException("You need to supply at least one API signature definition through apiFile=, <apiFileSet/>, or inner text.");
        }
        while (iter.hasNext()) {
          final Resource r = iter.next();
          if (!r.isExists()) {
            throw new BuildException("Resource does not exist: " + r);
          }
          if (r instanceof StringResource) {
            final String s = ((StringResource) r).getValue();
            if (s != null && s.trim().length() > 0) {
              log("Reading inline API signatures...", Project.MSG_INFO);
              parseApiFile(new StringReader(s));
            }
          } else {
            log("Reading API signatures: " + r, Project.MSG_INFO);
            parseApiFile(new InputStreamReader(r.getInputStream(), "UTF-8"));
          }
        }
      } catch (IOException ioe) {
        throw new BuildException("IO problem while reading files with API signatures.", ioe);
      }
      // Also check forbiddenFields here: a signatures file containing only field
      // signatures is valid and must not trigger this error.
      if (forbiddenMethods.isEmpty() && forbiddenFields.isEmpty() && forbiddenClasses.isEmpty()) {
        throw new BuildException("No API signatures found; use apiFile=, <apiFileSet/>, or inner text to define those!");
      }

      log("Loading classes to check...", Project.MSG_INFO);

      @SuppressWarnings("unchecked")
      Iterator<Resource> iter = (Iterator<Resource>) classFiles.iterator();
      if (!iter.hasNext()) {
        throw new BuildException("There is no <fileset/> given or the fileset does not contain any class files to check.");
      }
      while (iter.hasNext()) {
        final Resource r = iter.next();
        if (!r.isExists()) {
          throw new BuildException("Class file does not exist: " + r);
        }
        ClassReader reader = loadClassFromResource(r);
        classesToCheck.put(reader.getClassName(), new ClassSignatureLookup(reader));
      }

      log("Scanning for API signatures and dependencies...", Project.MSG_INFO);

      int errors = 0;
      for (final ClassSignatureLookup c : classesToCheck.values()) {
        errors += checkClass(c.reader);
      }

      log(String.format(Locale.ROOT,
          "Scanned %d (and %d related) class file(s) for forbidden API invocations (in %.2fs), %d error(s).",
          classesToCheck.size(), classpathClassCache.size(), (System.currentTimeMillis() - start) / 1000.0, errors),
          errors > 0 ? Project.MSG_ERR : Project.MSG_INFO);

      if (errors > 0) {
        throw new BuildException("Check for forbidden API calls failed, see log.");
      }
    } finally {
      // always reset state, so a reused task instance starts clean:
      this.loader = null;
      if (antLoader != null) antLoader.cleanup();
      antLoader = null;
      classesToCheck.clear();
      classpathClassCache.clear();
      forbiddenFields.clear();
      forbiddenMethods.clear();
      forbiddenClasses.clear();
    }
  }

  /** Set of class files to check */
  public void add(ResourceCollection rc) {
    classFiles.add(rc);
  }

  /** A file with API signatures apiFile= attribute */
  public void setApiFile(File file) {
    apiSignatures.add(new FileResource(getProject(), file));
  }

  /** Set of files with API signatures as <apiFileSet/> nested element */
  public FileSet createApiFileSet() {
    final FileSet fs = new FileSet();
    fs.setProject(getProject());
    apiSignatures.add(fs);
    return fs;
  }

  /** Support for API signatures list as nested text */
  public void addText(String text) {
    apiSignatures.add(new StringResource(getProject(), text));
  }

  /** Classpath as classpath= attribute */
  public void setClasspath(Path classpath) {
    createClasspath().append(classpath);
  }

  /** Classpath as classpathRef= attribute */
  public void setClasspathRef(Reference r) {
    createClasspath().setRefid(r);
  }

  /** Classpath as <classpath/> nested element */
  public Path createClasspath() {
    if (this.classpath == null) {
      this.classpath = new Path(getProject());
    }
    return this.classpath.createPath();
  }

  public void setFailOnUnsupportedJava(boolean failOnUnsupportedJava) {
    this.failOnUnsupportedJava = failOnUnsupportedJava;
  }

  /** Immutable snapshot of a class's declared methods and fields, read once via ASM.
   * Used to resolve forbidden signatures and to walk class hierarchies. */
  static final class ClassSignatureLookup {
    public final ClassReader reader;
    public final Set<Method> methods;
    public final Set<String> fields;

    public ClassSignatureLookup(final ClassReader reader) {
      this.reader = reader;
      final Set<Method> methods = new HashSet<Method>();
      final Set<String> fields = new HashSet<String>();
      reader.accept(new ClassVisitor(Opcodes.ASM4) {
        @Override
        public MethodVisitor visitMethod(int access, String name, String desc, String signature, String[] exceptions) {
          final Method m = new Method(name, desc);
          methods.add(m);
          return null;
        }

        @Override
        public FieldVisitor visitField(int access, String name, String desc, String signature, Object value) {
          fields.add(name);
          return null;
        }
      }, ClassReader.SKIP_CODE | ClassReader.SKIP_DEBUG | ClassReader.SKIP_FRAMES);
      this.methods = Collections.unmodifiableSet(methods);
      this.fields = Collections.unmodifiableSet(fields);
    }
  }

}

View File

@ -236,19 +236,29 @@
</license-check-macro>
</target>
<target name="check-forbidden-apis" depends="compile-tools,compile-test,load-custom-tasks,-check-forbidden-java-apis,-check-forbidden-test-apis"
<target name="check-forbidden-apis" depends="compile-tools,compile-test,install-forbidden-apis,-forbidden-apis-classpath,-check-forbidden-java-apis,-check-forbidden-test-apis"
description="Check forbidden API calls in compiled class files."/>
<!-- TODO: Make the forbidden API checks per module! Also remove failOnMissingClasses="false" below!!! -->
<target name="-forbidden-apis-classpath">
<path id="forbidden-apis.classpath">
<fileset dir="${basedir}" includes="**/lib/*.jar"/>
<dirset dir="${basedir}/build" includes="**/classes/*"/>
<path refid="solr.lucene.libs"/>
<path refid="classpath"/>
<path refid="test.classpath"/>
</path>
</target>
<target name="-check-forbidden-java-apis">
<forbidden-apis>
<classpath refid="additional.dependencies"/>
<apiFileSet dir="${custom-tasks.dir}/forbiddenApis">
<include name="jdk.txt" />
<include name="jdk-deprecated.txt" />
<include name="commons-io.txt" />
<forbidden-apis internalRuntimeForbidden="true" classpathref="forbidden-apis.classpath" failOnMissingClasses="false">
<bundledSignatures name="jdk-unsafe-${javac.target}"/>
<bundledSignatures name="jdk-deprecated-${javac.target}"/>
<bundledSignatures name="commons-io-unsafe-${commons-io.version}"/>
<signaturesFileSet dir="${common.dir}/tools/forbiddenApis">
<include name="executors.txt" />
<include name="servlet-api.txt" />
</apiFileSet>
</signaturesFileSet>
<fileset dir="${basedir}/build">
<include name="**/*.class" />
<!-- violates the servlet-api restrictions, but it is safe to do so in this test: -->
@ -258,8 +268,7 @@
</target>
<target name="-check-forbidden-test-apis">
<forbidden-apis apiFile="${custom-tasks.dir}/forbiddenApis/tests.txt">
<classpath refid="junit-path"/>
<forbidden-apis signaturesFile="${common.dir}/tools/forbiddenApis/tests.txt" classpathref="forbidden-apis.classpath" failOnMissingClasses="false">
<fileset dir="${basedir}/build">
<include name="**/classes/test/**/*.class"/>
<include name="solr-test-framework/**/*.class"/>

View File

@ -68,6 +68,10 @@
where X.Y.M is the last version released (on this branch).
-->
<property name="solr.spec.version" value="5.0.0.${dateversion}" />
<!-- defined here to be able to make the forbidden-api checker correctly
reference it. 'ivy.xml' is also referencing this property. -->
<property name="commons-io.version" value="2.1" />
<path id="solr.lucene.libs">
<!-- List of jars that will be used as the foundation for both

View File

@ -300,7 +300,8 @@
<fieldType name="uima_sentences" class="solr.TextField" positionIncrementGap="100">
<analyzer>
<tokenizer class="solr.UIMAAnnotationsTokenizerFactory"
descriptorPath="/uima/AggregateSentenceAE.xml" tokenType="org.apache.uima.SentenceAnnotation"/>
descriptorPath="/uima/AggregateSentenceAE.xml" tokenType="org.apache.uima.SentenceAnnotation"
ngramsize="2"/>
</analyzer>
</fieldType>

View File

@ -1220,35 +1220,43 @@ public final class ZkController {
if (!isLeader && !SKIP_AUTO_RECOVERY) {
HttpSolrServer server = null;
server = new HttpSolrServer(leaderBaseUrl);
server.setConnectionTimeout(45000);
server.setSoTimeout(120000);
WaitForState prepCmd = new WaitForState();
prepCmd.setCoreName(leaderCoreName);
prepCmd.setNodeName(getNodeName());
prepCmd.setCoreNodeName(coreZkNodeName);
prepCmd.setState(ZkStateReader.DOWN);
// let's retry a couple times - perhaps the leader just went down,
// or perhaps he is just not quite ready for us yet
retries = 6;
for (int i = 0; i < retries; i++) {
try {
server.request(prepCmd);
break;
} catch (Exception e) {
SolrException.log(log, "There was a problem making a request to the leader", e);
try {
Thread.sleep(2000);
} catch (InterruptedException e1) {
Thread.currentThread().interrupt();
try {
server.setConnectionTimeout(15000);
server.setSoTimeout(120000);
WaitForState prepCmd = new WaitForState();
prepCmd.setCoreName(leaderCoreName);
prepCmd.setNodeName(getNodeName());
prepCmd.setCoreNodeName(coreZkNodeName);
prepCmd.setState(ZkStateReader.DOWN);
// let's retry a couple times - perhaps the leader just went down,
// or perhaps he is just not quite ready for us yet
retries = 6;
for (int i = 0; i < retries; i++) {
if (isClosed) {
throw new SolrException(ErrorCode.SERVICE_UNAVAILABLE,
"We have been closed");
}
if (i == retries - 1) {
throw new SolrException(ErrorCode.SERVER_ERROR, "There was a problem making a request to the leader");
try {
server.request(prepCmd);
break;
} catch (Exception e) {
SolrException.log(log,
"There was a problem making a request to the leader", e);
try {
Thread.sleep(2000);
} catch (InterruptedException e1) {
Thread.currentThread().interrupt();
}
if (i == retries - 1) {
throw new SolrException(ErrorCode.SERVER_ERROR,
"There was a problem making a request to the leader");
}
}
}
} finally {
server.shutdown();
}
server.shutdown();
}
return leaderProps;
}

View File

@ -73,7 +73,6 @@ import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.IndexOutput;
import org.apache.solr.client.solrj.SolrServer;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpClientUtil;
import org.apache.solr.client.solrj.impl.HttpSolrServer;
@ -86,8 +85,8 @@ import org.apache.solr.common.util.ExecutorUtil;
import org.apache.solr.common.util.FastInputStream;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.core.CachingDirectoryFactory.CloseListener;
import org.apache.solr.core.DirectoryFactory.DirContext;
import org.apache.solr.core.DirectoryFactory;
import org.apache.solr.core.DirectoryFactory.DirContext;
import org.apache.solr.core.IndexDeletionPolicyWrapper;
import org.apache.solr.core.SolrCore;
import org.apache.solr.handler.ReplicationHandler.FileInfo;
@ -244,7 +243,9 @@ public class SnapPuller {
params.set(CommonParams.WT, "javabin");
params.set(CommonParams.QT, "/replication");
QueryRequest req = new QueryRequest(params);
SolrServer server = new HttpSolrServer(masterUrl, myHttpClient); //XXX modify to use shardhandler
HttpSolrServer server = new HttpSolrServer(masterUrl, myHttpClient); //XXX modify to use shardhandler
server.setSoTimeout(60000);
server.setConnectionTimeout(15000);
try {
return server.request(req);
} catch (SolrServerException e) {
@ -262,7 +263,9 @@ public class SnapPuller {
params.set(CommonParams.WT, "javabin");
params.set(CommonParams.QT, "/replication");
QueryRequest req = new QueryRequest(params);
SolrServer server = new HttpSolrServer(masterUrl, myHttpClient); //XXX modify to use shardhandler
HttpSolrServer server = new HttpSolrServer(masterUrl, myHttpClient); //XXX modify to use shardhandler
server.setSoTimeout(60000);
server.setConnectionTimeout(15000);
try {
NamedList response = server.request(req);
@ -1237,7 +1240,9 @@ public class SnapPuller {
* Open a new stream using HttpClient
*/
FastInputStream getStream() throws IOException {
SolrServer s = new HttpSolrServer(masterUrl, myHttpClient, null); //XXX use shardhandler
HttpSolrServer s = new HttpSolrServer(masterUrl, myHttpClient, null); //XXX use shardhandler
s.setSoTimeout(60000);
s.setConnectionTimeout(15000);
ModifiableSolrParams params = new ModifiableSolrParams();
// //the method is command=filecontent
@ -1496,7 +1501,9 @@ public class SnapPuller {
* Open a new stream using HttpClient
*/
FastInputStream getStream() throws IOException {
SolrServer s = new HttpSolrServer(masterUrl, myHttpClient, null); //XXX use shardhandler
HttpSolrServer s = new HttpSolrServer(masterUrl, myHttpClient, null); //XXX use shardhandler
s.setSoTimeout(60000);
s.setConnectionTimeout(15000);
ModifiableSolrParams params = new ModifiableSolrParams();
// //the method is command=filecontent
@ -1549,7 +1556,9 @@ public class SnapPuller {
params.set(COMMAND, CMD_DETAILS);
params.set("slave", false);
params.set(CommonParams.QT, "/replication");
SolrServer server = new HttpSolrServer(masterUrl, myHttpClient); //XXX use shardhandler
HttpSolrServer server = new HttpSolrServer(masterUrl, myHttpClient); //XXX use shardhandler
server.setSoTimeout(60000);
server.setConnectionTimeout(15000);
QueryRequest request = new QueryRequest(params);
return server.request(request);
}

View File

@ -495,6 +495,8 @@ public class BasicDistributedZkTest extends AbstractFullDistribZkTestBase {
DirectUpdateHandler2.commitOnClose = false;
HttpSolrServer addClient = new HttpSolrServer(url3 + "/unloadcollection3");
addClient.setConnectionTimeout(15000);
addClient.setSoTimeout(30000);
// add a few docs
for (int x = 20; x < 100; x++) {
SolrInputDocument doc1 = getDoc(id, x, i1, -600, tlong, 600, t1,
@ -507,6 +509,8 @@ public class BasicDistributedZkTest extends AbstractFullDistribZkTestBase {
// unload the leader
collectionClient = new HttpSolrServer(leaderProps.getBaseUrl());
collectionClient.setConnectionTimeout(15000);
collectionClient.setSoTimeout(30000);
Unload unloadCmd = new Unload(false);
unloadCmd.setCoreName(leaderProps.getCoreName());
@ -529,6 +533,9 @@ public class BasicDistributedZkTest extends AbstractFullDistribZkTestBase {
zkStateReader.getLeaderRetry("unloadcollection", "shard1", 15000);
addClient = new HttpSolrServer(url2 + "/unloadcollection2");
addClient.setConnectionTimeout(15000);
addClient.setSoTimeout(30000);
// add a few docs while the leader is down
for (int x = 101; x < 200; x++) {
SolrInputDocument doc1 = getDoc(id, x, i1, -600, tlong, 600, t1,
@ -541,6 +548,8 @@ public class BasicDistributedZkTest extends AbstractFullDistribZkTestBase {
client = clients.get(3);
String url4 = getBaseUrl(client);
server = new HttpSolrServer(url4);
server.setConnectionTimeout(15000);
server.setSoTimeout(30000);
createCmd = new Create();
createCmd.setCoreName("unloadcollection4");
@ -554,6 +563,8 @@ public class BasicDistributedZkTest extends AbstractFullDistribZkTestBase {
// unload the leader again
leaderProps = getLeaderUrlFromZk("unloadcollection", "shard1");
collectionClient = new HttpSolrServer(leaderProps.getBaseUrl());
collectionClient.setConnectionTimeout(15000);
collectionClient.setSoTimeout(30000);
unloadCmd = new Unload(false);
unloadCmd.setCoreName(leaderProps.getCoreName());
@ -576,6 +587,8 @@ public class BasicDistributedZkTest extends AbstractFullDistribZkTestBase {
// bring the downed leader back as replica
server = new HttpSolrServer(leaderProps.getBaseUrl());
server.setConnectionTimeout(15000);
server.setSoTimeout(30000);
createCmd = new Create();
createCmd.setCoreName(leaderProps.getCoreName());
@ -585,20 +598,23 @@ public class BasicDistributedZkTest extends AbstractFullDistribZkTestBase {
waitForRecoveriesToFinish("unloadcollection", zkStateReader, false);
server = new HttpSolrServer(url1 + "/unloadcollection");
// System.out.println(server.query(new SolrQuery("*:*")).getResults().getNumFound());
server = new HttpSolrServer(url2 + "/unloadcollection");
server.setConnectionTimeout(15000);
server.setSoTimeout(30000);
server.commit();
SolrQuery q = new SolrQuery("*:*");
q.set("distrib", false);
long found1 = server.query(q).getResults().getNumFound();
server = new HttpSolrServer(url3 + "/unloadcollection");
server.setConnectionTimeout(15000);
server.setSoTimeout(30000);
server.commit();
q = new SolrQuery("*:*");
q.set("distrib", false);
long found3 = server.query(q).getResults().getNumFound();
server = new HttpSolrServer(url4 + "/unloadcollection");
server.setConnectionTimeout(15000);
server.setSoTimeout(30000);
server.commit();
q = new SolrQuery("*:*");
q.set("distrib", false);
@ -656,7 +672,7 @@ public class BasicDistributedZkTest extends AbstractFullDistribZkTestBase {
String url3 = getBaseUrl(client);
final HttpSolrServer server = new HttpSolrServer(url3);
server.setConnectionTimeout(15000);
server.setSoTimeout(30000);
server.setSoTimeout(60000);
ThreadPoolExecutor executor = new ThreadPoolExecutor(0, Integer.MAX_VALUE,
5, TimeUnit.SECONDS, new SynchronousQueue<Runnable>(),
new DefaultSolrThreadFactory("testExecutor"));
@ -1045,6 +1061,8 @@ public class BasicDistributedZkTest extends AbstractFullDistribZkTestBase {
// now test that unloading a core gets us a new leader
HttpSolrServer server = new HttpSolrServer(baseUrl);
server.setConnectionTimeout(15000);
server.setSoTimeout(30000);
Unload unloadCmd = new Unload(true);
unloadCmd.setCoreName(props.getCoreName());

View File

@ -24,16 +24,17 @@ import org.apache.lucene.util.LuceneTestCase.BadApple;
import org.apache.lucene.util.LuceneTestCase.Slow;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.core.Diagnostics;
import org.apache.solr.core.SolrCore;
import org.apache.solr.servlet.SolrDispatchFilter;
import org.apache.solr.update.DirectUpdateHandler2;
import org.apache.solr.update.SolrCmdDistributor;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
@Slow
@BadApple
public class ChaosMonkeySafeLeaderTest extends AbstractFullDistribZkTestBase {
private static final int BASE_RUN_LENGTH = 120000;
@ -41,12 +42,21 @@ public class ChaosMonkeySafeLeaderTest extends AbstractFullDistribZkTestBase {
@BeforeClass
public static void beforeSuperClass() {
SolrCmdDistributor.testing_errorHook = new Diagnostics.Callable() {
@Override
public void call(Object... data) {
SolrCmdDistributor.Request sreq = (SolrCmdDistributor.Request)data[1];
if (sreq.exception == null) return;
if (sreq.exception.getMessage().contains("Timeout")) {
Diagnostics.logThreadDumps("REQUESTING THREAD DUMP DUE TO TIMEOUT: " + sreq.exception.getMessage());
}
}
};
}
@AfterClass
public static void afterSuperClass() {
SolrCmdDistributor.testing_errorHook = null;
}
@Before

View File

@ -90,7 +90,7 @@ public class RecoveryZkTest extends AbstractFullDistribZkTestBase {
waitForThingsToLevelOut(30);
Thread.sleep(1000);
Thread.sleep(5000);
waitForRecoveriesToFinish(DEFAULT_COLLECTION, zkStateReader, false, true);

View File

@ -24,7 +24,7 @@
<dependency org="org.apache.httpcomponents" name="httpcore" rev="4.1.4" transitive="false"/>
<dependency org="org.apache.httpcomponents" name="httpclient" rev="4.1.3" transitive="false"/>
<dependency org="org.apache.httpcomponents" name="httpmime" rev="4.1.3" transitive="false"/>
<dependency org="commons-io" name="commons-io" rev="2.1" transitive="false"/>
<dependency org="commons-io" name="commons-io" rev="${commons-io.version}" transitive="false"/>
<dependency org="org.slf4j" name="jcl-over-slf4j" rev="1.6.4" transitive="false"/>
<dependency org="org.codehaus.woodstox" name="wstx-asl" rev="3.2.7" transitive="false"/>
<dependency org="org.slf4j" name="slf4j-api" rev="1.6.4" transitive="false"/>