lucene4: fixed SimpleIndexQueryParserTests
commit bf13f3f81e
parent db639e5c2e

@@ -47,9 +47,10 @@ public class StringFieldMapper extends AbstractFieldMapper<String> implements Al
     public static final String CONTENT_TYPE = "string";

     public static class Defaults extends AbstractFieldMapper.Defaults {
-        public static final FieldType STRING_FIELD_TYPE = new FieldType(NumberFieldMapper.Defaults.NUMBER_FIELD_TYPE);
+        public static final FieldType STRING_FIELD_TYPE = new FieldType(AbstractFieldMapper.Defaults.FIELD_TYPE);

         static {
+            STRING_FIELD_TYPE.setIndexOptions(IndexOptions.DOCS_ONLY);
             STRING_FIELD_TYPE.freeze();
         }

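Note on the hunk above: it follows Lucene 4's FieldType pattern, where a field type is copied from a base type via the copy constructor, adjusted, and then frozen so any later mutation throws. A minimal standalone sketch of that pattern, assuming Lucene 4.x core on the classpath (the class and constant names below are illustrative, not taken from the mapper):

import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.FieldInfo.IndexOptions;

public class FieldTypeSketch {

    // Base type, analogous to AbstractFieldMapper.Defaults.FIELD_TYPE in the hunk above.
    private static final FieldType BASE_FIELD_TYPE = new FieldType();

    // Copy of the base, narrowed to index doc ids only and then frozen.
    public static final FieldType EXAMPLE_FIELD_TYPE;

    static {
        BASE_FIELD_TYPE.setIndexed(true);
        BASE_FIELD_TYPE.setTokenized(true);
        BASE_FIELD_TYPE.setStored(false);
        BASE_FIELD_TYPE.freeze();

        EXAMPLE_FIELD_TYPE = new FieldType(BASE_FIELD_TYPE);        // the copy is mutable again
        EXAMPLE_FIELD_TYPE.setIndexOptions(IndexOptions.DOCS_ONLY); // index doc ids only, no freqs or positions
        EXAMPLE_FIELD_TYPE.freeze();                                // further setter calls now throw
    }
}

Freezing at class-initialization time is what makes it safe to share these FieldType constants across mapper instances.
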
@@ -180,57 +180,59 @@ public class MatchQuery {
         }

         // Logic similar to QueryParser#getFieldQuery
-        final TokenStream source;
+        TokenStream source = null;
+        CachingTokenFilter buffer = null;
+        CharTermAttribute termAtt = null;
+        PositionIncrementAttribute posIncrAtt = null;
+        boolean success = false;
         try {
             source = analyzer.tokenStream(field, new FastStringReader(text));
             source.reset();
+            success = true;
         } catch(IOException ex) {
             //LUCENE 4 UPGRADE not sure what todo here really lucene 3.6 had a tokenStream that didn't throw an exc.
-            throw new ElasticSearchParseException("failed to process query", ex);
+            // success==false if we hit an exception
         }
-        CachingTokenFilter buffer = new CachingTokenFilter(source);
-        CharTermAttribute termAtt = null;
-        PositionIncrementAttribute posIncrAtt = null;
         int numTokens = 0;
+        int positionCount = 0;
+        boolean severalTokensAtSamePosition = false;

-        boolean success = false;
-        buffer.reset();
         if (success) {
+            buffer = new CachingTokenFilter(source);
+            buffer.reset();
             if (buffer.hasAttribute(CharTermAttribute.class)) {
                 termAtt = buffer.getAttribute(CharTermAttribute.class);
             }
             if (buffer.hasAttribute(PositionIncrementAttribute.class)) {
                 posIncrAtt = buffer.getAttribute(PositionIncrementAttribute.class);
             }
-        }

-        int positionCount = 0;
-        boolean severalTokensAtSamePosition = false;
-        boolean hasMoreTokens = false;
-        if (termAtt != null) {
-            try {
-                hasMoreTokens = buffer.incrementToken();
-                while (hasMoreTokens) {
-                    numTokens++;
-                    int positionIncrement = (posIncrAtt != null) ? posIncrAtt.getPositionIncrement() : 1;
-                    if (positionIncrement != 0) {
-                        positionCount += positionIncrement;
-                    } else {
-                        severalTokensAtSamePosition = true;
-                    }
+            boolean hasMoreTokens = false;
+            if (termAtt != null) {
+                try {
                     hasMoreTokens = buffer.incrementToken();
+                    while (hasMoreTokens) {
+                        numTokens++;
+                        int positionIncrement = (posIncrAtt != null) ? posIncrAtt.getPositionIncrement() : 1;
+                        if (positionIncrement != 0) {
+                            positionCount += positionIncrement;
+                        } else {
+                            severalTokensAtSamePosition = true;
+                        }
+                        hasMoreTokens = buffer.incrementToken();
+                    }
+                } catch (IOException e) {
+                    // ignore
                 }
-            } catch (IOException e) {
-                // ignore
             }
-        }
-        try {
             // rewind the buffer stream
             buffer.reset();
+        }
+        try {
             // close original stream - all tokens buffered
-            source.close();
+            if (source != null) {
+                source.close();
+            }
         } catch (IOException e) {
             // ignore
         }

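Note on the hunk above: in Lucene 4, Analyzer#tokenStream can throw IOException (hence the try/catch), and the patch makes the attribute lookup and token-counting loop run only when the stream actually opened, instead of relying on a success flag that was never set to true. A self-contained sketch of the same flow, assuming Lucene 4.x core and analyzers-common on the classpath; StandardAnalyzer and a plain StringReader stand in for the analyzer and FastStringReader used in MatchQuery:

import java.io.IOException;
import java.io.StringReader;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.CachingTokenFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.util.Version;

public class TokenCountSketch {

    public static void main(String[] args) {
        Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_40);

        TokenStream source = null;
        boolean success = false;
        try {
            source = analyzer.tokenStream("field", new StringReader("quick brown foxes"));
            source.reset();
            success = true; // only count tokens if the stream opened cleanly
        } catch (IOException e) {
            // success stays false; fall through with zero tokens
        }

        int numTokens = 0;
        int positionCount = 0;
        if (success) {
            CachingTokenFilter buffer = new CachingTokenFilter(source);
            try {
                buffer.reset();
                PositionIncrementAttribute posIncrAtt = buffer.addAttribute(PositionIncrementAttribute.class);
                while (buffer.incrementToken()) {
                    numTokens++;
                    positionCount += posIncrAtt.getPositionIncrement();
                }
            } catch (IOException e) {
                // ignore, keep whatever was counted so far
            }
        }

        try {
            if (source != null) {
                source.close(); // the cached tokens no longer need the original stream
            }
        } catch (IOException e) {
            // ignore
        }

        System.out.println(numTokens + " tokens, " + positionCount + " positions");
    }
}

The ordering mirrors the patch: reset and cache first, count from the cache, and only then close the underlying stream.
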
@@ -127,7 +127,7 @@ public class SimpleIndexQueryParserTests {

     private BytesRef longToPrefixCoded(long val) {
         BytesRef bytesRef = new BytesRef();
-        NumericUtils.longToPrefixCoded(val, 0, bytesRef);
+        NumericUtils.longToPrefixCoded(val, NumericUtils.PRECISION_STEP_DEFAULT, bytesRef);
         return bytesRef;
     }

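Note on the hunk above: NumericUtils.longToPrefixCoded writes a shift-prefixed encoding into the BytesRef, so the same value encoded with different shift arguments yields different terms; the test now builds its expected term with NumericUtils.PRECISION_STEP_DEFAULT instead of 0, presumably to match the term produced by the code under test. A small sketch of the encoding, assuming Lucene 4.x (the helper mirrors the test; the comparison is only illustrative):

import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;

public class PrefixCodedSketch {

    // Mirrors the test helper: encode a long the way Lucene 4 numeric fields do.
    static BytesRef longToPrefixCoded(long val, int shift) {
        BytesRef bytesRef = new BytesRef();
        NumericUtils.longToPrefixCoded(val, shift, bytesRef);
        return bytesRef;
    }

    public static void main(String[] args) {
        BytesRef fullPrecision = longToPrefixCoded(34L, 0);
        BytesRef defaultStep = longToPrefixCoded(34L, NumericUtils.PRECISION_STEP_DEFAULT);
        // The two encodings differ, so an expected term built with shift 0 will not
        // match a term encoded with the default precision step.
        System.out.println(fullPrecision.equals(defaultStep)); // prints false
    }
}
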