mirror of https://github.com/apache/lucene.git
LUCENE-3590: clearly mark bogus deep-copying apis in BytesRef
git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1206143 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
parent e2cddbfd43
commit 3b6da22aa7
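In short: the BytesRef(BytesRef) constructor read like a cheap wrapper but silently allocated and deep-copied the underlying array, so this commit replaces it with an explicitly named static factory, BytesRef.deepCopyOf, renames the copy(...) overloads after what they copy (copyBytes, copyChars, copyInts, likewise on CharsRef and IntsRef), and makes clone() a covariant shallow copy. A minimal before/after sketch of a call site (the variable names here are illustrative, not taken from the patch):

    // before: reads like a view, actually allocates and copies
    BytesRef saved = new BytesRef(term);

    // after: the allocation is visible at the call site
    BytesRef saved = BytesRef.deepCopyOf(term);

    // refill an existing scratch instance instead of allocating
    scratch.copyBytes(term);

    // clone() is now shallow: it shares the byte[] with the original
    BytesRef view = term.clone();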
@@ -177,7 +177,7 @@ public abstract class AbstractTestCase extends LuceneTestCase {
 
     while (tokenStream.incrementToken()) {
       termAttribute.fillBytesRef();
-      bytesRefs.add(new BytesRef(bytesRef));
+      bytesRefs.add(BytesRef.deepCopyOf(bytesRef));
     }
 
     tokenStream.end();
@@ -366,7 +366,7 @@ public class MemoryIndex {
         ArrayIntList positions = terms.get(ref);
         if (positions == null) { // term not seen before
           positions = new ArrayIntList(stride);
-          terms.put(new BytesRef(ref), positions);
+          terms.put(BytesRef.deepCopyOf(ref), positions);
         }
         if (stride == 1) {
           positions.add(pos);
@@ -874,7 +874,7 @@ public class MemoryIndex {
     public boolean seekExact(BytesRef text, boolean useCache) {
       termUpto = Arrays.binarySearch(info.sortedTerms, text, termComparator);
       if (termUpto >= 0) {
-        br.copy(info.sortedTerms[termUpto].getKey());
+        br.copyBytes(info.sortedTerms[termUpto].getKey());
         return true;
       } else {
         return false;
@@ -889,11 +889,11 @@ public class MemoryIndex {
        if (termUpto >= info.sortedTerms.length) {
          return SeekStatus.END;
        } else {
-          br.copy(info.sortedTerms[termUpto].getKey());
+          br.copyBytes(info.sortedTerms[termUpto].getKey());
          return SeekStatus.NOT_FOUND;
        }
      } else {
-        br.copy(info.sortedTerms[termUpto].getKey());
+        br.copyBytes(info.sortedTerms[termUpto].getKey());
        return SeekStatus.FOUND;
      }
    }
@@ -910,7 +910,7 @@ public class MemoryIndex {
      if (termUpto >= info.sortedTerms.length) {
        return null;
      } else {
-        br.copy(info.sortedTerms[termUpto].getKey());
+        br.copyBytes(info.sortedTerms[termUpto].getKey());
        return br;
      }
    }
@@ -26,13 +26,13 @@ public final class TermStats {
   public long totalTermFreq;
 
   TermStats(String field, BytesRef termtext, int df) {
-    this.termtext = new BytesRef(termtext);
+    this.termtext = BytesRef.deepCopyOf(termtext);
     this.field = field;
     this.docFreq = df;
   }
 
   TermStats(String field, BytesRef termtext, int df, long tf) {
-    this.termtext = new BytesRef(termtext);
+    this.termtext = BytesRef.deepCopyOf(termtext);
     this.field = field;
     this.docFreq = df;
     this.totalTermFreq = tf;
@@ -224,7 +224,7 @@ public class FuzzyLikeThisQuery extends Query
             totalVariantDocFreqs+=fe.docFreq();
             float score=boostAtt.getBoost();
             if (variantsQ.size() < MAX_VARIANTS_PER_TERM || score > minScore){
-              ScoreTerm st=new ScoreTerm(new Term(startTerm.field(), new BytesRef(possibleMatch)),score,startTerm);
+              ScoreTerm st=new ScoreTerm(new Term(startTerm.field(), BytesRef.deepCopyOf(possibleMatch)),score,startTerm);
               variantsQ.insertWithOverflow(st);
               minScore = variantsQ.top().score; // maintain minScore
             }
@@ -186,7 +186,7 @@ public final class NumericTokenStream extends TokenStream {
    @Override
    public void reflectWith(AttributeReflector reflector) {
      fillBytesRef();
-      reflector.reflect(TermToBytesRefAttribute.class, "bytes", new BytesRef(bytes));
+      reflector.reflect(TermToBytesRefAttribute.class, "bytes", BytesRef.deepCopyOf(bytes));
      reflector.reflect(NumericTermAttribute.class, "shift", shift);
      reflector.reflect(NumericTermAttribute.class, "rawValue", getRawValue());
      reflector.reflect(NumericTermAttribute.class, "valueSize", valueSize);
@@ -213,7 +213,7 @@ public class CharTermAttributeImpl extends AttributeImpl implements CharTermAttr
      // Do a deep clone
      t.termBuffer = new char[this.termLength];
      System.arraycopy(this.termBuffer, 0, t.termBuffer, 0, this.termLength);
-      t.bytes = new BytesRef(bytes);
+      t.bytes = BytesRef.deepCopyOf(bytes);
      return t;
    }
 
@@ -256,7 +256,7 @@ public class CharTermAttributeImpl extends AttributeImpl implements CharTermAttr
   public void reflectWith(AttributeReflector reflector) {
     reflector.reflect(CharTermAttribute.class, "term", toString());
     fillBytesRef();
-    reflector.reflect(TermToBytesRefAttribute.class, "bytes", new BytesRef(bytes));
+    reflector.reflect(TermToBytesRefAttribute.class, "bytes", BytesRef.deepCopyOf(bytes));
   }
 
   @Override
@@ -246,9 +246,9 @@ public class IndexDocValuesField extends Field implements PerDocFieldValues {
     }
     setDocValuesType(type);
     if (bytes == null) {
-      bytes = new BytesRef(value);
+      bytes = BytesRef.deepCopyOf(value);
     } else {
-      bytes.copy(value);
+      bytes.copyBytes(value);
     }
     bytesComparator = comp;
   }
@@ -116,7 +116,7 @@ class AutomatonTermsEnum extends FilteredTermsEnum {
        return seekBytesRef;
      }
    } else {
-      seekBytesRef.copy(term);
+      seekBytesRef.copyBytes(term);
    }
 
    // seek to the next possible string;
@@ -455,7 +455,7 @@ class BufferedDeletesStream {
        assert lastDeleteTerm == null || term.compareTo(lastDeleteTerm) > 0: "lastTerm=" + lastDeleteTerm + " vs term=" + term;
      }
      // TODO: we re-use term now in our merged iterable, but we shouldn't clone, instead copy for this assert
-      lastDeleteTerm = term == null ? null : new Term(term.field(), new BytesRef(term.bytes));
+      lastDeleteTerm = term == null ? null : new Term(term.field(), BytesRef.deepCopyOf(term.bytes));
      return true;
    }
 
@@ -726,12 +726,12 @@ public class CheckIndex {
        // make sure terms arrive in order according to
        // the comp
        if (lastTerm == null) {
-          lastTerm = new BytesRef(term);
+          lastTerm = BytesRef.deepCopyOf(term);
        } else {
          if (termComp.compare(lastTerm, term) >= 0) {
            throw new RuntimeException("terms out of order: lastTerm=" + lastTerm + " term=" + term);
          }
-          lastTerm.copy(term);
+          lastTerm.copyBytes(term);
        }
 
        final int docFreq = termsEnum.docFreq();
@@ -977,7 +977,7 @@ public class CheckIndex {
      for(int i=seekCount-1;i>=0;i--) {
        long ord = i*(termCount/seekCount);
        termsEnum.seekExact(ord);
-        seekTerms[i] = new BytesRef(termsEnum.term());
+        seekTerms[i] = BytesRef.deepCopyOf(termsEnum.term());
      }
 
      // Seek by term
@@ -221,7 +221,7 @@ public class DocTermOrds {
   protected void uninvert(final IndexReader reader, final BytesRef termPrefix) throws IOException {
     //System.out.println("DTO uninvert field=" + field + " prefix=" + termPrefix);
     final long startTime = System.currentTimeMillis();
-    prefix = termPrefix == null ? null : new BytesRef(termPrefix);
+    prefix = termPrefix == null ? null : BytesRef.deepCopyOf(termPrefix);
 
     final int maxDoc = reader.maxDoc();
     final int[] index = new int[maxDoc]; // immediate term numbers, or the index into the byte[] representing the last number
@@ -201,7 +201,7 @@ public final class MultiTermsEnum extends TermsEnum {
      seekOpt = true;
    }
 
-    lastSeekScratch.copy(term);
+    lastSeekScratch.copyBytes(term);
    lastSeek = lastSeekScratch;
 
    for(int i=0;i<numSubs;i++) {
@@ -111,7 +111,7 @@ class PrefixCodedTerms implements Iterable<Term> {
        }
        output.writeVInt(suffix);
        output.writeBytes(term.bytes.bytes, term.bytes.offset + prefix, suffix);
-        lastTerm.bytes.copy(term.bytes);
+        lastTerm.bytes.copyBytes(term.bytes);
        lastTerm.field = term.field;
      } catch (IOException e) {
        throw new RuntimeException(e);
@@ -85,7 +85,7 @@ public class BlockTermsReader extends FieldsProducer {
 
    public FieldAndTerm(FieldAndTerm other) {
      field = other.field;
-      term = new BytesRef(other.term);
+      term = BytesRef.deepCopyOf(other.term);
    }
 
    @Override
@@ -437,7 +437,7 @@ public class BlockTermsReader extends FieldsProducer {
          state.ord = indexEnum.ord()-1;
        }
 
-        term.copy(indexEnum.term());
+        term.copyBytes(indexEnum.term());
        //System.out.println(" seek: term=" + term.utf8ToString());
      } else {
        //System.out.println(" skip seek");
@@ -720,7 +720,7 @@ public class BlockTermsReader extends FieldsProducer {
      state.copyFrom(otherState);
      seekPending = true;
      indexIsCurrent = false;
-      term.copy(target);
+      term.copyBytes(target);
    }
 
    @Override
@@ -757,7 +757,7 @@ public class BlockTermsReader extends FieldsProducer {
 
      state.ord = indexEnum.ord()-1;
      assert state.ord >= -1: "ord=" + state.ord;
-      term.copy(indexEnum.term());
+      term.copyBytes(indexEnum.term());
 
      // Now, scan:
      int left = (int) (ord - state.ord);
@@ -225,7 +225,7 @@ public class BlockTermsWriter extends FieldsConsumer {
        pendingTerms = newArray;
      }
      final TermEntry te = pendingTerms[pendingCount];
-      te.term.copy(text);
+      te.term.copyBytes(text);
      te.stats = stats;
 
      pendingCount++;
@@ -312,7 +312,7 @@ public class BlockTermsWriter extends FieldsConsumer {
      bytesWriter.reset();
 
      postingsWriter.flushTermsBlock(pendingCount, pendingCount);
-      lastPrevTerm.copy(pendingTerms[pendingCount-1].term);
+      lastPrevTerm.copyBytes(pendingTerms[pendingCount-1].term);
      pendingCount = 0;
    }
  }
@@ -785,7 +785,7 @@ public class BlockTreeTermsReader extends FieldsProducer {
 
    // only for assert:
    private boolean setSavedStartTerm(BytesRef startTerm) {
-      savedStartTerm = startTerm == null ? null : new BytesRef(startTerm);
+      savedStartTerm = startTerm == null ? null : BytesRef.deepCopyOf(startTerm);
      return true;
    }
 
@@ -1847,7 +1847,7 @@ public class BlockTreeTermsReader extends FieldsProducer {
 
      final SeekStatus result = currentFrame.scanToTerm(target, false);
      if (result == SeekStatus.END) {
-        term.copy(target);
+        term.copyBytes(target);
        termExists = false;
 
        if (next() != null) {
@@ -1900,7 +1900,7 @@ public class BlockTreeTermsReader extends FieldsProducer {
      final SeekStatus result = currentFrame.scanToTerm(target, false);
 
      if (result == SeekStatus.END) {
-        term.copy(target);
+        term.copyBytes(target);
        termExists = false;
        if (next() != null) {
          //if (DEBUG) {
@@ -2120,7 +2120,7 @@ public class BlockTreeTermsReader extends FieldsProducer {
      assert otherState != null && otherState instanceof BlockTermState;
      currentFrame = staticFrame;
      currentFrame.state.copyFrom(otherState);
-      term.copy(target);
+      term.copyBytes(target);
      currentFrame.metaDataUpto = currentFrame.getTermBlockOrd();
      assert currentFrame.metaDataUpto > 0;
      validIndexPrefix = 0;
@@ -860,7 +860,7 @@ public class BlockTreeTermsWriter extends FieldsConsumer {
      //if (DEBUG) System.out.println("BTTW.finishTerm term=" + fieldInfo.name + ":" + toString(text) + " seg=" + segment + " df=" + stats.docFreq);
 
      blockBuilder.add(text, noOutputs.getNoOutput());
-      pending.add(new PendingTerm(new BytesRef(text), stats));
+      pending.add(new PendingTerm(BytesRef.deepCopyOf(text), stats));
      postingsWriter.finishTerm(stats);
      numTerms++;
    }
@@ -447,7 +447,7 @@ public class DefaultTermVectorsReader extends TermVectorsReader {
      if (nextTerm >= numTerms) {
        return null;
      }
-      term.copy(lastTerm);
+      term.copyBytes(lastTerm);
      final int start = tvf.readVInt();
      final int deltaLen = tvf.readVInt();
      term.length = start + deltaLen;
@@ -477,7 +477,7 @@ public class DefaultTermVectorsReader extends TermVectorsReader {
        }
      }
 
-      lastTerm.copy(term);
+      lastTerm.copyBytes(term);
      nextTerm++;
      return term;
    }
@@ -132,7 +132,7 @@ public final class DefaultTermVectorsWriter extends TermVectorsWriter {
    tvf.writeVInt(suffix);
    tvf.writeBytes(term.bytes, term.offset + prefix, suffix);
    tvf.writeVInt(freq);
-    lastTerm.copy(term);
+    lastTerm.copyBytes(term);
    lastPosition = lastOffset = 0;
 
    if (offsets && positions) {
@@ -141,7 +141,7 @@ public class FixedGapTermsIndexWriter extends TermsIndexWriterBase {
      if (0 == numTerms % termIndexInterval) {
        // save last term just before next index term so we
        // can compute wasted suffix
-        lastTerm.copy(text);
+        lastTerm.copyBytes(text);
      }
      return false;
    }
@@ -172,7 +172,7 @@ public class FixedGapTermsIndexWriter extends TermsIndexWriterBase {
      termLengths[numIndexTerms] = (short) indexedTermLength;
      totTermLength += indexedTermLength;
 
-      lastTerm.copy(text);
+      lastTerm.copyBytes(text);
      numIndexTerms++;
    }
 
@@ -241,7 +241,7 @@ public class VariableGapTermsIndexWriter extends TermsIndexWriterBase {
        //System.out.println(" YES");
        return true;
      } else {
-        lastTerm.copy(text);
+        lastTerm.copyBytes(text);
        return false;
      }
    }
@@ -260,7 +260,7 @@ public class VariableGapTermsIndexWriter extends TermsIndexWriterBase {
      } finally {
        text.length = lengthSave;
      }
-      lastTerm.copy(text);
+      lastTerm.copyBytes(text);
    }
 
    @Override
@@ -407,7 +407,7 @@ public class Lucene3xFields extends FieldsProducer {
          getTermsDict().seekEnum(termEnum, seekTermEnum.term(), true);
          //newSuffixStart = downTo+4;
          newSuffixStart = downTo;
-          scratchTerm.copy(termEnum.term().bytes());
+          scratchTerm.copyBytes(termEnum.term().bytes());
          didSeek = true;
          if (DEBUG_SURROGATES) {
            System.out.println(" seek!");
@@ -481,7 +481,7 @@ public class Lucene3xFields extends FieldsProducer {
        // done no scanning (eg, term was precisely
        // and index term, or, was in the term seek
        // cache):
-        scratchTerm.copy(b2);
+        scratchTerm.copyBytes(b2);
        setNewSuffixStart(prevTerm, scratchTerm);
 
        return true;
@@ -554,7 +554,7 @@ public class Lucene3xFields extends FieldsProducer {
      if (termEnum.term() == null || termEnum.term().field() != internedFieldName) {
        scratchTerm.length = 0;
      } else {
-        scratchTerm.copy(termEnum.term().bytes());
+        scratchTerm.copyBytes(termEnum.term().bytes());
      }
 
      if (DEBUG_SURROGATES) {
@@ -687,7 +687,7 @@ public class Lucene3xFields extends FieldsProducer {
          // TODO: more efficient seek?
          getTermsDict().seekEnum(termEnum, seekTermEnum.term(), true);
 
-          scratchTerm.copy(seekTermEnum.term().bytes());
+          scratchTerm.copyBytes(seekTermEnum.term().bytes());
 
          // +3 because we don't need to check the char
          // at upTo: we know it's > BMP
@@ -788,7 +788,7 @@ public class Lucene3xFields extends FieldsProducer {
 
      // We hit EOF; try end-case surrogate dance: if we
      // find an E, try swapping in S, backwards:
-      scratchTerm.copy(term);
+      scratchTerm.copyBytes(term);
 
      assert scratchTerm.offset == 0;
 
@@ -800,7 +800,7 @@ public class Lucene3xFields extends FieldsProducer {
 
        if (seekToNonBMP(seekTermEnum, scratchTerm, i)) {
 
-          scratchTerm.copy(seekTermEnum.term().bytes());
+          scratchTerm.copyBytes(seekTermEnum.term().bytes());
          getTermsDict().seekEnum(termEnum, seekTermEnum.term(), useCache);
 
          newSuffixStart = 1+i;
@@ -826,7 +826,7 @@ public class Lucene3xFields extends FieldsProducer {
        // We found a non-exact but non-null term; this one
        // is fun -- just treat it like next, by pretending
        // requested term was prev:
-        prevTerm.copy(term);
+        prevTerm.copyBytes(term);
 
        if (DEBUG_SURROGATES) {
          System.out.println(" seek hit non-exact term=" + UnicodeUtil.toHexString(t.text()));
@@ -895,7 +895,7 @@ public class Lucene3xFields extends FieldsProducer {
      }
 
      // TODO: can we use STE's prevBuffer here?
-      prevTerm.copy(termEnum.term().bytes());
+      prevTerm.copyBytes(termEnum.term().bytes());
 
      if (termEnum.next() && termEnum.term().field() == internedFieldName) {
        newSuffixStart = termEnum.newSuffixStart;
@@ -78,7 +78,7 @@ final class TermBuffer implements Cloneable {
      reset();
      return;
    }
-    bytes.copy(term.bytes());
+    bytes.copyBytes(term.bytes());
    field = term.field().intern();
    currentFieldNumber = -1;
    this.term = term;
@@ -90,7 +90,7 @@ final class TermBuffer implements Cloneable {
    // dangerous to copy Term over, since the underlying
    // BytesRef could subsequently be modified:
    term = null;
-    bytes.copy(other.bytes);
+    bytes.copyBytes(other.bytes);
  }
 
  public void reset() {
@@ -104,7 +104,7 @@ final class TermBuffer implements Cloneable {
      return null;
 
    if (term == null) {
-      term = new Term(field, new BytesRef(bytes));
+      term = new Term(field, BytesRef.deepCopyOf(bytes));
    }
 
    return term;
@@ -116,7 +116,7 @@ final class TermBuffer implements Cloneable {
    try {
      clone = (TermBuffer)super.clone();
    } catch (CloneNotSupportedException e) {}
-    clone.bytes = new BytesRef(bytes);
+    clone.bytes = BytesRef.deepCopyOf(bytes);
    return clone;
  }
}
@@ -213,7 +213,7 @@ public class MemoryPostingsFormat extends PostingsFormat {
          System.out.println(" " + Integer.toHexString(finalBuffer[i]&0xFF));
        }
      }
-      builder.add(text, new BytesRef(spare));
+      builder.add(text, BytesRef.deepCopyOf(spare));
      termCount++;
    }
 
@@ -183,9 +183,9 @@ public final class PulsingPostingsWriter extends PostingsWriterBase {
      pos.docID = currentDoc.docID;
      if (payload != null && payload.length > 0) {
        if (pos.payload == null) {
-          pos.payload = new BytesRef(payload);
+          pos.payload = BytesRef.deepCopyOf(payload);
        } else {
-          pos.payload.copy(payload);
+          pos.payload.copyBytes(payload);
        }
      } else if (pos.payload != null) {
        pos.payload.length = 0;
@@ -295,7 +295,7 @@ final class SortedBytesMergeUtils {
 
    @Override
    public BytesRef getByOrd(int ord, BytesRef bytesRef) {
-      bytesRef.copy(missingValue);
+      bytesRef.copyBytes(missingValue);
      return bytesRef;
    }
 
@@ -240,7 +240,7 @@ public final class FuzzyTermsEnum extends TermsEnum {
      this.bottomTerm = bottomTerm;
      // clone the term before potentially doing something with it
      // this is a rare but wonderful occurrence anyway
-      queuedBottom = new BytesRef(term);
+      queuedBottom = BytesRef.deepCopyOf(term);
    }
 
    return term;
@@ -91,12 +91,12 @@ public abstract class TopTermsRewrite<Q extends Query> extends TermCollectingRew
      private BytesRef lastTerm;
      private boolean compareToLastTerm(BytesRef t) throws IOException {
        if (lastTerm == null && t != null) {
-          lastTerm = new BytesRef(t);
+          lastTerm = BytesRef.deepCopyOf(t);
        } else if (t == null) {
          lastTerm = null;
        } else {
          assert termsEnum.getComparator().compare(lastTerm, t) < 0: "lastTerm=" + lastTerm + " t=" + t;
-          lastTerm.copy(t);
+          lastTerm.copyBytes(t);
        }
        return true;
      }
@@ -127,7 +127,7 @@ public abstract class TopTermsRewrite<Q extends Query> extends TermCollectingRew
          t.termState.register(state, readerContext.ord, termsEnum.docFreq(), termsEnum.totalTermFreq());
        } else {
          // add new entry in PQ, we must clone the term, else it may get overwritten!
-          st.bytes.copy(bytes);
+          st.bytes.copyBytes(bytes);
          st.boost = boost;
          visitedTerms.put(st.bytes, st);
          assert st.termState.docFreq() == 0;
@@ -24,7 +24,7 @@ import java.util.Comparator;
 * use {@link #EMPTY_BYTES} if necessary.
 *
 * @lucene.experimental */
-public final class BytesRef implements Comparable<BytesRef> {
+public final class BytesRef implements Comparable<BytesRef>,Cloneable {
 
  static final int HASH_PRIME = 31;
  public static final byte[] EMPTY_BYTES = new byte[0];
@@ -72,34 +72,16 @@ public final class BytesRef implements Comparable<BytesRef> {
   */
  public BytesRef(CharSequence text) {
    this();
-    copy(text);
+    copyChars(text);
  }
 
-  public BytesRef(BytesRef other) {
-    this();
-    copy(other);
-  }
-
-  /* // maybe?
-  public BytesRef(BytesRef other, boolean shallow) {
-    this();
-    if (shallow) {
-      offset = other.offset;
-      length = other.length;
-      bytes = other.bytes;
-    } else {
-      copy(other);
-    }
-  }
-  */
-
  /**
   * Copies the UTF8 bytes for this string.
   *
   * @param text Must be well-formed unicode text, with no
   * unpaired surrogates or invalid UTF16 code units.
   */
-  public void copy(CharSequence text) {
+  public void copyChars(CharSequence text) {
    UnicodeUtil.UTF16toUTF8(text, 0, text.length(), this);
  }
 
@@ -120,8 +102,8 @@ public final class BytesRef implements Comparable<BytesRef> {
  }
 
  @Override
-  public Object clone() {
-    return new BytesRef(this);
+  public BytesRef clone() {
+    return new BytesRef(bytes, offset, length);
  }
 
  private boolean sliceEquals(BytesRef other, int pos) {
@@ -207,12 +189,12 @@ public final class BytesRef implements Comparable<BytesRef> {
  }
 
  /**
-   * Copies the given {@link BytesRef}
+   * Copies the bytes from the given {@link BytesRef}
   * <p>
   * NOTE: this method resets the offset to 0 and resizes the reference array
   * if needed.
   */
-  public void copy(BytesRef other) {
+  public void copyBytes(BytesRef other) {
    if (bytes.length < other.length) {
      bytes = new byte[other.length];
    }
@@ -355,4 +337,17 @@ public final class BytesRef implements Comparable<BytesRef> {
      return a.length - b.length;
    }
  }
+
+  /**
+   * Creates a new BytesRef that points to a copy of the bytes from
+   * <code>other</code>
+   * <p>
+   * The returned BytesRef will have a length of other.length
+   * and an offset of zero.
+   */
+  public static BytesRef deepCopyOf(BytesRef other) {
+    BytesRef copy = new BytesRef();
+    copy.copyBytes(other);
+    return copy;
+  }
}
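The call-site edits in this commit split into two idioms that the renamed APIs keep distinct: a term that must outlive a reused buffer is retained with deepCopyOf, while a long-lived scratch ref is refilled in place with copyBytes, which grows the existing array only when needed. A sketch of the retention idiom, modeled on the TermsEnum loops touched by this patch (the collection and enum names here are illustrative):

    Set<BytesRef> terms = new HashSet<BytesRef>();
    BytesRef term;
    while ((term = termsEnum.next()) != null) {
      // next() refills one shared buffer; keep a private copy
      terms.add(BytesRef.deepCopyOf(term));
    }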
@@ -25,7 +25,7 @@ import java.util.Comparator;
 * {@link #EMPTY_ARRAY} if necessary.
 * @lucene.internal
 */
-public final class CharsRef implements Comparable<CharsRef>, CharSequence {
+public final class CharsRef implements Comparable<CharsRef>, CharSequence, Cloneable {
  private static final char[] EMPTY_ARRAY = new char[0];
  public char[] chars;
  public int offset;
@@ -68,18 +68,9 @@ public final class CharsRef implements Comparable<CharsRef>, CharSequence {
    this.length = chars.length;
  }
 
-  /**
-   * Creates a new {@link CharsRef} and copies the contents of the source into
-   * the new instance.
-   * @see #copy(CharsRef)
-   */
-  public CharsRef(CharsRef other) {
-    copy(other);
-  }
-
  @Override
-  public Object clone() {
-    return new CharsRef(this);
+  public CharsRef clone() {
+    return new CharsRef(chars, offset, length);
  }
 
  @Override
@@ -168,7 +159,8 @@ public final class CharsRef implements Comparable<CharsRef>, CharSequence {
   * @param other
   *          the {@link CharsRef} to copy
   */
-  public void copy(CharsRef other) {
+  // TODO: why does this behave differently/not invoke copyChars(char[], int, int) ???
+  public void copyChars(CharsRef other) {
    if (chars == null) {
      chars = new char[other.length];
    } else {
@@ -188,7 +180,7 @@ public final class CharsRef implements Comparable<CharsRef>, CharSequence {
  /**
   * Copies the given array into this CharsRef starting at offset 0
   */
-  public void copy(char[] otherChars, int otherOffset, int otherLength) {
+  public void copyChars(char[] otherChars, int otherOffset, int otherLength) {
    grow(otherLength);
    System.arraycopy(otherChars, otherOffset, this.chars, 0,
        otherLength);
@@ -275,4 +267,17 @@ public final class CharsRef implements Comparable<CharsRef>, CharSequence {
      return a.length - b.length;
    }
  }
+
+  /**
+   * Creates a new CharsRef that points to a copy of the chars from
+   * <code>other</code>
+   * <p>
+   * The returned CharsRef will have a length of other.length
+   * and an offset of zero.
+   */
+  public static CharsRef deepCopyOf(CharsRef other) {
+    CharsRef clone = new CharsRef();
+    clone.copyChars(other);
+    return clone;
+  }
}
@@ -22,7 +22,7 @@ package org.apache.lucene.util;
 * {@link #EMPTY_INTS} if necessary.
 *
 * @lucene.internal */
-public final class IntsRef implements Comparable<IntsRef> {
+public final class IntsRef implements Comparable<IntsRef>, Cloneable {
 
  public static final int[] EMPTY_INTS = new int[0];
 
@@ -43,13 +43,9 @@ public final class IntsRef implements Comparable<IntsRef> {
    this.length = length;
  }
 
-  public IntsRef(IntsRef other) {
-    copy(other);
-  }
-
  @Override
-  public Object clone() {
-    return new IntsRef(this);
+  public IntsRef clone() {
+    return new IntsRef(ints, offset, length);
  }
 
  @Override
@@ -109,7 +105,7 @@ public final class IntsRef implements Comparable<IntsRef> {
    return this.length - other.length;
  }
 
-  public void copy(IntsRef other) {
+  public void copyInts(IntsRef other) {
    if (ints == null) {
      ints = new int[other.length];
    } else {
@@ -140,4 +136,17 @@ public final class IntsRef implements Comparable<IntsRef> {
    sb.append(']');
    return sb.toString();
  }
+
+  /**
+   * Creates a new IntsRef that points to a copy of the ints from
+   * <code>other</code>
+   * <p>
+   * The returned IntsRef will have a length of other.length
+   * and an offset of zero.
+   */
+  public static IntsRef deepCopyOf(IntsRef other) {
+    IntsRef clone = new IntsRef();
+    clone.copyInts(other);
+    return clone;
+  }
}
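CharsRef and IntsRef get the same treatment as BytesRef: the copying constructor is removed, clone() becomes a covariant shallow copy that shares the backing array, and deepCopyOf is the only path that allocates on purpose. A small sketch of the aliasing this implies (variable names are illustrative):

    IntsRef a = new IntsRef(new int[] {1, 2, 3}, 0, 3);
    IntsRef shallow = a.clone();           // shares a.ints
    IntsRef deep = IntsRef.deepCopyOf(a);  // owns a fresh int[]
    a.ints[0] = 42;
    // shallow.ints[0] is now 42; deep.ints[0] is still 1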
@ -462,7 +462,7 @@ public class Builder<T> {
|
||||||
}
|
}
|
||||||
|
|
||||||
// save last input
|
// save last input
|
||||||
lastInput.copy(input);
|
lastInput.copyInts(input);
|
||||||
|
|
||||||
//System.out.println(" count[0]=" + frontier[0].inputCount);
|
//System.out.println(" count[0]=" + frontier[0].inputCount);
|
||||||
}
|
}
|
||||||
|
|
|
@ -30,13 +30,13 @@ public class MutableValueStr extends MutableValue {
|
||||||
public void copy(MutableValue source) {
|
public void copy(MutableValue source) {
|
||||||
MutableValueStr s = (MutableValueStr) source;
|
MutableValueStr s = (MutableValueStr) source;
|
||||||
exists = s.exists;
|
exists = s.exists;
|
||||||
value.copy(s.value);
|
value.copyBytes(s.value);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public MutableValue duplicate() {
|
public MutableValue duplicate() {
|
||||||
MutableValueStr v = new MutableValueStr();
|
MutableValueStr v = new MutableValueStr();
|
||||||
v.value.copy(value);
|
v.value.copyBytes(value);
|
||||||
v.exists = this.exists;
|
v.exists = this.exists;
|
||||||
return v;
|
return v;
|
||||||
}
|
}
|
||||||
|
|
|
@ -279,7 +279,7 @@ public abstract class CollationTestBase extends LuceneTestCase {
|
||||||
assertTrue(ts.incrementToken());
|
assertTrue(ts.incrementToken());
|
||||||
termAtt.fillBytesRef();
|
termAtt.fillBytesRef();
|
||||||
// ensure we make a copy of the actual bytes too
|
// ensure we make a copy of the actual bytes too
|
||||||
map.put(term, new BytesRef(bytes));
|
map.put(term, BytesRef.deepCopyOf(bytes));
|
||||||
}
|
}
|
||||||
|
|
||||||
Thread threads[] = new Thread[numThreads];
|
Thread threads[] = new Thread[numThreads];
|
||||||
|
|
|
@ -170,7 +170,7 @@ final class TermInfosWriter implements Closeable {
|
||||||
return cmp;
|
return cmp;
|
||||||
}
|
}
|
||||||
|
|
||||||
scratchBytes.copy(term);
|
scratchBytes.copyBytes(term);
|
||||||
assert lastTerm.offset == 0;
|
assert lastTerm.offset == 0;
|
||||||
UnicodeUtil.UTF8toUTF16(lastTerm.bytes, 0, lastTerm.length, utf16Result1);
|
UnicodeUtil.UTF8toUTF16(lastTerm.bytes, 0, lastTerm.length, utf16Result1);
|
||||||
|
|
||||||
|
@ -255,7 +255,7 @@ final class TermInfosWriter implements Closeable {
|
||||||
output.writeVInt(length); // write delta length
|
output.writeVInt(length); // write delta length
|
||||||
output.writeBytes(term.bytes, start+term.offset, length); // write delta bytes
|
output.writeBytes(term.bytes, start+term.offset, length); // write delta bytes
|
||||||
output.writeVInt(fieldNumber); // write field num
|
output.writeVInt(fieldNumber); // write field num
|
||||||
lastTerm.copy(term);
|
lastTerm.copyBytes(term);
|
||||||
}
|
}
|
||||||
|
|
||||||
/** Called to complete TermInfos creation. */
|
/** Called to complete TermInfos creation. */
|
||||||
|
|
|
@ -324,7 +324,7 @@ public final class DaciukMihovAutomatonBuilder {
|
||||||
private boolean setPrevious(CharsRef current) {
|
private boolean setPrevious(CharsRef current) {
|
||||||
// don't need to copy, once we fix https://issues.apache.org/jira/browse/LUCENE-3277
|
// don't need to copy, once we fix https://issues.apache.org/jira/browse/LUCENE-3277
|
||||||
// still, called only from assert
|
// still, called only from assert
|
||||||
previous = new CharsRef(current);
|
previous = CharsRef.deepCopyOf(current);
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -77,7 +77,7 @@ public class Test2BTerms extends LuceneTestCase {
|
||||||
random.nextBytes(bytes.bytes);
|
random.nextBytes(bytes.bytes);
|
||||||
tokenCount++;
|
tokenCount++;
|
||||||
if (--nextSave == 0) {
|
if (--nextSave == 0) {
|
||||||
savedTerms.add(new BytesRef(bytes));
|
savedTerms.add(BytesRef.deepCopyOf(bytes));
|
||||||
System.out.println("TEST: save term=" + bytes);
|
System.out.println("TEST: save term=" + bytes);
|
||||||
nextSave = _TestUtil.nextInt(random, 500000, 1000000);
|
nextSave = _TestUtil.nextInt(random, 500000, 1000000);
|
||||||
}
|
}
|
||||||
|
@ -231,7 +231,7 @@ public class Test2BTerms extends LuceneTestCase {
|
||||||
BytesRef term;
|
BytesRef term;
|
||||||
while((term = termsEnum.next()) != null) {
|
while((term = termsEnum.next()) != null) {
|
||||||
if (--nextSave == 0) {
|
if (--nextSave == 0) {
|
||||||
savedTerms.add(new BytesRef(term));
|
savedTerms.add(BytesRef.deepCopyOf(term));
|
||||||
System.out.println("TEST: add " + term);
|
System.out.println("TEST: add " + term);
|
||||||
nextSave = _TestUtil.nextInt(random, 500000, 1000000);
|
nextSave = _TestUtil.nextInt(random, 500000, 1000000);
|
||||||
}
|
}
|
||||||
|
|
|
@ -75,7 +75,7 @@ public class TestDocumentsWriterDeleteQueue extends LuceneTestCase {
|
||||||
HashSet<Term> frozenSet = new HashSet<Term>();
|
HashSet<Term> frozenSet = new HashSet<Term>();
|
||||||
for (Term t : queue.freezeGlobalBuffer(null).termsIterable()) {
|
for (Term t : queue.freezeGlobalBuffer(null).termsIterable()) {
|
||||||
BytesRef bytesRef = new BytesRef();
|
BytesRef bytesRef = new BytesRef();
|
||||||
bytesRef.copy(t.bytes);
|
bytesRef.copyBytes(t.bytes);
|
||||||
frozenSet.add(new Term(t.field, bytesRef));
|
frozenSet.add(new Term(t.field, bytesRef));
|
||||||
}
|
}
|
||||||
assertEquals(uniqueValues, frozenSet);
|
assertEquals(uniqueValues, frozenSet);
|
||||||
|
@ -204,7 +204,7 @@ public class TestDocumentsWriterDeleteQueue extends LuceneTestCase {
|
||||||
Set<Term> frozenSet = new HashSet<Term>();
|
Set<Term> frozenSet = new HashSet<Term>();
|
||||||
for (Term t : queue.freezeGlobalBuffer(null).termsIterable()) {
|
for (Term t : queue.freezeGlobalBuffer(null).termsIterable()) {
|
||||||
BytesRef bytesRef = new BytesRef();
|
BytesRef bytesRef = new BytesRef();
|
||||||
bytesRef.copy(t.bytes);
|
bytesRef.copyBytes(t.bytes);
|
||||||
frozenSet.add(new Term(t.field, bytesRef));
|
frozenSet.add(new Term(t.field, bytesRef));
|
||||||
}
|
}
|
||||||
assertEquals("num deletes must be 0 after freeze", 0, queue
|
assertEquals("num deletes must be 0 after freeze", 0, queue
|
||||||
|
|
|
@ -148,7 +148,7 @@ public class TestIndexWriterUnicode extends LuceneTestCase {
|
||||||
}
|
}
|
||||||
|
|
||||||
assertTrue(last.compareTo(term) < 0);
|
assertTrue(last.compareTo(term) < 0);
|
||||||
last.copy(term);
|
last.copyBytes(term);
|
||||||
|
|
||||||
final String s = term.utf8ToString();
|
final String s = term.utf8ToString();
|
||||||
assertTrue("term " + termDesc(s) + " was not added to index (count=" + allTerms.size() + ")", allTerms.contains(s));
|
assertTrue("term " + termDesc(s) + " was not added to index (count=" + allTerms.size() + ")", allTerms.contains(s));
|
||||||
|
|
|
@ -65,7 +65,7 @@ public class TestTermsEnum extends LuceneTestCase {
|
||||||
final TermsEnum termsEnum = MultiFields.getTerms(r, "body").iterator(null);
|
final TermsEnum termsEnum = MultiFields.getTerms(r, "body").iterator(null);
|
||||||
BytesRef term;
|
BytesRef term;
|
||||||
while((term = termsEnum.next()) != null) {
|
while((term = termsEnum.next()) != null) {
|
||||||
terms.add(new BytesRef(term));
|
terms.add(BytesRef.deepCopyOf(term));
|
||||||
}
|
}
|
||||||
if (VERBOSE) {
|
if (VERBOSE) {
|
||||||
System.out.println("TEST: " + terms.size() + " terms");
|
System.out.println("TEST: " + terms.size() + " terms");
|
||||||
|
@ -310,7 +310,7 @@ public class TestTermsEnum extends LuceneTestCase {
|
||||||
if (startTerm == null) {
|
if (startTerm == null) {
|
||||||
loc = 0;
|
loc = 0;
|
||||||
} else {
|
} else {
|
||||||
loc = Arrays.binarySearch(termsArray, new BytesRef(startTerm));
|
loc = Arrays.binarySearch(termsArray, BytesRef.deepCopyOf(startTerm));
|
||||||
if (loc < 0) {
|
if (loc < 0) {
|
||||||
loc = -(loc+1);
|
loc = -(loc+1);
|
||||||
} else {
|
} else {
|
||||||
|
@ -648,7 +648,7 @@ public class TestTermsEnum extends LuceneTestCase {
|
||||||
} else {
|
} else {
|
||||||
// pick valid term
|
// pick valid term
|
||||||
loc = random.nextInt(validTerms.length);
|
loc = random.nextInt(validTerms.length);
|
||||||
t = new BytesRef(validTerms[loc]);
|
t = BytesRef.deepCopyOf(validTerms[loc]);
|
||||||
termState = null;
|
termState = null;
|
||||||
if (VERBOSE) {
|
if (VERBOSE) {
|
||||||
System.out.println("\nTEST: valid term=" + t.utf8ToString());
|
System.out.println("\nTEST: valid term=" + t.utf8ToString());
|
||||||
|
|
|
@ -166,7 +166,7 @@ public class TestTermsEnum2 extends LuceneTestCase {
|
||||||
Automaton expected = BasicOperations.intersection(termsAutomaton, automaton);
|
Automaton expected = BasicOperations.intersection(termsAutomaton, automaton);
|
||||||
TreeSet<BytesRef> found = new TreeSet<BytesRef>();
|
TreeSet<BytesRef> found = new TreeSet<BytesRef>();
|
||||||
while (te.next() != null) {
|
while (te.next() != null) {
|
||||||
found.add(new BytesRef(te.term()));
|
found.add(BytesRef.deepCopyOf(te.term()));
|
||||||
}
|
}
|
||||||
|
|
||||||
Automaton actual = DaciukMihovAutomatonBuilder.build(found);
|
Automaton actual = DaciukMihovAutomatonBuilder.build(found);
|
||||||
|
|
|
@ -121,10 +121,10 @@ public class TestSurrogates extends LuceneTestCase {
|
||||||
System.out.println();
|
System.out.println();
|
||||||
}
|
}
|
||||||
if (lastText == null) {
|
if (lastText == null) {
|
||||||
lastText = new BytesRef(text);
|
lastText = BytesRef.deepCopyOf(text);
|
||||||
} else {
|
} else {
|
||||||
assertTrue(lastText.compareTo(text) < 0);
|
assertTrue(lastText.compareTo(text) < 0);
|
||||||
lastText.copy(text);
|
lastText.copyBytes(text);
|
||||||
}
|
}
|
||||||
assertEquals(exp.field(), field);
|
assertEquals(exp.field(), field);
|
||||||
assertEquals(exp.bytes(), text);
|
assertEquals(exp.bytes(), text);
|
||||||
|
|
|
@ -134,8 +134,8 @@ public class TestDocValues extends LuceneTestCase {
|
||||||
// random string was after our last
|
// random string was after our last
|
||||||
assertTrue(lastRef.compareTo(bytesValue) < 0);
|
assertTrue(lastRef.compareTo(bytesValue) < 0);
|
||||||
} else {
|
} else {
|
||||||
final BytesRef before = (BytesRef) ss.getByOrd(insertIndex-1, bytesRef)
|
// TODO: I don't think this actually needs a deep copy?
|
||||||
.clone();
|
final BytesRef before = BytesRef.deepCopyOf(ss.getByOrd(insertIndex-1, bytesRef));
|
||||||
BytesRef after = ss.getByOrd(insertIndex, bytesRef);
|
BytesRef after = ss.getByOrd(insertIndex, bytesRef);
|
||||||
assertTrue(COMP.compare(before, bytesValue) < 0);
|
assertTrue(COMP.compare(before, bytesValue) < 0);
|
||||||
assertTrue(COMP.compare(bytesValue, after) < 0);
|
assertTrue(COMP.compare(bytesValue, after) < 0);
|
||||||
|
|
|
@ -428,7 +428,7 @@ public class TestNumericRangeQuery32 extends LuceneTestCase {
|
||||||
if (last != null) {
|
if (last != null) {
|
||||||
assertTrue(last.compareTo(cur) < 0);
|
assertTrue(last.compareTo(cur) < 0);
|
||||||
}
|
}
|
||||||
last = new BytesRef(cur);
|
last = BytesRef.deepCopyOf(cur);
|
||||||
}
|
}
|
||||||
// LUCENE-3314: the results after next() already returned null are undefined,
|
// LUCENE-3314: the results after next() already returned null are undefined,
|
||||||
// assertNull(termEnum.next());
|
// assertNull(termEnum.next());
|
||||||
|
|
|
@ -447,7 +447,7 @@ public class TestNumericRangeQuery64 extends LuceneTestCase {
|
||||||
if (last != null) {
|
if (last != null) {
|
||||||
assertTrue(last.compareTo(cur) < 0);
|
assertTrue(last.compareTo(cur) < 0);
|
||||||
}
|
}
|
||||||
last = new BytesRef(cur);
|
last = BytesRef.deepCopyOf(cur);
|
||||||
}
|
}
|
||||||
// LUCENE-3314: the results after next() already returned null are undefined,
|
// LUCENE-3314: the results after next() already returned null are undefined,
|
||||||
// assertNull(termEnum.next());
|
// assertNull(termEnum.next());
|
||||||
|
|
|
@ -37,7 +37,7 @@ public class TestByteBlockPool extends LuceneTestCase {
|
||||||
final String value = _TestUtil.randomRealisticUnicodeString(random,
|
final String value = _TestUtil.randomRealisticUnicodeString(random,
|
||||||
maxLength);
|
maxLength);
|
||||||
list.add(value);
|
list.add(value);
|
||||||
ref.copy(value);
|
ref.copyChars(value);
|
||||||
pool.copy(ref);
|
pool.copy(ref);
|
||||||
}
|
}
|
||||||
RAMDirectory dir = new RAMDirectory();
|
RAMDirectory dir = new RAMDirectory();
|
||||||
|
@ -50,7 +50,7 @@ public class TestByteBlockPool extends LuceneTestCase {
|
||||||
BytesRef expected = new BytesRef();
|
BytesRef expected = new BytesRef();
|
||||||
BytesRef actual = new BytesRef();
|
BytesRef actual = new BytesRef();
|
||||||
for (String string : list) {
|
for (String string : list) {
|
||||||
expected.copy(string);
|
expected.copyChars(string);
|
||||||
actual.grow(expected.length);
|
actual.grow(expected.length);
|
||||||
actual.length = expected.length;
|
actual.length = expected.length;
|
||||||
input.readBytes(actual.bytes, 0, actual.length);
|
input.readBytes(actual.bytes, 0, actual.length);
|
||||||
|
|
|
@ -73,7 +73,7 @@ public class TestBytesRefHash extends LuceneTestCase {
|
||||||
do {
|
do {
|
||||||
str = _TestUtil.randomRealisticUnicodeString(random, 1000);
|
str = _TestUtil.randomRealisticUnicodeString(random, 1000);
|
||||||
} while (str.length() == 0);
|
} while (str.length() == 0);
|
||||||
ref.copy(str);
|
ref.copyChars(str);
|
||||||
int count = hash.size();
|
int count = hash.size();
|
||||||
int key = hash.add(ref);
|
int key = hash.add(ref);
|
||||||
if (key < 0)
|
if (key < 0)
|
||||||
|
@ -107,7 +107,7 @@ public class TestBytesRefHash extends LuceneTestCase {
|
||||||
do {
|
do {
|
||||||
str = _TestUtil.randomRealisticUnicodeString(random, 1000);
|
str = _TestUtil.randomRealisticUnicodeString(random, 1000);
|
||||||
} while (str.length() == 0);
|
} while (str.length() == 0);
|
||||||
ref.copy(str);
|
ref.copyChars(str);
|
||||||
int count = hash.size();
|
int count = hash.size();
|
||||||
int key = hash.add(ref);
|
int key = hash.add(ref);
|
||||||
if (key >= 0) {
|
if (key >= 0) {
|
||||||
|
@ -121,7 +121,7 @@ public class TestBytesRefHash extends LuceneTestCase {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
for (Entry<String, Integer> entry : strings.entrySet()) {
|
for (Entry<String, Integer> entry : strings.entrySet()) {
|
||||||
ref.copy(entry.getKey());
|
ref.copyChars(entry.getKey());
|
||||||
assertEquals(ref, hash.get(entry.getValue().intValue(), scratch));
|
assertEquals(ref, hash.get(entry.getValue().intValue(), scratch));
|
||||||
}
|
}
|
||||||
hash.clear();
|
hash.clear();
|
||||||
@@ -146,7 +146,7 @@ public class TestBytesRefHash extends LuceneTestCase {
       do {
         str = _TestUtil.randomRealisticUnicodeString(random, 1000);
       } while (str.length() == 0);
-      ref.copy(str);
+      ref.copyChars(str);
       final int key = hash.add(ref);
       if (key < 0) {
         assertTrue(bits.get((-key)-1));
@@ -186,7 +186,7 @@ public class TestBytesRefHash extends LuceneTestCase {
       do {
         str = _TestUtil.randomRealisticUnicodeString(random, 1000);
       } while (str.length() == 0);
-      ref.copy(str);
+      ref.copyChars(str);
       hash.add(ref);
       strings.add(str);
     }
@@ -197,7 +197,7 @@ public class TestBytesRefHash extends LuceneTestCase {
     int i = 0;
     BytesRef scratch = new BytesRef();
     for (String string : strings) {
-      ref.copy(string);
+      ref.copyChars(string);
       assertEquals(ref, hash.get(sort[i++], scratch));
     }
     hash.clear();
@@ -225,7 +225,7 @@ public class TestBytesRefHash extends LuceneTestCase {
       do {
         str = _TestUtil.randomRealisticUnicodeString(random, 1000);
       } while (str.length() == 0);
-      ref.copy(str);
+      ref.copyChars(str);
       int count = hash.size();
       int key = hash.add(ref);

@@ -288,7 +288,7 @@ public class TestBytesRefHash extends LuceneTestCase {
       do {
         str = _TestUtil.randomRealisticUnicodeString(random, 1000);
       } while (str.length() == 0);
-      ref.copy(str);
+      ref.copyChars(str);
       int count = hash.size();
       int key = hash.add(ref);

@@ -314,7 +314,7 @@ public class TestBytesRefHash extends LuceneTestCase {

     assertAllIn(strings, hash);
     for (String string : strings) {
-      ref.copy(string);
+      ref.copyChars(string);
       int key = hash.add(ref);
       BytesRef bytesRef = offsetHash.get((-key)-1, scratch);
       assertEquals(ref, bytesRef);
@@ -334,7 +334,7 @@ public class TestBytesRefHash extends LuceneTestCase {
     BytesRef scratch = new BytesRef();
     int count = hash.size();
     for (String string : strings) {
-      ref.copy(string);
+      ref.copyChars(string);
       int key = hash.add(ref); // add again to check duplicates
       assertEquals(string, hash.get((-key)-1, scratch).utf8ToString());
       assertEquals(count, hash.size());

@@ -62,7 +62,7 @@ public class TestCharsRef extends LuceneTestCase {
       int offset = random.nextInt(charArray.length);
       int length = charArray.length - offset;
       String str = new String(charArray, offset, length);
-      ref.copy(charArray, offset, length);
+      ref.copyChars(charArray, offset, length);
       assertEquals(str, ref.toString());
     }

@@ -811,7 +811,7 @@ public class TestFSTs extends LuceneTestCase {
       final Map<IntsRef,CountMinOutput<T>> prefixes = new HashMap<IntsRef,CountMinOutput<T>>();
       final IntsRef scratch = new IntsRef(10);
       for(InputOutput<T> pair: pairs) {
-        scratch.copy(pair.input);
+        scratch.copyInts(pair.input);
        for(int idx=0;idx<=pair.input.length;idx++) {
          scratch.length = idx;
          CountMinOutput<T> cmo = prefixes.get(scratch);
@@ -819,7 +819,7 @@ public class TestFSTs extends LuceneTestCase {
            cmo = new CountMinOutput<T>();
            cmo.count = 1;
            cmo.output = pair.output;
-           prefixes.put(new IntsRef(scratch), cmo);
+           prefixes.put(IntsRef.deepCopyOf(scratch), cmo);
          } else {
            cmo.count++;
            cmo.output = outputs.common(cmo.output, pair.output);
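
The two TestFSTs hunks above combine the renamed APIs into one idiom: copyInts fills a reusable scratch, truncating scratch.length walks every prefix of the input, and only IntsRef.deepCopyOf is handed to the map, since a scratch used as a HashMap key would be silently corrupted by the next copyInts. A hedged sketch of that idiom (assuming org.apache.lucene.util.IntsRef at this revision; the map and values are illustrative, not from the patch):

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.lucene.util.IntsRef;

    public class PrefixCountSketch {
      public static void main(String[] args) {
        Map<IntsRef, Integer> counts = new HashMap<IntsRef, Integer>();
        IntsRef input = new IntsRef(new int[] {7, 8, 9}, 0, 3);
        IntsRef scratch = new IntsRef(10);
        scratch.copyInts(input);                        // fill the scratch once
        for (int idx = 0; idx <= input.length; idx++) {
          scratch.length = idx;                         // scratch now views the length-idx prefix
          if (counts.get(scratch) == null) {            // probe with the mutable scratch...
            counts.put(IntsRef.deepCopyOf(scratch), 1); // ...but insert a key that owns its ints
          }
        }
        System.out.println(counts.size());              // prints: 4 (prefixes of length 0..3)
      }
    }
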
@@ -871,7 +871,7 @@ public class TestFSTs extends LuceneTestCase {
        } else {
          // clear isLeaf for all ancestors
          //System.out.println("  keep");
-         scratch.copy(prefix);
+         scratch.copyInts(prefix);
          scratch.length--;
          while(scratch.length >= 0) {
            final CountMinOutput<T> cmo2 = prefixes.get(scratch);
@@ -1633,7 +1633,7 @@ public class TestFSTs extends LuceneTestCase {
        if (w == null) {
          break;
        }
-       term.copy(w);
+       term.copyChars(w);
        b.add(term, nothing);
      }

@@ -185,7 +185,7 @@ public final class SynonymFilter extends TokenFilter {
        if (outputs[count] == null) {
          outputs[count] = new CharsRef();
        }
-       outputs[count].copy(output, offset, len);
+       outputs[count].copyChars(output, offset, len);
        count++;
      }
    };
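
Same rename on CharsRef: the copy taking (char[], offset, len) is now copyChars, matching the BytesRef and IntsRef spellings. A small sketch of the slice overload used above (assuming org.apache.lucene.util.CharsRef at this revision):

    import org.apache.lucene.util.CharsRef;

    public class CharsRefSliceSketch {
      public static void main(String[] args) {
        char[] buffer = "hello world".toCharArray();
        CharsRef ref = new CharsRef();
        ref.copyChars(buffer, 6, 5);          // copy the "world" window into ref
        System.out.println(ref.toString());   // prints: world
      }
    }
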
@@ -253,7 +253,7 @@ public final class SynonymFilter extends TokenFilter {

      input.state = captureState();
      input.consumed = false;
-     input.term.copy(termAtt.buffer(), 0, termAtt.length());
+     input.term.copyChars(termAtt.buffer(), 0, termAtt.length());

      nextWrite = rollIncr(nextWrite);

@@ -201,7 +201,7 @@ public class SynonymMap {
     MapEntry e = workingSet.get(input);
     if (e == null) {
       e = new MapEntry();
-      workingSet.put(new CharsRef(input), e); // make a copy, since we will keep around in our map
+      workingSet.put(CharsRef.deepCopyOf(input), e); // make a copy, since we will keep around in our map
     }

     e.ords.add(ord);
@@ -307,7 +307,7 @@ public class SynonymMap {

       scratch.length = scratchOutput.getPosition() - scratch.offset;
       //System.out.println("  add input=" + input + " output=" + scratch + " offset=" + scratch.offset + " length=" + scratch.length + " count=" + count);
-      builder.add(input, new BytesRef(scratch));
+      builder.add(input, BytesRef.deepCopyOf(scratch));
     }

     FST<BytesRef> fst = builder.finish();

@@ -130,7 +130,7 @@ public abstract class TermAllGroupHeadsCollector<GH extends AbstractAllGroupHead
     GroupHead groupHead = groups.get(groupValue);
     if (groupHead == null) {
       groupHead = new GroupHead(groupValue, sortWithinGroup, doc);
-      groups.put(groupValue == null ? null : new BytesRef(groupValue), groupHead);
+      groups.put(groupValue == null ? null : BytesRef.deepCopyOf(groupValue), groupHead);
       temporalResult.stop = true;
     } else {
       temporalResult.stop = false;

@@ -71,10 +71,10 @@ public class TermFirstPassGroupingCollector extends AbstractFirstPassGroupingCol
     if (groupValue == null) {
       return null;
     } else if (reuse != null) {
-      reuse.copy(groupValue);
+      reuse.copyBytes(groupValue);
       return reuse;
     } else {
-      return new BytesRef(groupValue);
+      return BytesRef.deepCopyOf(groupValue);
     }
   }

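The hunk above is the clearest statement of the new naming contract: copyBytes overwrites an instance the caller already owns, while deepCopyOf allocates a fresh one. A hedged sketch of the same reuse idiom outside the collector (assuming org.apache.lucene.util.BytesRef at this revision; copyValue is an illustrative name, not from the patch):

    import org.apache.lucene.util.BytesRef;

    public class ReuseSketch {
      // Reuse the caller's instance when offered, otherwise allocate a copy.
      static BytesRef copyValue(BytesRef value, BytesRef reuse) {
        if (value == null) {
          return null;
        } else if (reuse != null) {
          reuse.copyBytes(value);            // recycle: overwrite in place
          return reuse;
        } else {
          return BytesRef.deepCopyOf(value); // first call: fresh private copy
        }
      }

      public static void main(String[] args) {
        BytesRef value = new BytesRef("group-a");
        BytesRef first = copyValue(value, null);
        BytesRef second = copyValue(value, first);
        System.out.println(first == second); // prints: true (instance was reused)
      }
    }
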
@@ -79,7 +79,7 @@ public class TermsFilter extends Filter {
       }

       if (terms != null) { // TODO this check doesn't make sense, decide which variable its supposed to be for
-        br.copy(term.bytes());
+        br.copyBytes(term.bytes());
         if (termsEnum.seekCeil(br) == TermsEnum.SeekStatus.FOUND) {
           docs = termsEnum.docs(acceptDocs, docs);
           while (docs.nextDoc() != DocsEnum.NO_MORE_DOCS) {

@@ -59,7 +59,7 @@ public abstract class DocValues {
           target.length = 0;
           return false;
         }
-        target.copy(s);
+        target.copyChars(s);
         return true;
       };

@@ -55,7 +55,7 @@ public class LiteralValueSource extends ValueSource {

       @Override
       public boolean bytesVal(int doc, BytesRef target) {
-        target.copy(bytesRef);
+        target.copyBytes(bytesRef);
         return true;
       }

@@ -540,7 +540,7 @@ public abstract class QueryParserBase {
        } catch (IOException e) {
          // safe to ignore, because we know the number of tokens
        }
-       return newTermQuery(new Term(field, new BytesRef(bytes)));
+       return newTermQuery(new Term(field, BytesRef.deepCopyOf(bytes)));
      } else {
        if (severalTokensAtSamePosition || (!quoted && !autoGeneratePhraseQueries)) {
          if (positionCount == 1 || (!quoted && !autoGeneratePhraseQueries)) {
@@ -559,7 +559,7 @@ public abstract class QueryParserBase {
              // safe to ignore, because we know the number of tokens
            }
            Query currentQuery = newTermQuery(
-               new Term(field, new BytesRef(bytes)));
+               new Term(field, BytesRef.deepCopyOf(bytes)));
            q.add(currentQuery, occur);
          }
          return q;
@@ -592,7 +592,7 @@ public abstract class QueryParserBase {
                multiTerms.clear();
              }
              position += positionIncrement;
-             multiTerms.add(new Term(field, new BytesRef(bytes)));
+             multiTerms.add(new Term(field, BytesRef.deepCopyOf(bytes)));
            }
            if (enablePositionIncrements) {
              mpq.add(multiTerms.toArray(new Term[0]),position);
@@ -623,9 +623,9 @@ public abstract class QueryParserBase {

            if (enablePositionIncrements) {
              position += positionIncrement;
-             pq.add(new Term(field, new BytesRef(bytes)),position);
+             pq.add(new Term(field, BytesRef.deepCopyOf(bytes)),position);
            } else {
-             pq.add(new Term(field, new BytesRef(bytes)));
+             pq.add(new Term(field, BytesRef.deepCopyOf(bytes)));
            }
          }
          return pq;
@@ -808,7 +808,7 @@ public abstract class QueryParserBase {
      throw new RuntimeException("Unable to end & close TokenStream after analyzing range part: " + part, e);
    }

-   return new BytesRef(bytes);
+   return BytesRef.deepCopyOf(bytes);
  }

  /**

@@ -59,7 +59,7 @@ public class SpanOrTermsBuilder extends SpanBuilderBase {
      ts.reset();
      while (ts.incrementToken()) {
        termAtt.fillBytesRef();
-       SpanTermQuery stq = new SpanTermQuery(new Term(fieldName, new BytesRef(bytes)));
+       SpanTermQuery stq = new SpanTermQuery(new Term(fieldName, BytesRef.deepCopyOf(bytes)));
        clausesList.add(stq);
      }
      ts.end();

@@ -64,7 +64,7 @@ public class TermsFilterBuilder implements FilterBuilder {
      ts.reset();
      while (ts.incrementToken()) {
        termAtt.fillBytesRef();
-       term = new Term(fieldName, new BytesRef(bytes));
+       term = new Term(fieldName, BytesRef.deepCopyOf(bytes));
        tf.addTerm(term);
      }
      ts.end();

@@ -59,7 +59,7 @@ public class TermsQueryBuilder implements QueryBuilder {
      ts.reset();
      while (ts.incrementToken()) {
        termAtt.fillBytesRef();
-       term = new Term(fieldName, new BytesRef(bytes));
+       term = new Term(fieldName, BytesRef.deepCopyOf(bytes));
        bq.add(new BooleanClause(new TermQuery(term), BooleanClause.Occur.SHOULD));
      }
      ts.end();

@@ -436,7 +436,7 @@ public class DirectSpellChecker {
        continue;

      // add new entry in PQ
-     st.term = new BytesRef(candidateTerm);
+     st.term = BytesRef.deepCopyOf(candidateTerm);
      st.boost = boost;
      st.docfreq = df;
      st.termAsString = termAsString;

@@ -521,7 +521,7 @@ public class SpellChecker implements java.io.Closeable {

      if (!isEmpty) {
        // we have a non-empty index, check if the term exists
-       currentTerm.copy(word);
+       currentTerm.copyChars(word);
        for (TermsEnum te : termsEnums) {
          if (te.seekExact(currentTerm, false)) {
            continue terms;

@@ -218,7 +218,7 @@ public class ICUCollationField extends FieldType {
      throw new RuntimeException("Unable to end & close TokenStream after analyzing range part: " + part, e);
    }

-   return new BytesRef(bytes);
+   return BytesRef.deepCopyOf(bytes);
  }

  @Override

@@ -150,7 +150,7 @@ public abstract class AnalysisRequestHandlerBase extends RequestHandlerBase {

    while (tokenStream.incrementToken()) {
      bytesAtt.fillBytesRef();
-     tokens.add(new BytesRef(bytes));
+     tokens.add(BytesRef.deepCopyOf(bytes));
    }

    tokenStream.end();

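The deep copy above is load-bearing: fillBytesRef() writes every token into one BytesRef owned by the attribute, so adding that instance to the list would make every entry alias the final token. A minimal stand-in for the bug the copy prevents (assuming org.apache.lucene.util.BytesRef at this revision; the loop simulates an attribute's shared scratch):

    import java.util.ArrayList;
    import java.util.List;
    import org.apache.lucene.util.BytesRef;

    public class SharedScratchSketch {
      public static void main(String[] args) {
        List<BytesRef> tokens = new ArrayList<BytesRef>();
        BytesRef shared = new BytesRef();          // stands in for the attribute's scratch
        for (String tok : new String[] {"a", "b", "c"}) {
          shared.copyChars(tok);                   // each token overwrites the scratch
          tokens.add(BytesRef.deepCopyOf(shared)); // tokens.add(shared) would alias "c"
        }
        System.out.println(tokens.get(0).utf8ToString()); // prints: a
      }
    }
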
@@ -207,7 +207,7 @@ public class TermsComponent extends SearchComponent {
        if (docFreq >= freqmin && docFreq <= freqmax) {
          // add the term to the list
          if (sort) {
-           queue.add(new CountPair<BytesRef, Integer>(new BytesRef(term), docFreq));
+           queue.add(new CountPair<BytesRef, Integer>(BytesRef.deepCopyOf(term), docFreq));
          } else {

            // TODO: handle raw somehow

@@ -729,7 +729,7 @@ public class SimpleFacets {

          if (sortByCount) {
            if (c>min) {
-             BytesRef termCopy = new BytesRef(term);
+             BytesRef termCopy = BytesRef.deepCopyOf(term);
              queue.add(new CountPair<BytesRef,Integer>(termCopy, c));
              if (queue.size()>=maxsize) min=queue.last().val;
            }

@@ -118,7 +118,7 @@ public class UnInvertedField extends DocTermOrds {

      if (te.docFreq() > maxTermDocFreq) {
        TopTerm topTerm = new TopTerm();
-       topTerm.term = new BytesRef(term);
+       topTerm.term = BytesRef.deepCopyOf(term);
        topTerm.termNum = termNum;
        bigTerms.put(topTerm.termNum, topTerm);

@@ -142,9 +142,9 @@ public class BoolField extends FieldType {
  @Override
  public CharsRef indexedToReadable(BytesRef input, CharsRef charsRef) {
    if (input.length > 0 && input.bytes[input.offset] == 'T') {
-     charsRef.copy(TRUE);
+     charsRef.copyChars(TRUE);
    } else {
-     charsRef.copy(FALSE);
+     charsRef.copyChars(FALSE);
    }
    return charsRef;
  }

@@ -240,7 +240,7 @@ public class CollationField extends FieldType {
      throw new RuntimeException("Unable to end & close TokenStream after analyzing range part: " + part, e);
    }

-   return new BytesRef(bytes);
+   return BytesRef.deepCopyOf(bytes);
  }

  @Override

@@ -81,7 +81,7 @@ public class SortableDoubleField extends FieldType {
    // TODO: this could be more efficient, but the sortable types should be deprecated instead
    input.utf8ToChars(charsRef);
    final char[] indexedToReadable = indexedToReadable(charsRef.toString()).toCharArray();
-   charsRef.copy(indexedToReadable, 0, indexedToReadable.length);
+   charsRef.copyChars(indexedToReadable, 0, indexedToReadable.length);
    return charsRef;
  }

@@ -79,7 +79,7 @@ public class SortableFloatField extends FieldType {
  public CharsRef indexedToReadable(BytesRef input, CharsRef charsRef) {
    // TODO: this could be more efficient, but the sortable types should be deprecated instead
    final char[] indexedToReadable = indexedToReadable(input.utf8ToChars(charsRef).toString()).toCharArray();
-   charsRef.copy(indexedToReadable, 0, indexedToReadable.length);
+   charsRef.copyChars(indexedToReadable, 0, indexedToReadable.length);
    return charsRef;
  }

@@ -77,7 +77,7 @@ public class SortableIntField extends FieldType {
  public CharsRef indexedToReadable(BytesRef input, CharsRef charsRef) {
    // TODO: this could be more efficient, but the sortable types should be deprecated instead
    final char[] indexedToReadable = indexedToReadable(input.utf8ToChars(charsRef).toString()).toCharArray();
-   charsRef.copy(indexedToReadable, 0, indexedToReadable.length);
+   charsRef.copyChars(indexedToReadable, 0, indexedToReadable.length);
    return charsRef;
  }

@@ -69,7 +69,7 @@ public class SortableLongField extends FieldType {
  public CharsRef indexedToReadable(BytesRef input, CharsRef charsRef) {
    // TODO: this could be more efficient, but the sortable types should be deprecated instead
    final char[] indexedToReadable = indexedToReadable(input.utf8ToChars(charsRef).toString()).toCharArray();
-   charsRef.copy(indexedToReadable, 0, indexedToReadable.length);
+   charsRef.copyChars(indexedToReadable, 0, indexedToReadable.length);
    return charsRef;
  }

@@ -846,7 +846,7 @@ public class SolrIndexSearcher extends IndexSearcher implements SolrInfoMBean {
    TermQuery key = null;

    if (useCache) {
-     key = new TermQuery(new Term(deState.fieldName, new BytesRef(deState.termsEnum.term())));
+     key = new TermQuery(new Term(deState.fieldName, BytesRef.deepCopyOf(deState.termsEnum.term())));
      DocSet result = filterCache.get(key);
      if (result != null) return result;
    }

@@ -59,7 +59,7 @@ public class TermQParserPlugin extends QParserPlugin {
    if (ft != null) {
      ft.readableToIndexed(val, term);
    } else {
-     term.copy(val);
+     term.copyChars(val);
    }
    return new TermQuery(new Term(fname, term));
  }