LUCENE-3590: clearly mark bogus deep-copying apis in BytesRef

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1206143 13f79535-47bb-0310-9956-ffa450edef68
Robert Muir 2011-11-25 12:50:13 +00:00
parent e2cddbfd43
commit 3b6da22aa7
75 changed files with 190 additions and 181 deletions
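For readers skimming the diff: the old BytesRef(BytesRef) constructor and the overloaded copy(...) methods allocated and copied the whole underlying array while looking like cheap calls. After this change every deep copy is spelled out at the call site, and the copy methods are named for what they copy. A minimal before/after sketch (hypothetical caller code, not taken from this patch):

    // before: reads like a cheap wrapper, but actually allocates and copies the array
    BytesRef saved = new BytesRef(term);
    lastTerm.copy(term);

    // after: the deep copy is explicit at the call site
    BytesRef saved = BytesRef.deepCopyOf(term);
    lastTerm.copyBytes(term);

The same renames apply to CharsRef (copyChars) and IntsRef (copyInts); see the util diffs below.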

@@ -177,7 +177,7 @@ public abstract class AbstractTestCase extends LuceneTestCase {
     while (tokenStream.incrementToken()) {
       termAttribute.fillBytesRef();
-      bytesRefs.add(new BytesRef(bytesRef));
+      bytesRefs.add(BytesRef.deepCopyOf(bytesRef));
     }
     tokenStream.end();

@@ -366,7 +366,7 @@ public class MemoryIndex {
       ArrayIntList positions = terms.get(ref);
       if (positions == null) { // term not seen before
         positions = new ArrayIntList(stride);
-        terms.put(new BytesRef(ref), positions);
+        terms.put(BytesRef.deepCopyOf(ref), positions);
       }
       if (stride == 1) {
         positions.add(pos);
@@ -874,7 +874,7 @@ public class MemoryIndex {
     public boolean seekExact(BytesRef text, boolean useCache) {
       termUpto = Arrays.binarySearch(info.sortedTerms, text, termComparator);
       if (termUpto >= 0) {
-        br.copy(info.sortedTerms[termUpto].getKey());
+        br.copyBytes(info.sortedTerms[termUpto].getKey());
         return true;
       } else {
         return false;
@@ -889,11 +889,11 @@ public class MemoryIndex {
         if (termUpto >= info.sortedTerms.length) {
           return SeekStatus.END;
         } else {
-          br.copy(info.sortedTerms[termUpto].getKey());
+          br.copyBytes(info.sortedTerms[termUpto].getKey());
           return SeekStatus.NOT_FOUND;
         }
       } else {
-        br.copy(info.sortedTerms[termUpto].getKey());
+        br.copyBytes(info.sortedTerms[termUpto].getKey());
         return SeekStatus.FOUND;
       }
     }
@@ -910,7 +910,7 @@ public class MemoryIndex {
       if (termUpto >= info.sortedTerms.length) {
         return null;
       } else {
-        br.copy(info.sortedTerms[termUpto].getKey());
+        br.copyBytes(info.sortedTerms[termUpto].getKey());
         return br;
       }
     }

@@ -26,13 +26,13 @@ public final class TermStats {
   public long totalTermFreq;

   TermStats(String field, BytesRef termtext, int df) {
-    this.termtext = new BytesRef(termtext);
+    this.termtext = BytesRef.deepCopyOf(termtext);
     this.field = field;
     this.docFreq = df;
   }

   TermStats(String field, BytesRef termtext, int df, long tf) {
-    this.termtext = new BytesRef(termtext);
+    this.termtext = BytesRef.deepCopyOf(termtext);
     this.field = field;
     this.docFreq = df;
     this.totalTermFreq = tf;

@@ -224,7 +224,7 @@ public class FuzzyLikeThisQuery extends Query
         totalVariantDocFreqs+=fe.docFreq();
         float score=boostAtt.getBoost();
         if (variantsQ.size() < MAX_VARIANTS_PER_TERM || score > minScore){
-          ScoreTerm st=new ScoreTerm(new Term(startTerm.field(), new BytesRef(possibleMatch)),score,startTerm);
+          ScoreTerm st=new ScoreTerm(new Term(startTerm.field(), BytesRef.deepCopyOf(possibleMatch)),score,startTerm);
           variantsQ.insertWithOverflow(st);
           minScore = variantsQ.top().score; // maintain minScore
         }

@@ -186,7 +186,7 @@ public final class NumericTokenStream extends TokenStream {
     @Override
     public void reflectWith(AttributeReflector reflector) {
       fillBytesRef();
-      reflector.reflect(TermToBytesRefAttribute.class, "bytes", new BytesRef(bytes));
+      reflector.reflect(TermToBytesRefAttribute.class, "bytes", BytesRef.deepCopyOf(bytes));
       reflector.reflect(NumericTermAttribute.class, "shift", shift);
       reflector.reflect(NumericTermAttribute.class, "rawValue", getRawValue());
       reflector.reflect(NumericTermAttribute.class, "valueSize", valueSize);

@@ -213,7 +213,7 @@ public class CharTermAttributeImpl extends AttributeImpl implements CharTermAttr
       // Do a deep clone
       t.termBuffer = new char[this.termLength];
       System.arraycopy(this.termBuffer, 0, t.termBuffer, 0, this.termLength);
-      t.bytes = new BytesRef(bytes);
+      t.bytes = BytesRef.deepCopyOf(bytes);
       return t;
     }
@@ -256,7 +256,7 @@ public class CharTermAttributeImpl extends AttributeImpl implements CharTermAttr
   public void reflectWith(AttributeReflector reflector) {
     reflector.reflect(CharTermAttribute.class, "term", toString());
     fillBytesRef();
-    reflector.reflect(TermToBytesRefAttribute.class, "bytes", new BytesRef(bytes));
+    reflector.reflect(TermToBytesRefAttribute.class, "bytes", BytesRef.deepCopyOf(bytes));
   }

   @Override

@@ -246,9 +246,9 @@ public class IndexDocValuesField extends Field implements PerDocFieldValues {
     }
     setDocValuesType(type);
     if (bytes == null) {
-      bytes = new BytesRef(value);
+      bytes = BytesRef.deepCopyOf(value);
     } else {
-      bytes.copy(value);
+      bytes.copyBytes(value);
     }
     bytesComparator = comp;
   }

@@ -116,7 +116,7 @@ class AutomatonTermsEnum extends FilteredTermsEnum {
         return seekBytesRef;
       }
     } else {
-      seekBytesRef.copy(term);
+      seekBytesRef.copyBytes(term);
     }

     // seek to the next possible string;

@@ -455,7 +455,7 @@ class BufferedDeletesStream {
         assert lastDeleteTerm == null || term.compareTo(lastDeleteTerm) > 0: "lastTerm=" + lastDeleteTerm + " vs term=" + term;
       }
       // TODO: we re-use term now in our merged iterable, but we shouldn't clone, instead copy for this assert
-      lastDeleteTerm = term == null ? null : new Term(term.field(), new BytesRef(term.bytes));
+      lastDeleteTerm = term == null ? null : new Term(term.field(), BytesRef.deepCopyOf(term.bytes));
       return true;
     }

@@ -726,12 +726,12 @@ public class CheckIndex {
         // make sure terms arrive in order according to
         // the comp
         if (lastTerm == null) {
-          lastTerm = new BytesRef(term);
+          lastTerm = BytesRef.deepCopyOf(term);
         } else {
           if (termComp.compare(lastTerm, term) >= 0) {
             throw new RuntimeException("terms out of order: lastTerm=" + lastTerm + " term=" + term);
           }
-          lastTerm.copy(term);
+          lastTerm.copyBytes(term);
         }

         final int docFreq = termsEnum.docFreq();
@@ -977,7 +977,7 @@ public class CheckIndex {
         for(int i=seekCount-1;i>=0;i--) {
           long ord = i*(termCount/seekCount);
           termsEnum.seekExact(ord);
-          seekTerms[i] = new BytesRef(termsEnum.term());
+          seekTerms[i] = BytesRef.deepCopyOf(termsEnum.term());
         }

         // Seek by term

@@ -221,7 +221,7 @@ public class DocTermOrds {
   protected void uninvert(final IndexReader reader, final BytesRef termPrefix) throws IOException {
     //System.out.println("DTO uninvert field=" + field + " prefix=" + termPrefix);
     final long startTime = System.currentTimeMillis();
-    prefix = termPrefix == null ? null : new BytesRef(termPrefix);
+    prefix = termPrefix == null ? null : BytesRef.deepCopyOf(termPrefix);
     final int maxDoc = reader.maxDoc();
     final int[] index = new int[maxDoc]; // immediate term numbers, or the index into the byte[] representing the last number

@@ -201,7 +201,7 @@ public final class MultiTermsEnum extends TermsEnum {
       seekOpt = true;
     }
-    lastSeekScratch.copy(term);
+    lastSeekScratch.copyBytes(term);
     lastSeek = lastSeekScratch;

     for(int i=0;i<numSubs;i++) {

@@ -111,7 +111,7 @@ class PrefixCodedTerms implements Iterable<Term> {
         }
         output.writeVInt(suffix);
         output.writeBytes(term.bytes.bytes, term.bytes.offset + prefix, suffix);
-        lastTerm.bytes.copy(term.bytes);
+        lastTerm.bytes.copyBytes(term.bytes);
         lastTerm.field = term.field;
       } catch (IOException e) {
         throw new RuntimeException(e);

@@ -85,7 +85,7 @@ public class BlockTermsReader extends FieldsProducer {
     public FieldAndTerm(FieldAndTerm other) {
       field = other.field;
-      term = new BytesRef(other.term);
+      term = BytesRef.deepCopyOf(other.term);
     }

     @Override
@@ -437,7 +437,7 @@ public class BlockTermsReader extends FieldsProducer {
           state.ord = indexEnum.ord()-1;
         }
-        term.copy(indexEnum.term());
+        term.copyBytes(indexEnum.term());
         //System.out.println(" seek: term=" + term.utf8ToString());
       } else {
         //System.out.println(" skip seek");
@@ -720,7 +720,7 @@ public class BlockTermsReader extends FieldsProducer {
       state.copyFrom(otherState);
       seekPending = true;
       indexIsCurrent = false;
-      term.copy(target);
+      term.copyBytes(target);
     }

     @Override
@@ -757,7 +757,7 @@ public class BlockTermsReader extends FieldsProducer {
       state.ord = indexEnum.ord()-1;
       assert state.ord >= -1: "ord=" + state.ord;
-      term.copy(indexEnum.term());
+      term.copyBytes(indexEnum.term());

       // Now, scan:
       int left = (int) (ord - state.ord);

@@ -225,7 +225,7 @@ public class BlockTermsWriter extends FieldsConsumer {
         pendingTerms = newArray;
       }
       final TermEntry te = pendingTerms[pendingCount];
-      te.term.copy(text);
+      te.term.copyBytes(text);
       te.stats = stats;
       pendingCount++;
@@ -312,7 +312,7 @@ public class BlockTermsWriter extends FieldsConsumer {
       bytesWriter.reset();
       postingsWriter.flushTermsBlock(pendingCount, pendingCount);
-      lastPrevTerm.copy(pendingTerms[pendingCount-1].term);
+      lastPrevTerm.copyBytes(pendingTerms[pendingCount-1].term);
       pendingCount = 0;
     }
   }

@@ -785,7 +785,7 @@ public class BlockTreeTermsReader extends FieldsProducer {
     // only for assert:
     private boolean setSavedStartTerm(BytesRef startTerm) {
-      savedStartTerm = startTerm == null ? null : new BytesRef(startTerm);
+      savedStartTerm = startTerm == null ? null : BytesRef.deepCopyOf(startTerm);
       return true;
     }
@@ -1847,7 +1847,7 @@ public class BlockTreeTermsReader extends FieldsProducer {
       final SeekStatus result = currentFrame.scanToTerm(target, false);
       if (result == SeekStatus.END) {
-        term.copy(target);
+        term.copyBytes(target);
         termExists = false;
         if (next() != null) {
@@ -1900,7 +1900,7 @@ public class BlockTreeTermsReader extends FieldsProducer {
       final SeekStatus result = currentFrame.scanToTerm(target, false);
       if (result == SeekStatus.END) {
-        term.copy(target);
+        term.copyBytes(target);
         termExists = false;
         if (next() != null) {
           //if (DEBUG) {
@@ -2120,7 +2120,7 @@ public class BlockTreeTermsReader extends FieldsProducer {
       assert otherState != null && otherState instanceof BlockTermState;
       currentFrame = staticFrame;
       currentFrame.state.copyFrom(otherState);
-      term.copy(target);
+      term.copyBytes(target);
       currentFrame.metaDataUpto = currentFrame.getTermBlockOrd();
       assert currentFrame.metaDataUpto > 0;
       validIndexPrefix = 0;

@@ -860,7 +860,7 @@ public class BlockTreeTermsWriter extends FieldsConsumer {
       //if (DEBUG) System.out.println("BTTW.finishTerm term=" + fieldInfo.name + ":" + toString(text) + " seg=" + segment + " df=" + stats.docFreq);
       blockBuilder.add(text, noOutputs.getNoOutput());
-      pending.add(new PendingTerm(new BytesRef(text), stats));
+      pending.add(new PendingTerm(BytesRef.deepCopyOf(text), stats));
       postingsWriter.finishTerm(stats);
       numTerms++;
     }

@@ -447,7 +447,7 @@ public class DefaultTermVectorsReader extends TermVectorsReader {
       if (nextTerm >= numTerms) {
         return null;
       }
-      term.copy(lastTerm);
+      term.copyBytes(lastTerm);
       final int start = tvf.readVInt();
       final int deltaLen = tvf.readVInt();
       term.length = start + deltaLen;
@@ -477,7 +477,7 @@ public class DefaultTermVectorsReader extends TermVectorsReader {
         }
       }
-      lastTerm.copy(term);
+      lastTerm.copyBytes(term);
       nextTerm++;
       return term;
     }

@@ -132,7 +132,7 @@ public final class DefaultTermVectorsWriter extends TermVectorsWriter {
     tvf.writeVInt(suffix);
     tvf.writeBytes(term.bytes, term.offset + prefix, suffix);
     tvf.writeVInt(freq);
-    lastTerm.copy(term);
+    lastTerm.copyBytes(term);
     lastPosition = lastOffset = 0;

     if (offsets && positions) {

@@ -141,7 +141,7 @@ public class FixedGapTermsIndexWriter extends TermsIndexWriterBase {
       if (0 == numTerms % termIndexInterval) {
         // save last term just before next index term so we
         // can compute wasted suffix
-        lastTerm.copy(text);
+        lastTerm.copyBytes(text);
       }
       return false;
     }
@@ -172,7 +172,7 @@ public class FixedGapTermsIndexWriter extends TermsIndexWriterBase {
       termLengths[numIndexTerms] = (short) indexedTermLength;
       totTermLength += indexedTermLength;
-      lastTerm.copy(text);
+      lastTerm.copyBytes(text);
       numIndexTerms++;
     }

@@ -241,7 +241,7 @@ public class VariableGapTermsIndexWriter extends TermsIndexWriterBase {
         //System.out.println(" YES");
         return true;
       } else {
-        lastTerm.copy(text);
+        lastTerm.copyBytes(text);
         return false;
       }
     }
@@ -260,7 +260,7 @@ public class VariableGapTermsIndexWriter extends TermsIndexWriterBase {
       } finally {
         text.length = lengthSave;
       }
-      lastTerm.copy(text);
+      lastTerm.copyBytes(text);
     }

     @Override

@@ -407,7 +407,7 @@ public class Lucene3xFields extends FieldsProducer {
       getTermsDict().seekEnum(termEnum, seekTermEnum.term(), true);
       //newSuffixStart = downTo+4;
       newSuffixStart = downTo;
-      scratchTerm.copy(termEnum.term().bytes());
+      scratchTerm.copyBytes(termEnum.term().bytes());
       didSeek = true;
       if (DEBUG_SURROGATES) {
         System.out.println(" seek!");
@@ -481,7 +481,7 @@
       // done no scanning (eg, term was precisely
       // and index term, or, was in the term seek
       // cache):
-      scratchTerm.copy(b2);
+      scratchTerm.copyBytes(b2);
       setNewSuffixStart(prevTerm, scratchTerm);
       return true;
@@ -554,7 +554,7 @@
       if (termEnum.term() == null || termEnum.term().field() != internedFieldName) {
         scratchTerm.length = 0;
       } else {
-        scratchTerm.copy(termEnum.term().bytes());
+        scratchTerm.copyBytes(termEnum.term().bytes());
       }

       if (DEBUG_SURROGATES) {
@@ -687,7 +687,7 @@
       // TODO: more efficient seek?
       getTermsDict().seekEnum(termEnum, seekTermEnum.term(), true);
-      scratchTerm.copy(seekTermEnum.term().bytes());
+      scratchTerm.copyBytes(seekTermEnum.term().bytes());

       // +3 because we don't need to check the char
       // at upTo: we know it's > BMP
@@ -788,7 +788,7 @@
       // We hit EOF; try end-case surrogate dance: if we
       // find an E, try swapping in S, backwards:
-      scratchTerm.copy(term);
+      scratchTerm.copyBytes(term);

       assert scratchTerm.offset == 0;
@@ -800,7 +800,7 @@
       if (seekToNonBMP(seekTermEnum, scratchTerm, i)) {
-        scratchTerm.copy(seekTermEnum.term().bytes());
+        scratchTerm.copyBytes(seekTermEnum.term().bytes());
         getTermsDict().seekEnum(termEnum, seekTermEnum.term(), useCache);
         newSuffixStart = 1+i;
@@ -826,7 +826,7 @@
       // We found a non-exact but non-null term; this one
       // is fun -- just treat it like next, by pretending
       // requested term was prev:
-      prevTerm.copy(term);
+      prevTerm.copyBytes(term);

       if (DEBUG_SURROGATES) {
         System.out.println(" seek hit non-exact term=" + UnicodeUtil.toHexString(t.text()));
@@ -895,7 +895,7 @@
       }
       // TODO: can we use STE's prevBuffer here?
-      prevTerm.copy(termEnum.term().bytes());
+      prevTerm.copyBytes(termEnum.term().bytes());

       if (termEnum.next() && termEnum.term().field() == internedFieldName) {
         newSuffixStart = termEnum.newSuffixStart;

@@ -78,7 +78,7 @@ final class TermBuffer implements Cloneable {
       reset();
       return;
     }
-    bytes.copy(term.bytes());
+    bytes.copyBytes(term.bytes());
     field = term.field().intern();
     currentFieldNumber = -1;
     this.term = term;
@@ -90,7 +90,7 @@ final class TermBuffer implements Cloneable {
     // dangerous to copy Term over, since the underlying
     // BytesRef could subsequently be modified:
     term = null;
-    bytes.copy(other.bytes);
+    bytes.copyBytes(other.bytes);
   }

   public void reset() {
@@ -104,7 +104,7 @@ final class TermBuffer implements Cloneable {
       return null;

     if (term == null) {
-      term = new Term(field, new BytesRef(bytes));
+      term = new Term(field, BytesRef.deepCopyOf(bytes));
     }

     return term;
@@ -116,7 +116,7 @@ final class TermBuffer implements Cloneable {
     try {
       clone = (TermBuffer)super.clone();
     } catch (CloneNotSupportedException e) {}
-    clone.bytes = new BytesRef(bytes);
+    clone.bytes = BytesRef.deepCopyOf(bytes);
     return clone;
   }
 }

@@ -213,7 +213,7 @@ public class MemoryPostingsFormat extends PostingsFormat {
             System.out.println(" " + Integer.toHexString(finalBuffer[i]&0xFF));
           }
         }
-        builder.add(text, new BytesRef(spare));
+        builder.add(text, BytesRef.deepCopyOf(spare));
         termCount++;
       }

@@ -183,9 +183,9 @@ public final class PulsingPostingsWriter extends PostingsWriterBase {
       pos.docID = currentDoc.docID;
       if (payload != null && payload.length > 0) {
         if (pos.payload == null) {
-          pos.payload = new BytesRef(payload);
+          pos.payload = BytesRef.deepCopyOf(payload);
         } else {
-          pos.payload.copy(payload);
+          pos.payload.copyBytes(payload);
         }
       } else if (pos.payload != null) {
         pos.payload.length = 0;

@@ -295,7 +295,7 @@ final class SortedBytesMergeUtils {
     @Override
     public BytesRef getByOrd(int ord, BytesRef bytesRef) {
-      bytesRef.copy(missingValue);
+      bytesRef.copyBytes(missingValue);
       return bytesRef;
     }

@@ -240,7 +240,7 @@ public final class FuzzyTermsEnum extends TermsEnum {
       this.bottomTerm = bottomTerm;
       // clone the term before potentially doing something with it
       // this is a rare but wonderful occurrence anyway
-      queuedBottom = new BytesRef(term);
+      queuedBottom = BytesRef.deepCopyOf(term);
     }

     return term;

@@ -91,12 +91,12 @@ public abstract class TopTermsRewrite<Q extends Query> extends TermCollectingRew
     private BytesRef lastTerm;
     private boolean compareToLastTerm(BytesRef t) throws IOException {
       if (lastTerm == null && t != null) {
-        lastTerm = new BytesRef(t);
+        lastTerm = BytesRef.deepCopyOf(t);
       } else if (t == null) {
         lastTerm = null;
       } else {
         assert termsEnum.getComparator().compare(lastTerm, t) < 0: "lastTerm=" + lastTerm + " t=" + t;
-        lastTerm.copy(t);
+        lastTerm.copyBytes(t);
       }
       return true;
     }
@@ -127,7 +127,7 @@ public abstract class TopTermsRewrite<Q extends Query> extends TermCollectingRew
         t.termState.register(state, readerContext.ord, termsEnum.docFreq(), termsEnum.totalTermFreq());
       } else {
         // add new entry in PQ, we must clone the term, else it may get overwritten!
-        st.bytes.copy(bytes);
+        st.bytes.copyBytes(bytes);
         st.boost = boost;
         visitedTerms.put(st.bytes, st);
         assert st.termState.docFreq() == 0;

@@ -24,7 +24,7 @@ import java.util.Comparator;
  * use {@link #EMPTY_BYTES} if necessary.
  *
  * @lucene.experimental */
-public final class BytesRef implements Comparable<BytesRef> {
+public final class BytesRef implements Comparable<BytesRef>,Cloneable {

   static final int HASH_PRIME = 31;
   public static final byte[] EMPTY_BYTES = new byte[0];
@@ -72,34 +72,16 @@ public final class BytesRef implements Comparable<BytesRef> {
    */
   public BytesRef(CharSequence text) {
     this();
-    copy(text);
+    copyChars(text);
   }

-  public BytesRef(BytesRef other) {
-    this();
-    copy(other);
-  }
-
-  /* // maybe?
-  public BytesRef(BytesRef other, boolean shallow) {
-    this();
-    if (shallow) {
-      offset = other.offset;
-      length = other.length;
-      bytes = other.bytes;
-    } else {
-      copy(other);
-    }
-  }
-  */
-
   /**
    * Copies the UTF8 bytes for this string.
    *
    * @param text Must be well-formed unicode text, with no
    *        unpaired surrogates or invalid UTF16 code units.
    */
-  public void copy(CharSequence text) {
+  public void copyChars(CharSequence text) {
     UnicodeUtil.UTF16toUTF8(text, 0, text.length(), this);
   }
@@ -120,8 +102,8 @@ public final class BytesRef implements Comparable<BytesRef> {
   }

   @Override
-  public Object clone() {
-    return new BytesRef(this);
+  public BytesRef clone() {
+    return new BytesRef(bytes, offset, length);
   }

   private boolean sliceEquals(BytesRef other, int pos) {
@@ -207,12 +189,12 @@ public final class BytesRef implements Comparable<BytesRef> {
   }

   /**
-   * Copies the given {@link BytesRef}
+   * Copies the bytes from the given {@link BytesRef}
    * <p>
    * NOTE: this method resets the offset to 0 and resizes the reference array
    * if needed.
    */
-  public void copy(BytesRef other) {
+  public void copyBytes(BytesRef other) {
     if (bytes.length < other.length) {
       bytes = new byte[other.length];
     }
@@ -355,4 +337,17 @@ public final class BytesRef implements Comparable<BytesRef> {
       return a.length - b.length;
     }
   }
+
+  /**
+   * Creates a new BytesRef that points to a copy of the bytes from
+   * <code>other</code>
+   * <p>
+   * The returned BytesRef will have a length of other.length
+   * and an offset of zero.
+   */
+  public static BytesRef deepCopyOf(BytesRef other) {
+    BytesRef copy = new BytesRef();
+    copy.copyBytes(other);
+    return copy;
+  }
 }
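One behavioral change worth noting in the BytesRef hunks above: clone() is now a shallow copy that shares the underlying byte[] (previously it deep-copied via the removed copy constructor), and deepCopyOf(...) is the only call that detaches the bytes. A small illustration (hypothetical, not part of the patch):

    BytesRef original = new BytesRef("abc");        // BytesRef(CharSequence) UTF-8 encodes the text
    BytesRef shallow = original.clone();            // shares original.bytes
    BytesRef deep = BytesRef.deepCopyOf(original);  // fresh byte[], offset 0

    original.bytes[original.offset] = (byte) 'x';
    // shallow now reads "xbc"; deep still reads "abc"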

@@ -25,7 +25,7 @@ import java.util.Comparator;
  * {@link #EMPTY_ARRAY} if necessary.
  * @lucene.internal
  */
-public final class CharsRef implements Comparable<CharsRef>, CharSequence {
+public final class CharsRef implements Comparable<CharsRef>, CharSequence, Cloneable {
   private static final char[] EMPTY_ARRAY = new char[0];
   public char[] chars;
   public int offset;
@@ -68,18 +68,9 @@ public final class CharsRef implements Comparable<CharsRef>, CharSequence {
     this.length = chars.length;
   }

-  /**
-   * Creates a new {@link CharsRef} and copies the contents of the source into
-   * the new instance.
-   * @see #copy(CharsRef)
-   */
-  public CharsRef(CharsRef other) {
-    copy(other);
-  }
-
   @Override
-  public Object clone() {
-    return new CharsRef(this);
+  public CharsRef clone() {
+    return new CharsRef(chars, offset, length);
   }

   @Override
@@ -168,7 +159,8 @@ public final class CharsRef implements Comparable<CharsRef>, CharSequence {
    * @param other
    *          the {@link CharsRef} to copy
    */
-  public void copy(CharsRef other) {
+  // TODO: why does this behave differently/not invoke copyChars(char[], int, int) ???
+  public void copyChars(CharsRef other) {
     if (chars == null) {
       chars = new char[other.length];
     } else {
@@ -188,7 +180,7 @@ public final class CharsRef implements Comparable<CharsRef>, CharSequence {
   /**
    * Copies the given array into this CharsRef starting at offset 0
    */
-  public void copy(char[] otherChars, int otherOffset, int otherLength) {
+  public void copyChars(char[] otherChars, int otherOffset, int otherLength) {
     grow(otherLength);
     System.arraycopy(otherChars, otherOffset, this.chars, 0,
         otherLength);
@@ -275,4 +267,17 @@ public final class CharsRef implements Comparable<CharsRef>, CharSequence {
       return a.length - b.length;
     }
   }
+
+  /**
+   * Creates a new CharsRef that points to a copy of the chars from
+   * <code>other</code>
+   * <p>
+   * The returned CharsRef will have a length of other.length
+   * and an offset of zero.
+   */
+  public static CharsRef deepCopyOf(CharsRef other) {
+    CharsRef clone = new CharsRef();
+    clone.copyChars(other);
+    return clone;
+  }
 }

@@ -22,7 +22,7 @@ package org.apache.lucene.util;
  * {@link #EMPTY_INTS} if necessary.
  *
  * @lucene.internal */
-public final class IntsRef implements Comparable<IntsRef> {
+public final class IntsRef implements Comparable<IntsRef>, Cloneable {

   public static final int[] EMPTY_INTS = new int[0];
@@ -43,13 +43,9 @@ public final class IntsRef implements Comparable<IntsRef> {
     this.length = length;
   }

-  public IntsRef(IntsRef other) {
-    copy(other);
-  }
-
   @Override
-  public Object clone() {
-    return new IntsRef(this);
+  public IntsRef clone() {
+    return new IntsRef(ints, offset, length);
   }

   @Override
@@ -109,7 +105,7 @@ public final class IntsRef implements Comparable<IntsRef> {
     return this.length - other.length;
   }

-  public void copy(IntsRef other) {
+  public void copyInts(IntsRef other) {
     if (ints == null) {
       ints = new int[other.length];
     } else {
@@ -140,4 +136,17 @@ public final class IntsRef implements Comparable<IntsRef> {
     sb.append(']');
     return sb.toString();
   }
+
+  /**
+   * Creates a new IntsRef that points to a copy of the ints from
+   * <code>other</code>
+   * <p>
+   * The returned IntsRef will have a length of other.length
+   * and an offset of zero.
+   */
+  public static IntsRef deepCopyOf(IntsRef other) {
+    IntsRef clone = new IntsRef();
+    clone.copyInts(other);
+    return clone;
+  }
 }
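CharsRef and IntsRef above follow the same convention: clone() is shallow, copyChars/copyInts overwrite an existing ref in place, and deepCopyOf is the only allocating copy. The pattern that motivates the explicit name recurs throughout this commit: a reused scratch ref must be detached before it is stored, for example as a map key (hypothetical sketch mirroring the TestFSTs hunks below):

    IntsRef scratch = new IntsRef(10);
    Map<IntsRef,Long> prefixes = new HashMap<IntsRef,Long>();
    scratch.copyInts(input);                        // reuse: overwrites the scratch in place
    prefixes.put(IntsRef.deepCopyOf(scratch), 1L);  // store: a detached copy, safe as a key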

@@ -462,7 +462,7 @@ public class Builder<T> {
     }

     // save last input
-    lastInput.copy(input);
+    lastInput.copyInts(input);

     //System.out.println(" count[0]=" + frontier[0].inputCount);
   }

@@ -30,13 +30,13 @@ public class MutableValueStr extends MutableValue {
   public void copy(MutableValue source) {
     MutableValueStr s = (MutableValueStr) source;
     exists = s.exists;
-    value.copy(s.value);
+    value.copyBytes(s.value);
   }

   @Override
   public MutableValue duplicate() {
     MutableValueStr v = new MutableValueStr();
-    v.value.copy(value);
+    v.value.copyBytes(value);
     v.exists = this.exists;
     return v;
   }

@@ -279,7 +279,7 @@ public abstract class CollationTestBase extends LuceneTestCase {
       assertTrue(ts.incrementToken());
       termAtt.fillBytesRef();
       // ensure we make a copy of the actual bytes too
-      map.put(term, new BytesRef(bytes));
+      map.put(term, BytesRef.deepCopyOf(bytes));
     }

     Thread threads[] = new Thread[numThreads];

@@ -170,7 +170,7 @@ final class TermInfosWriter implements Closeable {
       return cmp;
     }

-    scratchBytes.copy(term);
+    scratchBytes.copyBytes(term);
     assert lastTerm.offset == 0;
     UnicodeUtil.UTF8toUTF16(lastTerm.bytes, 0, lastTerm.length, utf16Result1);
@@ -255,7 +255,7 @@ final class TermInfosWriter implements Closeable {
     output.writeVInt(length); // write delta length
     output.writeBytes(term.bytes, start+term.offset, length); // write delta bytes
     output.writeVInt(fieldNumber); // write field num
-    lastTerm.copy(term);
+    lastTerm.copyBytes(term);
   }

   /** Called to complete TermInfos creation. */

@@ -324,7 +324,7 @@ public final class DaciukMihovAutomatonBuilder {
   private boolean setPrevious(CharsRef current) {
     // don't need to copy, once we fix https://issues.apache.org/jira/browse/LUCENE-3277
    // still, called only from assert
-    previous = new CharsRef(current);
+    previous = CharsRef.deepCopyOf(current);
     return true;
   }

@@ -77,7 +77,7 @@ public class Test2BTerms extends LuceneTestCase {
       random.nextBytes(bytes.bytes);
       tokenCount++;
       if (--nextSave == 0) {
-        savedTerms.add(new BytesRef(bytes));
+        savedTerms.add(BytesRef.deepCopyOf(bytes));
         System.out.println("TEST: save term=" + bytes);
         nextSave = _TestUtil.nextInt(random, 500000, 1000000);
       }
@@ -231,7 +231,7 @@ public class Test2BTerms extends LuceneTestCase {
     BytesRef term;
     while((term = termsEnum.next()) != null) {
       if (--nextSave == 0) {
-        savedTerms.add(new BytesRef(term));
+        savedTerms.add(BytesRef.deepCopyOf(term));
         System.out.println("TEST: add " + term);
         nextSave = _TestUtil.nextInt(random, 500000, 1000000);
       }

@@ -75,7 +75,7 @@ public class TestDocumentsWriterDeleteQueue extends LuceneTestCase {
     HashSet<Term> frozenSet = new HashSet<Term>();
     for (Term t : queue.freezeGlobalBuffer(null).termsIterable()) {
       BytesRef bytesRef = new BytesRef();
-      bytesRef.copy(t.bytes);
+      bytesRef.copyBytes(t.bytes);
       frozenSet.add(new Term(t.field, bytesRef));
     }
     assertEquals(uniqueValues, frozenSet);
@@ -204,7 +204,7 @@ public class TestDocumentsWriterDeleteQueue extends LuceneTestCase {
     Set<Term> frozenSet = new HashSet<Term>();
     for (Term t : queue.freezeGlobalBuffer(null).termsIterable()) {
       BytesRef bytesRef = new BytesRef();
-      bytesRef.copy(t.bytes);
+      bytesRef.copyBytes(t.bytes);
       frozenSet.add(new Term(t.field, bytesRef));
     }
     assertEquals("num deletes must be 0 after freeze", 0, queue

@@ -148,7 +148,7 @@ public class TestIndexWriterUnicode extends LuceneTestCase {
       }
       assertTrue(last.compareTo(term) < 0);
-      last.copy(term);
+      last.copyBytes(term);

       final String s = term.utf8ToString();
       assertTrue("term " + termDesc(s) + " was not added to index (count=" + allTerms.size() + ")", allTerms.contains(s));

@@ -65,7 +65,7 @@ public class TestTermsEnum extends LuceneTestCase {
     final TermsEnum termsEnum = MultiFields.getTerms(r, "body").iterator(null);
     BytesRef term;
     while((term = termsEnum.next()) != null) {
-      terms.add(new BytesRef(term));
+      terms.add(BytesRef.deepCopyOf(term));
     }
     if (VERBOSE) {
       System.out.println("TEST: " + terms.size() + " terms");
@@ -310,7 +310,7 @@ public class TestTermsEnum extends LuceneTestCase {
     if (startTerm == null) {
       loc = 0;
     } else {
-      loc = Arrays.binarySearch(termsArray, new BytesRef(startTerm));
+      loc = Arrays.binarySearch(termsArray, BytesRef.deepCopyOf(startTerm));
       if (loc < 0) {
         loc = -(loc+1);
       } else {
@@ -648,7 +648,7 @@ public class TestTermsEnum extends LuceneTestCase {
     } else {
       // pick valid term
       loc = random.nextInt(validTerms.length);
-      t = new BytesRef(validTerms[loc]);
+      t = BytesRef.deepCopyOf(validTerms[loc]);
       termState = null;
       if (VERBOSE) {
         System.out.println("\nTEST: valid term=" + t.utf8ToString());

@@ -166,7 +166,7 @@ public class TestTermsEnum2 extends LuceneTestCase {
     Automaton expected = BasicOperations.intersection(termsAutomaton, automaton);
     TreeSet<BytesRef> found = new TreeSet<BytesRef>();
     while (te.next() != null) {
-      found.add(new BytesRef(te.term()));
+      found.add(BytesRef.deepCopyOf(te.term()));
     }

     Automaton actual = DaciukMihovAutomatonBuilder.build(found);

@@ -121,10 +121,10 @@ public class TestSurrogates extends LuceneTestCase {
         System.out.println();
       }
       if (lastText == null) {
-        lastText = new BytesRef(text);
+        lastText = BytesRef.deepCopyOf(text);
       } else {
         assertTrue(lastText.compareTo(text) < 0);
-        lastText.copy(text);
+        lastText.copyBytes(text);
       }
       assertEquals(exp.field(), field);
       assertEquals(exp.bytes(), text);

@@ -134,8 +134,8 @@ public class TestDocValues extends LuceneTestCase {
         // random string was after our last
         assertTrue(lastRef.compareTo(bytesValue) < 0);
       } else {
-        final BytesRef before = (BytesRef) ss.getByOrd(insertIndex-1, bytesRef)
-            .clone();
+        // TODO: I don't think this actually needs a deep copy?
+        final BytesRef before = BytesRef.deepCopyOf(ss.getByOrd(insertIndex-1, bytesRef));
         BytesRef after = ss.getByOrd(insertIndex, bytesRef);
         assertTrue(COMP.compare(before, bytesValue) < 0);
         assertTrue(COMP.compare(bytesValue, after) < 0);

@@ -428,7 +428,7 @@ public class TestNumericRangeQuery32 extends LuceneTestCase {
       if (last != null) {
         assertTrue(last.compareTo(cur) < 0);
       }
-      last = new BytesRef(cur);
+      last = BytesRef.deepCopyOf(cur);
     }
     // LUCENE-3314: the results after next() already returned null are undefined,
     // assertNull(termEnum.next());

@@ -447,7 +447,7 @@ public class TestNumericRangeQuery64 extends LuceneTestCase {
       if (last != null) {
         assertTrue(last.compareTo(cur) < 0);
       }
-      last = new BytesRef(cur);
+      last = BytesRef.deepCopyOf(cur);
     }
     // LUCENE-3314: the results after next() already returned null are undefined,
     // assertNull(termEnum.next());

@@ -37,7 +37,7 @@ public class TestByteBlockPool extends LuceneTestCase {
       final String value = _TestUtil.randomRealisticUnicodeString(random,
           maxLength);
       list.add(value);
-      ref.copy(value);
+      ref.copyChars(value);
       pool.copy(ref);
     }
     RAMDirectory dir = new RAMDirectory();
@@ -50,7 +50,7 @@ public class TestByteBlockPool extends LuceneTestCase {
     BytesRef expected = new BytesRef();
     BytesRef actual = new BytesRef();
     for (String string : list) {
-      expected.copy(string);
+      expected.copyChars(string);
       actual.grow(expected.length);
       actual.length = expected.length;
       input.readBytes(actual.bytes, 0, actual.length);

@@ -73,7 +73,7 @@ public class TestBytesRefHash extends LuceneTestCase {
       do {
         str = _TestUtil.randomRealisticUnicodeString(random, 1000);
       } while (str.length() == 0);
-      ref.copy(str);
+      ref.copyChars(str);
       int count = hash.size();
       int key = hash.add(ref);
       if (key < 0)
@@ -107,7 +107,7 @@ public class TestBytesRefHash extends LuceneTestCase {
       do {
         str = _TestUtil.randomRealisticUnicodeString(random, 1000);
       } while (str.length() == 0);
-      ref.copy(str);
+      ref.copyChars(str);
       int count = hash.size();
       int key = hash.add(ref);
       if (key >= 0) {
@@ -121,7 +121,7 @@ public class TestBytesRefHash extends LuceneTestCase {
       }
     }
     for (Entry<String, Integer> entry : strings.entrySet()) {
-      ref.copy(entry.getKey());
+      ref.copyChars(entry.getKey());
       assertEquals(ref, hash.get(entry.getValue().intValue(), scratch));
     }
     hash.clear();
@@ -146,7 +146,7 @@ public class TestBytesRefHash extends LuceneTestCase {
       do {
        str = _TestUtil.randomRealisticUnicodeString(random, 1000);
       } while (str.length() == 0);
-      ref.copy(str);
+      ref.copyChars(str);
       final int key = hash.add(ref);
       if (key < 0) {
         assertTrue(bits.get((-key)-1));
@@ -186,7 +186,7 @@ public class TestBytesRefHash extends LuceneTestCase {
       do {
         str = _TestUtil.randomRealisticUnicodeString(random, 1000);
       } while (str.length() == 0);
-      ref.copy(str);
+      ref.copyChars(str);
       hash.add(ref);
       strings.add(str);
     }
@@ -197,7 +197,7 @@ public class TestBytesRefHash extends LuceneTestCase {
     int i = 0;
     BytesRef scratch = new BytesRef();
     for (String string : strings) {
-      ref.copy(string);
+      ref.copyChars(string);
       assertEquals(ref, hash.get(sort[i++], scratch));
     }
     hash.clear();
@@ -225,7 +225,7 @@ public class TestBytesRefHash extends LuceneTestCase {
       do {
         str = _TestUtil.randomRealisticUnicodeString(random, 1000);
       } while (str.length() == 0);
-      ref.copy(str);
+      ref.copyChars(str);
       int count = hash.size();
       int key = hash.add(ref);
@@ -288,7 +288,7 @@ public class TestBytesRefHash extends LuceneTestCase {
       do {
         str = _TestUtil.randomRealisticUnicodeString(random, 1000);
       } while (str.length() == 0);
-      ref.copy(str);
+      ref.copyChars(str);
       int count = hash.size();
       int key = hash.add(ref);
@@ -314,7 +314,7 @@ public class TestBytesRefHash extends LuceneTestCase {
     assertAllIn(strings, hash);
     for (String string : strings) {
-      ref.copy(string);
+      ref.copyChars(string);
       int key = hash.add(ref);
       BytesRef bytesRef = offsetHash.get((-key)-1, scratch);
       assertEquals(ref, bytesRef);
@@ -334,7 +334,7 @@ public class TestBytesRefHash extends LuceneTestCase {
     BytesRef scratch = new BytesRef();
     int count = hash.size();
     for (String string : strings) {
-      ref.copy(string);
+      ref.copyChars(string);
       int key = hash.add(ref); // add again to check duplicates
       assertEquals(string, hash.get((-key)-1, scratch).utf8ToString());
       assertEquals(count, hash.size());
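All of the TestBytesRefHash churn above is the same one-line rename: copy(String) became copyChars(CharSequence), which UTF-8-encodes the text into the ref's existing array, growing it as needed, rather than allocating a new ref. For instance (hypothetical):

    BytesRef ref = new BytesRef();
    ref.copyChars("term");  // UTF-16 -> UTF-8 into ref.bytes
    assert ref.utf8ToString().equals("term");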

@@ -62,7 +62,7 @@ public class TestCharsRef extends LuceneTestCase {
       int offset = random.nextInt(charArray.length);
       int length = charArray.length - offset;
       String str = new String(charArray, offset, length);
-      ref.copy(charArray, offset, length);
+      ref.copyChars(charArray, offset, length);
       assertEquals(str, ref.toString());
     }

@@ -811,7 +811,7 @@ public class TestFSTs extends LuceneTestCase {
     final Map<IntsRef,CountMinOutput<T>> prefixes = new HashMap<IntsRef,CountMinOutput<T>>();
     final IntsRef scratch = new IntsRef(10);
     for(InputOutput<T> pair: pairs) {
-      scratch.copy(pair.input);
+      scratch.copyInts(pair.input);
       for(int idx=0;idx<=pair.input.length;idx++) {
         scratch.length = idx;
         CountMinOutput<T> cmo = prefixes.get(scratch);
@@ -819,7 +819,7 @@ public class TestFSTs extends LuceneTestCase {
           cmo = new CountMinOutput<T>();
           cmo.count = 1;
           cmo.output = pair.output;
-          prefixes.put(new IntsRef(scratch), cmo);
+          prefixes.put(IntsRef.deepCopyOf(scratch), cmo);
         } else {
           cmo.count++;
           cmo.output = outputs.common(cmo.output, pair.output);
@@ -871,7 +871,7 @@ public class TestFSTs extends LuceneTestCase {
       } else {
         // clear isLeaf for all ancestors
         //System.out.println(" keep");
-        scratch.copy(prefix);
+        scratch.copyInts(prefix);
         scratch.length--;
         while(scratch.length >= 0) {
           final CountMinOutput<T> cmo2 = prefixes.get(scratch);
@@ -1633,7 +1633,7 @@ public class TestFSTs extends LuceneTestCase {
       if (w == null) {
         break;
       }
-      term.copy(w);
+      term.copyChars(w);
       b.add(term, nothing);
     }

@@ -185,7 +185,7 @@ public final class SynonymFilter extends TokenFilter {
       if (outputs[count] == null) {
         outputs[count] = new CharsRef();
       }
-      outputs[count].copy(output, offset, len);
+      outputs[count].copyChars(output, offset, len);
       count++;
     }
   };
@@ -253,7 +253,7 @@ public final class SynonymFilter extends TokenFilter {
     input.state = captureState();
     input.consumed = false;
-    input.term.copy(termAtt.buffer(), 0, termAtt.length());
+    input.term.copyChars(termAtt.buffer(), 0, termAtt.length());

     nextWrite = rollIncr(nextWrite);

@@ -201,7 +201,7 @@ public class SynonymMap {
       MapEntry e = workingSet.get(input);
       if (e == null) {
         e = new MapEntry();
-        workingSet.put(new CharsRef(input), e); // make a copy, since we will keep around in our map
+        workingSet.put(CharsRef.deepCopyOf(input), e); // make a copy, since we will keep around in our map
       }
       e.ords.add(ord);
@@ -307,7 +307,7 @@ public class SynonymMap {
       scratch.length = scratchOutput.getPosition() - scratch.offset;
       //System.out.println(" add input=" + input + " output=" + scratch + " offset=" + scratch.offset + " length=" + scratch.length + " count=" + count);
-      builder.add(input, new BytesRef(scratch));
+      builder.add(input, BytesRef.deepCopyOf(scratch));
     }

     FST<BytesRef> fst = builder.finish();

@@ -130,7 +130,7 @@ public abstract class TermAllGroupHeadsCollector<GH extends AbstractAllGroupHead
     GroupHead groupHead = groups.get(groupValue);
     if (groupHead == null) {
       groupHead = new GroupHead(groupValue, sortWithinGroup, doc);
-      groups.put(groupValue == null ? null : new BytesRef(groupValue), groupHead);
+      groups.put(groupValue == null ? null : BytesRef.deepCopyOf(groupValue), groupHead);
       temporalResult.stop = true;
     } else {
       temporalResult.stop = false;

@@ -71,10 +71,10 @@ public class TermFirstPassGroupingCollector extends AbstractFirstPassGroupingCol
     if (groupValue == null) {
       return null;
     } else if (reuse != null) {
-      reuse.copy(groupValue);
+      reuse.copyBytes(groupValue);
       return reuse;
     } else {
-      return new BytesRef(groupValue);
+      return BytesRef.deepCopyOf(groupValue);
     }
   }

@@ -79,7 +79,7 @@ public class TermsFilter extends Filter {
       }
       if (terms != null) { // TODO this check doesn't make sense, decide which variable its supposed to be for
-        br.copy(term.bytes());
+        br.copyBytes(term.bytes());
         if (termsEnum.seekCeil(br) == TermsEnum.SeekStatus.FOUND) {
           docs = termsEnum.docs(acceptDocs, docs);
           while (docs.nextDoc() != DocsEnum.NO_MORE_DOCS) {

@@ -59,7 +59,7 @@ public abstract class DocValues {
         target.length = 0;
         return false;
       }
-      target.copy(s);
+      target.copyChars(s);
       return true;
     };

@@ -55,7 +55,7 @@ public class LiteralValueSource extends ValueSource {
       @Override
       public boolean bytesVal(int doc, BytesRef target) {
-        target.copy(bytesRef);
+        target.copyBytes(bytesRef);
         return true;
       }

@@ -540,7 +540,7 @@ public abstract class QueryParserBase {
       } catch (IOException e) {
         // safe to ignore, because we know the number of tokens
       }
-      return newTermQuery(new Term(field, new BytesRef(bytes)));
+      return newTermQuery(new Term(field, BytesRef.deepCopyOf(bytes)));
     } else {
       if (severalTokensAtSamePosition || (!quoted && !autoGeneratePhraseQueries)) {
         if (positionCount == 1 || (!quoted && !autoGeneratePhraseQueries)) {
@@ -559,7 +559,7 @@ public abstract class QueryParserBase {
             // safe to ignore, because we know the number of tokens
           }
           Query currentQuery = newTermQuery(
-              new Term(field, new BytesRef(bytes)));
+              new Term(field, BytesRef.deepCopyOf(bytes)));
           q.add(currentQuery, occur);
         }
         return q;
@@ -592,7 +592,7 @@ public abstract class QueryParserBase {
             multiTerms.clear();
           }
           position += positionIncrement;
-          multiTerms.add(new Term(field, new BytesRef(bytes)));
+          multiTerms.add(new Term(field, BytesRef.deepCopyOf(bytes)));
         }
         if (enablePositionIncrements) {
           mpq.add(multiTerms.toArray(new Term[0]),position);
@@ -623,9 +623,9 @@ public abstract class QueryParserBase {
         if (enablePositionIncrements) {
           position += positionIncrement;
-          pq.add(new Term(field, new BytesRef(bytes)),position);
+          pq.add(new Term(field, BytesRef.deepCopyOf(bytes)),position);
         } else {
-          pq.add(new Term(field, new BytesRef(bytes)));
+          pq.add(new Term(field, BytesRef.deepCopyOf(bytes)));
         }
       }
       return pq;
@@ -808,7 +808,7 @@ public abstract class QueryParserBase {
       throw new RuntimeException("Unable to end & close TokenStream after analyzing range part: " + part, e);
     }
-    return new BytesRef(bytes);
+    return BytesRef.deepCopyOf(bytes);
   }

   /**

@@ -59,7 +59,7 @@ public class SpanOrTermsBuilder extends SpanBuilderBase {
       ts.reset();
       while (ts.incrementToken()) {
         termAtt.fillBytesRef();
-        SpanTermQuery stq = new SpanTermQuery(new Term(fieldName, new BytesRef(bytes)));
+        SpanTermQuery stq = new SpanTermQuery(new Term(fieldName, BytesRef.deepCopyOf(bytes)));
         clausesList.add(stq);
       }
       ts.end();

@@ -64,7 +64,7 @@ public class TermsFilterBuilder implements FilterBuilder {
       ts.reset();
       while (ts.incrementToken()) {
         termAtt.fillBytesRef();
-        term = new Term(fieldName, new BytesRef(bytes));
+        term = new Term(fieldName, BytesRef.deepCopyOf(bytes));
         tf.addTerm(term);
       }
       ts.end();

@@ -59,7 +59,7 @@ public class TermsQueryBuilder implements QueryBuilder {
       ts.reset();
       while (ts.incrementToken()) {
         termAtt.fillBytesRef();
-        term = new Term(fieldName, new BytesRef(bytes));
+        term = new Term(fieldName, BytesRef.deepCopyOf(bytes));
         bq.add(new BooleanClause(new TermQuery(term), BooleanClause.Occur.SHOULD));
       }
       ts.end();

@@ -436,7 +436,7 @@ public class DirectSpellChecker {
         continue;

       // add new entry in PQ
-      st.term = new BytesRef(candidateTerm);
+      st.term = BytesRef.deepCopyOf(candidateTerm);
       st.boost = boost;
       st.docfreq = df;
       st.termAsString = termAsString;

@@ -521,7 +521,7 @@ public class SpellChecker implements java.io.Closeable {
       if (!isEmpty) {
         // we have a non-empty index, check if the term exists
-        currentTerm.copy(word);
+        currentTerm.copyChars(word);
         for (TermsEnum te : termsEnums) {
           if (te.seekExact(currentTerm, false)) {
             continue terms;

@@ -218,7 +218,7 @@ public class ICUCollationField extends FieldType {
       throw new RuntimeException("Unable to end & close TokenStream after analyzing range part: " + part, e);
     }
-    return new BytesRef(bytes);
+    return BytesRef.deepCopyOf(bytes);
   }

   @Override

@@ -150,7 +150,7 @@ public abstract class AnalysisRequestHandlerBase extends RequestHandlerBase {
     while (tokenStream.incrementToken()) {
       bytesAtt.fillBytesRef();
-      tokens.add(new BytesRef(bytes));
+      tokens.add(BytesRef.deepCopyOf(bytes));
     }

     tokenStream.end();

@@ -207,7 +207,7 @@ public class TermsComponent extends SearchComponent {
       if (docFreq >= freqmin && docFreq <= freqmax) {
         // add the term to the list
         if (sort) {
-          queue.add(new CountPair<BytesRef, Integer>(new BytesRef(term), docFreq));
+          queue.add(new CountPair<BytesRef, Integer>(BytesRef.deepCopyOf(term), docFreq));
         } else {

           // TODO: handle raw somehow

@@ -729,7 +729,7 @@ public class SimpleFacets {
       if (sortByCount) {
         if (c>min) {
-          BytesRef termCopy = new BytesRef(term);
+          BytesRef termCopy = BytesRef.deepCopyOf(term);
           queue.add(new CountPair<BytesRef,Integer>(termCopy, c));
           if (queue.size()>=maxsize) min=queue.last().val;
         }

@@ -118,7 +118,7 @@ public class UnInvertedField extends DocTermOrds {
       if (te.docFreq() > maxTermDocFreq) {
         TopTerm topTerm = new TopTerm();
-        topTerm.term = new BytesRef(term);
+        topTerm.term = BytesRef.deepCopyOf(term);
         topTerm.termNum = termNum;
         bigTerms.put(topTerm.termNum, topTerm);

@@ -142,9 +142,9 @@ public class BoolField extends FieldType {
   @Override
   public CharsRef indexedToReadable(BytesRef input, CharsRef charsRef) {
     if (input.length > 0 && input.bytes[input.offset] == 'T') {
-      charsRef.copy(TRUE);
+      charsRef.copyChars(TRUE);
     } else {
-      charsRef.copy(FALSE);
+      charsRef.copyChars(FALSE);
     }
     return charsRef;
   }

@@ -240,7 +240,7 @@ public class CollationField extends FieldType {
       throw new RuntimeException("Unable to end & close TokenStream after analyzing range part: " + part, e);
     }
-    return new BytesRef(bytes);
+    return BytesRef.deepCopyOf(bytes);
   }

   @Override

@@ -81,7 +81,7 @@ public class SortableDoubleField extends FieldType {
     // TODO: this could be more efficient, but the sortable types should be deprecated instead
     input.utf8ToChars(charsRef);
     final char[] indexedToReadable = indexedToReadable(charsRef.toString()).toCharArray();
-    charsRef.copy(indexedToReadable, 0, indexedToReadable.length);
+    charsRef.copyChars(indexedToReadable, 0, indexedToReadable.length);
     return charsRef;
   }

@@ -79,7 +79,7 @@ public class SortableFloatField extends FieldType {
   public CharsRef indexedToReadable(BytesRef input, CharsRef charsRef) {
     // TODO: this could be more efficient, but the sortable types should be deprecated instead
     final char[] indexedToReadable = indexedToReadable(input.utf8ToChars(charsRef).toString()).toCharArray();
-    charsRef.copy(indexedToReadable, 0, indexedToReadable.length);
+    charsRef.copyChars(indexedToReadable, 0, indexedToReadable.length);
     return charsRef;
   }

@@ -77,7 +77,7 @@ public class SortableIntField extends FieldType {
   public CharsRef indexedToReadable(BytesRef input, CharsRef charsRef) {
     // TODO: this could be more efficient, but the sortable types should be deprecated instead
     final char[] indexedToReadable = indexedToReadable(input.utf8ToChars(charsRef).toString()).toCharArray();
-    charsRef.copy(indexedToReadable, 0, indexedToReadable.length);
+    charsRef.copyChars(indexedToReadable, 0, indexedToReadable.length);
     return charsRef;
   }

@@ -69,7 +69,7 @@ public class SortableLongField extends FieldType {
   public CharsRef indexedToReadable(BytesRef input, CharsRef charsRef) {
     // TODO: this could be more efficient, but the sortable types should be deprecated instead
     final char[] indexedToReadable = indexedToReadable(input.utf8ToChars(charsRef).toString()).toCharArray();
-    charsRef.copy(indexedToReadable, 0, indexedToReadable.length);
+    charsRef.copyChars(indexedToReadable, 0, indexedToReadable.length);
     return charsRef;
   }

@@ -846,7 +846,7 @@ public class SolrIndexSearcher extends IndexSearcher implements SolrInfoMBean {
     TermQuery key = null;

     if (useCache) {
-      key = new TermQuery(new Term(deState.fieldName, new BytesRef(deState.termsEnum.term())));
+      key = new TermQuery(new Term(deState.fieldName, BytesRef.deepCopyOf(deState.termsEnum.term())));
       DocSet result = filterCache.get(key);
       if (result != null) return result;
     }

@@ -59,7 +59,7 @@ public class TermQParserPlugin extends QParserPlugin {
     if (ft != null) {
       ft.readableToIndexed(val, term);
     } else {
-      term.copy(val);
+      term.copyChars(val);
     }
     return new TermQuery(new Term(fname, term));
   }