LUCENE-4122 Replace Payload with BytesRef.

git-svn-id: https://svn.apache.org/repos/asf/lucene/dev/trunk@1348171 13f79535-47bb-0310-9956-ffa450edef68
Andrzej Bialecki 2012-06-08 17:58:41 +00:00
parent 61f61ae036
commit 4efdbdb2a8
44 changed files with 153 additions and 383 deletions
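
In short, every place that carried a token payload as org.apache.lucene.index.Payload now carries a plain org.apache.lucene.util.BytesRef. A minimal sketch of the new pattern in a TokenFilter (ConstantPayloadFilter is illustrative, not part of this commit; the attribute API is the one exercised throughout the diff below):

import java.io.IOException;

import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
import org.apache.lucene.util.BytesRef;

/** Attaches a constant one-byte payload to every token. */
final class ConstantPayloadFilter extends TokenFilter {
  private final PayloadAttribute payloadAtt = addAttribute(PayloadAttribute.class);
  // before this change: new Payload(new byte[] { 42 })
  private final BytesRef payload = new BytesRef(new byte[] { 42 });

  ConstantPayloadFilter(TokenStream in) {
    super(in);
  }

  @Override
  public boolean incrementToken() throws IOException {
    if (!input.incrementToken()) {
      return false;
    }
    payloadAtt.setPayload(payload);
    return true;
  }
}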


@ -527,6 +527,8 @@ API Changes
* LUCENE-4063: FrenchLightStemmer no longer deletes repeated digits.
(Tanguy Moal via Steve Rowe)
* LUCENE-4122: Replace Payload with BytesRef. (Andrzej Bialecki)
New features
* LUCENE-2604: Added RegexpQuery support to QueryParser. Regular expressions


@ -25,7 +25,7 @@ import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
import org.apache.lucene.index.Payload;
import org.apache.lucene.util.BytesRef;
import java.io.IOException;
@ -93,7 +93,7 @@ public class PrefixAwareTokenFilter extends TokenStream {
} else {
previousPrefixToken.reinit(nextToken);
// Make it a deep copy
Payload p = previousPrefixToken.getPayload();
BytesRef p = previousPrefixToken.getPayload();
if (p != null) {
previousPrefixToken.setPayload(p.clone());
}


@ -1,5 +1,7 @@
package org.apache.lucene.analysis.payloads;
import org.apache.lucene.util.BytesRef;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
@ -17,15 +19,14 @@ package org.apache.lucene.analysis.payloads;
* limitations under the License.
*/
import org.apache.lucene.index.Payload;
/**
* Base class for payload encoders.
*
**/
public abstract class AbstractEncoder implements PayloadEncoder{
public Payload encode(char[] buffer) {
public abstract class AbstractEncoder implements PayloadEncoder {
public BytesRef encode(char[] buffer) {
return encode(buffer, 0, buffer.length);
}
}


@ -1,4 +1,7 @@
package org.apache.lucene.analysis.payloads;
import org.apache.lucene.util.BytesRef;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
@ -16,9 +19,6 @@ package org.apache.lucene.analysis.payloads;
* limitations under the License.
*/
import org.apache.lucene.index.Payload;
/**
* Encode a character array Float as a {@link org.apache.lucene.index.Payload}.
* <p/>
@ -27,11 +27,10 @@ import org.apache.lucene.index.Payload;
**/
public class FloatEncoder extends AbstractEncoder implements PayloadEncoder {
public Payload encode(char[] buffer, int offset, int length) {
Payload result = new Payload();
public BytesRef encode(char[] buffer, int offset, int length) {
float payload = Float.parseFloat(new String(buffer, offset, length));//TODO: improve this so that we don't have to new Strings
byte[] bytes = PayloadHelper.encodeFloat(payload);
result.setData(bytes);
BytesRef result = new BytesRef(bytes);
return result;
}
}
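
Only the container type changed; the byte-level encoding still goes through PayloadHelper. A round-trip sketch, assuming PayloadHelper also offers decodeFloat(byte[], int) alongside the single-argument form used in the tests below:

import org.apache.lucene.analysis.payloads.FloatEncoder;
import org.apache.lucene.analysis.payloads.PayloadHelper;
import org.apache.lucene.util.BytesRef;

public class FloatPayloadRoundTrip {
  public static void main(String[] args) {
    char[] chars = "0.5".toCharArray();
    // encode now returns a BytesRef instead of a Payload
    BytesRef payload = new FloatEncoder().encode(chars, 0, chars.length);
    // read the four encoded bytes back out, honoring the offset
    float value = PayloadHelper.decodeFloat(payload.bytes, payload.offset);
    System.out.println(value); // 0.5
  }
}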


@ -16,12 +16,12 @@ package org.apache.lucene.analysis.payloads;
* limitations under the License.
*/
import org.apache.lucene.index.Payload;
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.Charset;
import org.apache.lucene.util.BytesRef;
/**
* Does nothing other than convert the char array to a byte array using the specified encoding.
@ -37,15 +37,15 @@ public class IdentityEncoder extends AbstractEncoder implements PayloadEncoder{
this.charset = charset;
}
public Payload encode(char[] buffer, int offset, int length) {
public BytesRef encode(char[] buffer, int offset, int length) {
final ByteBuffer bb = charset.encode(CharBuffer.wrap(buffer, offset, length));
if (bb.hasArray()) {
return new Payload(bb.array(), bb.arrayOffset() + bb.position(), bb.remaining());
return new BytesRef(bb.array(), bb.arrayOffset() + bb.position(), bb.remaining());
} else {
// normally it should always have an array, but who knows?
final byte[] b = new byte[bb.remaining()];
bb.get(b);
return new Payload(b);
return new BytesRef(b);
}
}
}


@ -16,8 +16,8 @@ package org.apache.lucene.analysis.payloads;
* limitations under the License.
*/
import org.apache.lucene.index.Payload;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef;
/**
@ -28,11 +28,10 @@ import org.apache.lucene.util.ArrayUtil;
**/
public class IntegerEncoder extends AbstractEncoder implements PayloadEncoder {
public Payload encode(char[] buffer, int offset, int length) {
Payload result = new Payload();
public BytesRef encode(char[] buffer, int offset, int length) {
int payload = ArrayUtil.parseInt(buffer, offset, length);//TODO: improve this so that we don't have to new Strings
byte[] bytes = PayloadHelper.encodeInt(payload);
result.setData(bytes);
BytesRef result = new BytesRef(bytes);
return result;
}
}


@ -21,7 +21,7 @@ import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
import org.apache.lucene.index.Payload;
import org.apache.lucene.util.BytesRef;
import java.io.IOException;
@ -33,7 +33,7 @@ import java.io.IOException;
public class NumericPayloadTokenFilter extends TokenFilter {
private String typeMatch;
private Payload thePayload;
private BytesRef thePayload;
private final PayloadAttribute payloadAtt = addAttribute(PayloadAttribute.class);
private final TypeAttribute typeAtt = addAttribute(TypeAttribute.class);
@ -41,7 +41,7 @@ public class NumericPayloadTokenFilter extends TokenFilter {
public NumericPayloadTokenFilter(TokenStream input, float payload, String typeMatch) {
super(input);
//Need to encode the payload
thePayload = new Payload(PayloadHelper.encodeFloat(payload));
thePayload = new BytesRef(PayloadHelper.encodeFloat(payload));
this.typeMatch = typeMatch;
}


@ -1,4 +1,7 @@
package org.apache.lucene.analysis.payloads;
import org.apache.lucene.util.BytesRef;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
@ -16,8 +19,6 @@ package org.apache.lucene.analysis.payloads;
* limitations under the License.
*/
import org.apache.lucene.index.Payload;
/**
* Mainly for use with the DelimitedPayloadTokenFilter, converts char buffers to Payload.
@ -27,14 +28,14 @@ import org.apache.lucene.index.Payload;
**/
public interface PayloadEncoder {
Payload encode(char[] buffer);
BytesRef encode(char[] buffer);
/**
* Convert a char array to a {@link org.apache.lucene.index.Payload}
* @param buffer
* @param offset
* @param length
* @return encoded {@link Payload}
* @return encoded {@link BytesRef}
*/
Payload encode(char [] buffer, int offset, int length);
BytesRef encode(char [] buffer, int offset, int length);
}
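
Custom encoders migrate by changing their return type. A sketch against the new contract (LongEncoder is hypothetical; FloatEncoder and IntegerEncoder above are the real in-tree analogues):

import org.apache.lucene.analysis.payloads.AbstractEncoder;
import org.apache.lucene.analysis.payloads.PayloadEncoder;
import org.apache.lucene.util.BytesRef;

/** Hypothetical encoder: parses the chars as a long and stores it big-endian. */
public class LongEncoder extends AbstractEncoder implements PayloadEncoder {
  public BytesRef encode(char[] buffer, int offset, int length) {
    long value = Long.parseLong(new String(buffer, offset, length));
    byte[] bytes = new byte[8];
    for (int i = 7; i >= 0; i--) {
      bytes[i] = (byte) (value & 0xFFL);
      value >>>= 8;
    }
    return new BytesRef(bytes);
  }
}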


@ -23,7 +23,7 @@ import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
import org.apache.lucene.index.Payload;
import org.apache.lucene.util.BytesRef;
/**
@ -46,7 +46,7 @@ public class TokenOffsetPayloadTokenFilter extends TokenFilter {
byte[] data = new byte[8];
PayloadHelper.encodeInt(offsetAtt.startOffset(), data, 0);
PayloadHelper.encodeInt(offsetAtt.endOffset(), data, 4);
Payload payload = new Payload(data);
BytesRef payload = new BytesRef(data);
payAtt.setPayload(payload);
return true;
} else {


@ -21,7 +21,7 @@ import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
import org.apache.lucene.index.Payload;
import org.apache.lucene.util.BytesRef;
import java.io.IOException;
@ -46,7 +46,7 @@ public class TypeAsPayloadTokenFilter extends TokenFilter {
if (input.incrementToken()) {
String type = typeAtt.type();
if (type != null && type.equals("") == false) {
payloadAtt.setPayload(new Payload(type.getBytes("UTF-8")));
payloadAtt.setPayload(new BytesRef(type.getBytes("UTF-8")));
}
return true;
} else {


@ -26,7 +26,7 @@ import org.apache.lucene.analysis.*;
import org.apache.lucene.analysis.standard.StandardTokenizer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
import org.apache.lucene.index.Payload;
import org.apache.lucene.util.BytesRef;
public class TestAnalyzers extends BaseTokenStreamTestCase {
@ -85,7 +85,7 @@ public class TestAnalyzers extends BaseTokenStreamTestCase {
if (!hasNext) break;
// System.out.println("id="+System.identityHashCode(nextToken) + " " + t);
// System.out.println("payload=" + (int)nextToken.getPayload().toByteArray()[0]);
assertEquals(b, payloadAtt.getPayload().toByteArray()[0]);
assertEquals(b, payloadAtt.getPayload().bytes[0]);
}
}
@ -213,7 +213,7 @@ final class PayloadSetter extends TokenFilter {
}
byte[] data = new byte[1];
Payload p = new Payload(data,0,1);
BytesRef p = new BytesRef(data,0,1);
@Override
public boolean incrementToken() throws IOException {


@ -20,7 +20,7 @@ import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
import org.apache.lucene.index.Payload;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
import java.io.StringReader;
@ -109,11 +109,11 @@ public class DelimitedPayloadTokenFilterTest extends LuceneTestCase {
stream.reset();
assertTrue(stream.incrementToken());
assertEquals(expected, termAtt.toString());
Payload payload = payloadAtt.getPayload();
BytesRef payload = payloadAtt.getPayload();
if (payload != null) {
assertTrue(payload.length() + " does not equal: " + expectPay.length, payload.length() == expectPay.length);
assertTrue(payload.length + " does not equal: " + expectPay.length, payload.length == expectPay.length);
for (int i = 0; i < expectPay.length; i++) {
assertTrue(expectPay[i] + " does not equal: " + payload.byteAt(i), expectPay[i] == payload.byteAt(i));
assertTrue(expectPay[i] + " does not equal: " + payload.bytes[i + payload.offset], expectPay[i] == payload.bytes[i + payload.offset]);
}
} else {
@ -126,11 +126,11 @@ public class DelimitedPayloadTokenFilterTest extends LuceneTestCase {
stream.reset();
assertTrue(stream.incrementToken());
assertEquals(expected, termAtt.toString());
Payload payload = payAtt.getPayload();
BytesRef payload = payAtt.getPayload();
if (payload != null) {
assertTrue(payload.length() + " does not equal: " + expectPay.length, payload.length() == expectPay.length);
assertTrue(payload.length + " does not equal: " + expectPay.length, payload.length == expectPay.length);
for (int i = 0; i < expectPay.length; i++) {
assertTrue(expectPay[i] + " does not equal: " + payload.byteAt(i), expectPay[i] == payload.byteAt(i));
assertTrue(expectPay[i] + " does not equal: " + payload.bytes[i + payload.offset], expectPay[i] == payload.bytes[i + payload.offset]);
}
} else {


@ -43,9 +43,9 @@ public class NumericPayloadTokenFilterTest extends BaseTokenStreamTestCase {
seenDogs = true;
assertTrue(typeAtt.type() + " is not equal to " + "D", typeAtt.type().equals("D") == true);
assertTrue("payloadAtt.getPayload() is null and it shouldn't be", payloadAtt.getPayload() != null);
byte [] bytes = payloadAtt.getPayload().getData();//safe here to just use the bytes, otherwise we should use offset, length
assertTrue(bytes.length + " does not equal: " + payloadAtt.getPayload().length(), bytes.length == payloadAtt.getPayload().length());
assertTrue(payloadAtt.getPayload().getOffset() + " does not equal: " + 0, payloadAtt.getPayload().getOffset() == 0);
byte [] bytes = payloadAtt.getPayload().bytes;//safe here to just use the bytes, otherwise we should use offset, length
assertTrue(bytes.length + " does not equal: " + payloadAtt.getPayload().length, bytes.length == payloadAtt.getPayload().length);
assertTrue(payloadAtt.getPayload().offset + " does not equal: " + 0, payloadAtt.getPayload().offset == 0);
float pay = PayloadHelper.decodeFloat(bytes);
assertTrue(pay + " does not equal: " + 3, pay == 3);
} else {


@ -20,7 +20,7 @@ import org.apache.lucene.analysis.BaseTokenStreamTestCase;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
import org.apache.lucene.index.Payload;
import org.apache.lucene.util.BytesRef;
import java.io.IOException;
import java.io.StringReader;
@ -36,9 +36,9 @@ public class TokenOffsetPayloadTokenFilterTest extends BaseTokenStreamTestCase {
OffsetAttribute offsetAtt = nptf.getAttribute(OffsetAttribute.class);
nptf.reset();
while (nptf.incrementToken()) {
Payload pay = payloadAtt.getPayload();
BytesRef pay = payloadAtt.getPayload();
assertTrue("pay is null and it shouldn't be", pay != null);
byte [] data = pay.getData();
byte [] data = pay.bytes;
int start = PayloadHelper.decodeInt(data, 0);
assertTrue(start + " does not equal: " + offsetAtt.startOffset(), start == offsetAtt.startOffset());
int end = PayloadHelper.decodeInt(data, 4);


@ -41,7 +41,7 @@ public class TypeAsPayloadTokenFilterTest extends BaseTokenStreamTestCase {
while (nptf.incrementToken()) {
assertTrue(typeAtt.type() + " is not null and it should be", typeAtt.type().equals(String.valueOf(Character.toUpperCase(termAtt.buffer()[0]))));
assertTrue("nextToken.getPayload() is null and it shouldn't be", payloadAtt.getPayload() != null);
String type = new String(payloadAtt.getPayload().getData(), "UTF-8");
String type = new String(payloadAtt.getPayload().bytes, "UTF-8");
assertTrue(type + " is not equal to " + typeAtt.type(), type.equals(typeAtt.type()) == true);
count++;
}


@ -24,7 +24,6 @@ import org.apache.lucene.analysis.BaseTokenStreamTestCase;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.index.Payload;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.core.KeywordTokenizer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
@ -34,6 +33,7 @@ import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.Version;
public class TestSnowball extends BaseTokenStreamTestCase {
@ -68,7 +68,7 @@ public class TestSnowball extends BaseTokenStreamTestCase {
assertEquals("wrd", typeAtt.type());
assertEquals(3, posIncAtt.getPositionIncrement());
assertEquals(77, flagsAtt.getFlags());
assertEquals(new Payload(new byte[]{0,1,2,3}), payloadAtt.getPayload());
assertEquals(new BytesRef(new byte[]{0,1,2,3}), payloadAtt.getPayload());
}
private final class TestTokenStream extends TokenStream {
@ -90,7 +90,7 @@ public class TestSnowball extends BaseTokenStreamTestCase {
offsetAtt.setOffset(2, 7);
typeAtt.setType("wrd");
posIncAtt.setPositionIncrement(3);
payloadAtt.setPayload(new Payload(new byte[]{0,1,2,3}));
payloadAtt.setPayload(new BytesRef(new byte[]{0,1,2,3}));
flagsAtt.setFlags(77);
return true;
}


@ -24,12 +24,12 @@ import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute;
import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
import org.apache.lucene.index.Payload;
import org.apache.lucene.index.DocsAndPositionsEnum; // for javadoc
import org.apache.lucene.util.Attribute;
import org.apache.lucene.util.AttributeSource;
import org.apache.lucene.util.AttributeImpl;
import org.apache.lucene.util.AttributeReflector;
import org.apache.lucene.util.BytesRef;
/**
A Token is an occurrence of a term from the text of a field. It consists of
@ -127,7 +127,7 @@ public class Token extends CharTermAttributeImpl
private int startOffset,endOffset;
private String type = DEFAULT_TYPE;
private int flags;
private Payload payload;
private BytesRef payload;
private int positionIncrement = 1;
private int positionLength = 1;
@ -357,14 +357,14 @@ public class Token extends CharTermAttributeImpl
/**
* Returns this Token's payload.
*/
public Payload getPayload() {
public BytesRef getPayload() {
return this.payload;
}
/**
* Sets this Token's payload.
*/
public void setPayload(Payload payload) {
public void setPayload(BytesRef payload) {
this.payload = payload;
}


@ -17,8 +17,8 @@ package org.apache.lucene.analysis.tokenattributes;
* limitations under the License.
*/
import org.apache.lucene.index.Payload;
import org.apache.lucene.util.Attribute;
import org.apache.lucene.util.BytesRef;
/**
* The payload of a Token. See also {@link Payload}.
@ -27,10 +27,10 @@ public interface PayloadAttribute extends Attribute {
/**
* Returns this Token's payload.
*/
public Payload getPayload();
public BytesRef getPayload();
/**
* Sets this Token's payload.
*/
public void setPayload(Payload payload);
public void setPayload(BytesRef payload);
}


@ -17,14 +17,14 @@ package org.apache.lucene.analysis.tokenattributes;
* limitations under the License.
*/
import org.apache.lucene.index.Payload;
import org.apache.lucene.util.AttributeImpl;
import org.apache.lucene.util.BytesRef;
/**
* The payload of a Token. See also {@link Payload}.
*/
public class PayloadAttributeImpl extends AttributeImpl implements PayloadAttribute, Cloneable {
private Payload payload;
private BytesRef payload;
/**
* Initialize this attribute with no payload.
@ -34,21 +34,21 @@ public class PayloadAttributeImpl extends AttributeImpl implements PayloadAttrib
/**
* Initialize this attribute with the given payload.
*/
public PayloadAttributeImpl(Payload payload) {
public PayloadAttributeImpl(BytesRef payload) {
this.payload = payload;
}
/**
* Returns this Token's payload.
*/
public Payload getPayload() {
public BytesRef getPayload() {
return this.payload;
}
/**
* Sets this Token's payload.
*/
public void setPayload(Payload payload) {
public void setPayload(BytesRef payload) {
this.payload = payload;
}


@ -128,7 +128,7 @@ final class FreqProxTermsWriterPerField extends TermsHashConsumerPerField implem
void writeProx(final int termID, int proxCode) {
//System.out.println("writeProx termID=" + termID + " proxCode=" + proxCode);
assert hasProx;
final Payload payload;
final BytesRef payload;
if (payloadAttribute == null) {
payload = null;
} else {
@ -138,7 +138,7 @@ final class FreqProxTermsWriterPerField extends TermsHashConsumerPerField implem
if (payload != null && payload.length > 0) {
termsHashPerField.writeVInt(1, (proxCode<<1)|1);
termsHashPerField.writeVInt(1, payload.length);
termsHashPerField.writeBytes(1, payload.data, payload.offset, payload.length);
termsHashPerField.writeBytes(1, payload.bytes, payload.offset, payload.length);
hasPayloads = true;
} else {
termsHashPerField.writeVInt(1, proxCode<<1);


@ -1,199 +0,0 @@
package org.apache.lucene.index;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.util.ArrayUtil;
/**
* A Payload is metadata that can be stored together with each occurrence
* of a term. This metadata is stored inline in the posting list of the
* specific term.
* <p>
* To store payloads in the index a {@link TokenStream} has to be used that
* produces payload data.
* <p>
* Use {@link DocsAndPositionsEnum#getPayload()}
* to retrieve the payloads from the index.<br>
*
*/
public class Payload implements Cloneable {
/** the byte array containing the payload data */
protected byte[] data;
/** the offset within the byte array */
protected int offset;
/** the length of the payload data */
protected int length;
/** Creates an empty payload and does not allocate a byte array. */
public Payload() {
// nothing to do
}
/**
* Creates a new payload with the given array as data.
* A reference to the passed-in array is held, i.e. no
* copy is made.
*
* @param data the data of this payload
*/
public Payload(byte[] data) {
this(data, 0, data.length);
}
/**
* Creates a new payload with the given array as data.
* A reference to the passed-in array is held, i.e. no
* copy is made.
*
* @param data the data of this payload
* @param offset the offset in the data byte array
* @param length the length of the data
*/
public Payload(byte[] data, int offset, int length) {
if (offset < 0 || offset + length > data.length) {
throw new IllegalArgumentException();
}
this.data = data;
this.offset = offset;
this.length = length;
}
/**
* Sets this payload's data.
* A reference to the passed-in array is held, i.e. no
* copy is made.
*/
public void setData(byte[] data) {
setData(data, 0, data.length);
}
/**
* Sets this payload's data.
* A reference to the passed-in array is held, i.e. no
* copy is made.
*/
public void setData(byte[] data, int offset, int length) {
this.data = data;
this.offset = offset;
this.length = length;
}
/**
* Returns a reference to the underlying byte array
* that holds this payload's data.
*/
public byte[] getData() {
return this.data;
}
/**
* Returns the offset in the underlying byte array
*/
public int getOffset() {
return this.offset;
}
/**
* Returns the length of the payload data.
*/
public int length() {
return this.length;
}
/**
* Returns the byte at the given index.
*/
public byte byteAt(int index) {
if (0 <= index && index < this.length) {
return this.data[this.offset + index];
}
throw new ArrayIndexOutOfBoundsException(index);
}
/**
* Allocates a new byte array, copies the payload data into it and returns it.
*/
public byte[] toByteArray() {
byte[] retArray = new byte[this.length];
System.arraycopy(this.data, this.offset, retArray, 0, this.length);
return retArray;
}
/**
* Copies the payload data to a byte array.
*
* @param target the target byte array
* @param targetOffset the offset in the target byte array
*/
public void copyTo(byte[] target, int targetOffset) {
if (this.length > target.length + targetOffset) {
throw new ArrayIndexOutOfBoundsException();
}
System.arraycopy(this.data, this.offset, target, targetOffset, this.length);
}
/**
* Clones this payload by creating a copy of the underlying
* byte array.
*/
@Override
public Payload clone() {
try {
// Start with a shallow copy of data
Payload clone = (Payload) super.clone();
// Only copy the part of data that belongs to this Payload
if (offset == 0 && length == data.length) {
// It is the whole thing, so just clone it.
clone.data = data.clone();
}
else {
// Just get the part
clone.data = this.toByteArray();
clone.offset = 0;
}
return clone;
} catch (CloneNotSupportedException e) {
throw new RuntimeException(e); // shouldn't happen
}
}
@Override
public boolean equals(Object obj) {
if (obj == this)
return true;
if (obj instanceof Payload) {
Payload other = (Payload) obj;
if (length == other.length) {
for(int i=0;i<length;i++)
if (data[offset+i] != other.data[other.offset+i])
return false;
return true;
} else
return false;
} else
return false;
}
@Override
public int hashCode() {
return ArrayUtil.hashCode(data, offset, offset+length);
}
}
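
The deleted class maps onto BytesRef nearly field-for-field. A migration cheat sheet, as a runnable sketch (the deepCopyOf replacement for the copying clone()/toByteArray() is an assumption about the trunk BytesRef API, not something this diff shows):

import org.apache.lucene.util.BytesRef;

public class PayloadMigration {
  public static void main(String[] args) {
    byte[] data = { 1, 2, 3, 4 };

    // new Payload(data, 1, 2)          ->  new BytesRef(data, 1, 2)
    BytesRef ref = new BytesRef(data, 1, 2);

    // getData(), getOffset(), length() ->  the public fields bytes, offset, length
    System.out.println(ref.length);            // 2

    // byteAt(i)                        ->  bytes[offset + i]
    System.out.println(ref.bytes[ref.offset]); // 2

    // toByteArray() / copying clone()  ->  BytesRef.deepCopyOf(ref), assumed above
    BytesRef copy = BytesRef.deepCopyOf(ref);
    System.out.println(ref.equals(copy));      // true: content comparison
  }
}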


@ -17,7 +17,6 @@ package org.apache.lucene.analysis;
* limitations under the License.
*/
import org.apache.lucene.index.Payload;
import org.apache.lucene.analysis.tokenattributes.*;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.Attribute;
@ -183,7 +182,7 @@ public class TestToken extends LuceneTestCase {
assertEquals(t.toString(), copy.toString());
assertNotSame(buf, copy.buffer());
Payload pl = new Payload(new byte[]{1,2,3,4});
BytesRef pl = new BytesRef(new byte[]{1,2,3,4});
t.setPayload(pl);
copy = assertCloneIsEqual(t);
assertEquals(pl, copy.getPayload());
@ -204,7 +203,7 @@ public class TestToken extends LuceneTestCase {
assertEquals(t.toString(), copy.toString());
assertNotSame(buf, copy.buffer());
Payload pl = new Payload(new byte[]{1,2,3,4});
BytesRef pl = new BytesRef(new byte[]{1,2,3,4});
t.setPayload(pl);
copy = assertCopyIsEqual(t);
assertEquals(pl, copy.getPayload());


@ -165,7 +165,7 @@ public class TestDocumentWriter extends LuceneTestCase {
}
if (first) {
// set payload on first position only
payloadAtt.setPayload(new Payload(new byte[]{100}));
payloadAtt.setPayload(new BytesRef(new byte[]{100}));
first = false;
}


@ -1528,7 +1528,7 @@ public class TestIndexWriterExceptions extends LuceneTestCase {
Token t1 = new Token("foo", 0, 3);
t1.setPositionIncrement(Integer.MAX_VALUE-500);
if (random().nextBoolean()) {
t1.setPayload(new Payload(new byte[] { 0x1 } ));
t1.setPayload(new BytesRef(new byte[] { 0x1 } ));
}
TokenStream overflowingTokenStream = new CannedTokenStream(
new Token[] { t1 }


@ -138,7 +138,7 @@ public class TestMultiLevelSkipList extends LuceneTestCase {
public boolean incrementToken() throws IOException {
boolean hasNext = input.incrementToken();
if (hasNext) {
payloadAtt.setPayload(new Payload(new byte[] { (byte) payloadCount.incrementAndGet() }));
payloadAtt.setPayload(new BytesRef(new byte[] { (byte) payloadCount.incrementAndGet() }));
}
return hasNext;
}


@ -104,7 +104,7 @@ public class TestPayloadProcessorProvider extends LuceneTestCase {
called = true;
byte[] p = new byte[] { 1 };
payload.setPayload(new Payload(p));
payload.setPayload(new BytesRef(p));
term.append(t);
return true;
}


@ -44,46 +44,13 @@ public class TestPayloads extends LuceneTestCase {
// Simple tests to test the Payload class
public void testPayload() throws Exception {
byte[] testData = "This is a test!".getBytes();
Payload payload = new Payload(testData);
assertEquals("Wrong payload length.", testData.length, payload.length());
BytesRef payload = new BytesRef(testData);
assertEquals("Wrong payload length.", testData.length, payload.length);
// test copyTo()
byte[] target = new byte[testData.length - 1];
try {
payload.copyTo(target, 0);
fail("Expected exception not thrown");
} catch (Exception expected) {
// expected exception
}
target = new byte[testData.length + 3];
payload.copyTo(target, 3);
for (int i = 0; i < testData.length; i++) {
assertEquals(testData[i], target[i + 3]);
}
// test toByteArray()
target = payload.toByteArray();
assertByteArrayEquals(testData, target);
// test byteAt()
for (int i = 0; i < testData.length; i++) {
assertEquals(payload.byteAt(i), testData[i]);
}
try {
payload.byteAt(testData.length + 1);
fail("Expected exception not thrown");
} catch (Exception expected) {
// expected exception
}
Payload clone = payload.clone();
assertEquals(payload.length(), clone.length());
for (int i = 0; i < payload.length(); i++) {
assertEquals(payload.byteAt(i), clone.byteAt(i));
BytesRef clone = payload.clone();
assertEquals(payload.length, clone.length);
for (int i = 0; i < payload.length; i++) {
assertEquals(payload.bytes[i + payload.offset], clone.bytes[i + clone.offset]);
}
}
@ -478,9 +445,8 @@ public class TestPayloads extends LuceneTestCase {
// Some values of the same field are to have payloads and others not
if (offset + length <= data.length && !termAttribute.toString().endsWith("NO PAYLOAD")) {
Payload p = new Payload();
BytesRef p = new BytesRef(data, offset, length);
payloadAtt.setPayload(p);
p.setData(data, offset, length);
offset += length;
} else {
payloadAtt.setPayload(null);
@ -576,7 +542,7 @@ public class TestPayloads extends LuceneTestCase {
first = false;
clearAttributes();
termAtt.append(term);
payloadAtt.setPayload(new Payload(payload));
payloadAtt.setPayload(new BytesRef(payload));
return true;
}


@ -21,11 +21,11 @@ import org.apache.lucene.analysis.*;
import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Payload;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.English;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.index.IndexReader;
@ -85,13 +85,13 @@ public class PayloadHelper {
if (input.incrementToken()) {
if (fieldName.equals(FIELD)) {
payloadAtt.setPayload(new Payload(payloadField));
payloadAtt.setPayload(new BytesRef(payloadField));
} else if (fieldName.equals(MULTI_FIELD)) {
if (numSeen % 2 == 0) {
payloadAtt.setPayload(new Payload(payloadMultiField1));
payloadAtt.setPayload(new BytesRef(payloadMultiField1));
}
else {
payloadAtt.setPayload(new Payload(payloadMultiField2));
payloadAtt.setPayload(new BytesRef(payloadMultiField2));
}
numSeen++;
}


@ -25,7 +25,6 @@ import org.apache.lucene.document.TextField;
import org.apache.lucene.index.FieldInvertState;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Norm;
import org.apache.lucene.index.Payload;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.CollectionStatistics;
@ -80,9 +79,9 @@ public class TestPayloadNearQuery extends LuceneTestCase {
boolean result = false;
if (input.incrementToken()) {
if (numSeen % 2 == 0) {
payAtt.setPayload(new Payload(payload2));
payAtt.setPayload(new BytesRef(payload2));
} else {
payAtt.setPayload(new Payload(payload4));
payAtt.setPayload(new BytesRef(payload4));
}
numSeen++;
result = true;


@ -37,7 +37,6 @@ import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.FieldInvertState;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Norm;
import org.apache.lucene.index.Payload;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.Directory;
@ -94,12 +93,12 @@ public class TestPayloadTermQuery extends LuceneTestCase {
boolean hasNext = input.incrementToken();
if (hasNext) {
if (fieldName.equals("field")) {
payloadAtt.setPayload(new Payload(payloadField));
payloadAtt.setPayload(new BytesRef(payloadField));
} else if (fieldName.equals("multiField")) {
if (numSeen % 2 == 0) {
payloadAtt.setPayload(new Payload(payloadMultiField1));
payloadAtt.setPayload(new BytesRef(payloadMultiField1));
} else {
payloadAtt.setPayload(new Payload(payloadMultiField2));
payloadAtt.setPayload(new BytesRef(payloadMultiField2));
}
numSeen++;
}


@ -30,7 +30,6 @@ import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Payload;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
@ -42,6 +41,7 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryUtils;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.English;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util._TestUtil;
@ -81,7 +81,7 @@ public class TestBasics extends LuceneTestCase {
@Override
public boolean incrementToken() throws IOException {
if (input.incrementToken()) {
payloadAttr.setPayload(new Payload(("pos: " + pos).getBytes()));
payloadAttr.setPayload(new BytesRef(("pos: " + pos).getBytes()));
pos++;
return true;
} else {
@ -411,8 +411,8 @@ public class TestBasics extends LuceneTestCase {
@Test
public void testSpanPayloadCheck() throws Exception {
SpanTermQuery term1 = new SpanTermQuery(new Term("field", "five"));
Payload pay = new Payload(("pos: " + 5).getBytes());
SpanQuery query = new SpanPayloadCheckQuery(term1, Collections.singletonList(pay.getData()));
BytesRef pay = new BytesRef(("pos: " + 5).getBytes());
SpanQuery query = new SpanPayloadCheckQuery(term1, Collections.singletonList(pay.bytes));
checkHits(query, new int[]
{1125, 1135, 1145, 1155, 1165, 1175, 1185, 1195, 1225, 1235, 1245, 1255, 1265, 1275, 1285, 1295, 1325, 1335, 1345, 1355, 1365, 1375, 1385, 1395, 1425, 1435, 1445, 1455, 1465, 1475, 1485, 1495, 1525, 1535, 1545, 1555, 1565, 1575, 1585, 1595, 1625, 1635, 1645, 1655, 1665, 1675, 1685, 1695, 1725, 1735, 1745, 1755, 1765, 1775, 1785, 1795, 1825, 1835, 1845, 1855, 1865, 1875, 1885, 1895, 1925, 1935, 1945, 1955, 1965, 1975, 1985, 1995});
assertTrue(searcher.explain(query, 1125).getValue() > 0.0f);
@ -421,16 +421,16 @@ public class TestBasics extends LuceneTestCase {
SpanNearQuery snq;
SpanQuery[] clauses;
List<byte[]> list;
Payload pay2;
BytesRef pay2;
clauses = new SpanQuery[2];
clauses[0] = term1;
clauses[1] = term2;
snq = new SpanNearQuery(clauses, 0, true);
pay = new Payload(("pos: " + 0).getBytes());
pay2 = new Payload(("pos: " + 1).getBytes());
pay = new BytesRef(("pos: " + 0).getBytes());
pay2 = new BytesRef(("pos: " + 1).getBytes());
list = new ArrayList<byte[]>();
list.add(pay.getData());
list.add(pay2.getData());
list.add(pay.bytes);
list.add(pay2.bytes);
query = new SpanNearPayloadCheckQuery(snq, list);
checkHits(query, new int[]
{500, 501, 502, 503, 504, 505, 506, 507, 508, 509, 510, 511, 512, 513, 514, 515, 516, 517, 518, 519, 520, 521, 522, 523, 524, 525, 526, 527, 528, 529, 530, 531, 532, 533, 534, 535, 536, 537, 538, 539, 540, 541, 542, 543, 544, 545, 546, 547, 548, 549, 550, 551, 552, 553, 554, 555, 556, 557, 558, 559, 560, 561, 562, 563, 564, 565, 566, 567, 568, 569, 570, 571, 572, 573, 574, 575, 576, 577, 578, 579, 580, 581, 582, 583, 584, 585, 586, 587, 588, 589, 590, 591, 592, 593, 594, 595, 596, 597, 598, 599});
@ -439,13 +439,13 @@ public class TestBasics extends LuceneTestCase {
clauses[1] = term2;
clauses[2] = new SpanTermQuery(new Term("field", "five"));
snq = new SpanNearQuery(clauses, 0, true);
pay = new Payload(("pos: " + 0).getBytes());
pay2 = new Payload(("pos: " + 1).getBytes());
Payload pay3 = new Payload(("pos: " + 2).getBytes());
pay = new BytesRef(("pos: " + 0).getBytes());
pay2 = new BytesRef(("pos: " + 1).getBytes());
BytesRef pay3 = new BytesRef(("pos: " + 2).getBytes());
list = new ArrayList<byte[]>();
list.add(pay.getData());
list.add(pay2.getData());
list.add(pay3.getData());
list.add(pay.bytes);
list.add(pay2.bytes);
list.add(pay3.bytes);
query = new SpanNearPayloadCheckQuery(snq, list);
checkHits(query, new int[]
{505});
@ -470,14 +470,14 @@ public class TestBasics extends LuceneTestCase {
checkHits(query, new int[]{1103, 1203,1303,1403,1503,1603,1703,1803,1903});
Collection<byte[]> payloads = new ArrayList<byte[]>();
Payload pay = new Payload(("pos: " + 0).getBytes());
Payload pay2 = new Payload(("pos: " + 1).getBytes());
Payload pay3 = new Payload(("pos: " + 3).getBytes());
Payload pay4 = new Payload(("pos: " + 4).getBytes());
payloads.add(pay.getData());
payloads.add(pay2.getData());
payloads.add(pay3.getData());
payloads.add(pay4.getData());
BytesRef pay = new BytesRef(("pos: " + 0).getBytes());
BytesRef pay2 = new BytesRef(("pos: " + 1).getBytes());
BytesRef pay3 = new BytesRef(("pos: " + 3).getBytes());
BytesRef pay4 = new BytesRef(("pos: " + 4).getBytes());
payloads.add(pay.bytes);
payloads.add(pay2.bytes);
payloads.add(pay3.bytes);
payloads.add(pay4.bytes);
query = new SpanNearPayloadCheckQuery(oneThousHunThree, payloads);
checkHits(query, new int[]{1103, 1203,1303,1403,1503,1603,1703,1803,1903});


@ -32,7 +32,6 @@ import org.apache.lucene.document.TextField;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Payload;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;
@ -43,6 +42,7 @@ import org.apache.lucene.search.similarities.DefaultSimilarity;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
public class TestPayloadSpans extends LuceneTestCase {
@ -505,9 +505,9 @@ public class TestPayloadSpans extends LuceneTestCase {
if (!nopayload.contains(token)) {
if (entities.contains(token)) {
payloadAtt.setPayload(new Payload((token + ":Entity:"+ pos ).getBytes()));
payloadAtt.setPayload(new BytesRef((token + ":Entity:"+ pos ).getBytes()));
} else {
payloadAtt.setPayload(new Payload((token + ":Noise:" + pos ).getBytes()));
payloadAtt.setPayload(new BytesRef((token + ":Noise:" + pos ).getBytes()));
}
}
pos += posIncrAtt.getPositionIncrement();


@ -114,7 +114,9 @@ public class EnhancementsCategoryTokenizer extends CategoryTokenizer {
nBytes += enhancementBytes[i].length;
}
}
payload.setData(payloadBytes, 0, nBytes);
payload.bytes = payloadBytes;
payload.offset = 0;
payload.length = nBytes;
payloadAttribute.setPayload(payload);
}
}
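
Payload.setData(byte[], int, int) has no single-call BytesRef counterpart, so these tokenizers assign the public fields directly. If the three-line pattern recurs, a small helper keeps call sites terse (SetDataMigration is illustrative, not part of this commit):

import org.apache.lucene.util.BytesRef;

final class SetDataMigration {
  /** Equivalent of the old payload.setData(buffer, offset, nBytes). */
  static void setData(BytesRef payload, byte[] buffer, int offset, int nBytes) {
    payload.bytes = buffer;
    payload.offset = offset;
    payload.length = nBytes;
  }
}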


@ -79,7 +79,9 @@ public class AssociationListTokenizer extends CategoryListTokenizer {
}
if (payloadStream != null) {
termAttribute.setEmpty().append(categoryListTermText);
payload.setData(payloadStream.convertStreamToByteArray());
payload.bytes = payloadStream.convertStreamToByteArray();
payload.offset = 0;
payload.length = payload.bytes.length;
payloadAttribute.setPayload(payload);
payloadStream = null;
return true;


@ -6,7 +6,7 @@ import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.index.Payload;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.facet.index.CategoryDocumentBuilder;
import org.apache.lucene.facet.index.attributes.CategoryAttribute;
@ -49,7 +49,7 @@ public abstract class CategoryTokenizerBase extends TokenFilter {
protected CharTermAttribute termAttribute;
/** The object used for constructing payloads. */
protected Payload payload = new Payload();
protected BytesRef payload = new BytesRef();
/** Indexing params for creating term text **/
protected FacetIndexingParams indexingParams;


@ -90,7 +90,9 @@ public class CountingListTokenizer extends CategoryListTokenizer {
countingListName.getChars(0, length, termAttribute.buffer(), 0);
this.termAttribute.setLength(length);
CategoryListPayloadStream payloadStream = entry.getValue();
payload.setData(payloadStream.convertStreamToByteArray());
payload.bytes = payloadStream.convertStreamToByteArray();
payload.offset = 0;
payload.length = payload.bytes.length;
this.payloadAttribute.setPayload(payload);
return true;
}


@ -14,10 +14,10 @@ import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Payload;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.UnsafeByteArrayOutputStream;
import org.apache.lucene.util.encoding.DGapIntEncoder;
@ -79,7 +79,7 @@ public class CategoryListIteratorTest extends LuceneTestCase {
encoder.encode(val);
}
encoder.close();
payload.setPayload(new Payload(buf, 0, ubaos.length()));
payload.setPayload(new BytesRef(buf, 0, ubaos.length()));
exhausted = true;
return true;


@ -21,7 +21,7 @@ import java.io.IOException;
import java.util.Random;
import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
import org.apache.lucene.index.Payload;
import org.apache.lucene.util.BytesRef;
/**
* TokenFilter that adds random fixed-length payloads.
@ -30,7 +30,7 @@ public final class MockFixedLengthPayloadFilter extends TokenFilter {
private final PayloadAttribute payloadAtt = addAttribute(PayloadAttribute.class);
private final Random random;
private final byte[] bytes;
private final Payload payload;
private final BytesRef payload;
public MockFixedLengthPayloadFilter(Random random, TokenStream in, int length) {
super(in);
@ -39,7 +39,7 @@ public final class MockFixedLengthPayloadFilter extends TokenFilter {
}
this.random = random;
this.bytes = new byte[length];
this.payload = new Payload(bytes);
this.payload = new BytesRef(bytes);
}
@Override


@ -20,7 +20,7 @@ package org.apache.lucene.analysis;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.index.Payload;
import org.apache.lucene.util.BytesRef;
import java.io.IOException;
import java.io.Reader;
@ -67,7 +67,7 @@ final class MockPayloadFilter extends TokenFilter {
@Override
public boolean incrementToken() throws IOException {
if (input.incrementToken()) {
payloadAttr.setPayload(new Payload(("pos: " + pos).getBytes()));
payloadAttr.setPayload(new BytesRef(("pos: " + pos).getBytes()));
int posIncr;
if (i % 2 == 1) {
posIncr = 1;


@ -21,7 +21,7 @@ import java.io.IOException;
import java.util.Random;
import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
import org.apache.lucene.index.Payload;
import org.apache.lucene.util.BytesRef;
/**
* TokenFilter that adds random variable-length payloads.
@ -32,19 +32,19 @@ public final class MockVariableLengthPayloadFilter extends TokenFilter {
private final PayloadAttribute payloadAtt = addAttribute(PayloadAttribute.class);
private final Random random;
private final byte[] bytes = new byte[MAXLENGTH];
private final Payload payload;
private final BytesRef payload;
public MockVariableLengthPayloadFilter(Random random, TokenStream in) {
super(in);
this.random = random;
this.payload = new Payload(bytes);
this.payload = new BytesRef(bytes);
}
@Override
public boolean incrementToken() throws IOException {
if (input.incrementToken()) {
random.nextBytes(bytes);
payload.setData(bytes, 0, random.nextInt(MAXLENGTH));
payload.length = random.nextInt(MAXLENGTH);
payloadAtt.setPayload(payload);
return true;
} else {


@ -26,7 +26,6 @@ import org.apache.lucene.analysis.util.CharFilterFactory;
import org.apache.lucene.analysis.util.TokenFilterFactory;
import org.apache.lucene.analysis.util.TokenizerFactory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.index.Payload;
import org.apache.lucene.util.Attribute;
import org.apache.lucene.util.AttributeImpl;
import org.apache.lucene.util.AttributeSource;
@ -273,9 +272,9 @@ public abstract class AnalysisRequestHandlerBase extends RequestHandlerBase {
k = ATTRIBUTE_MAPPING.get(k);
}
if (value instanceof Payload) {
final Payload p = (Payload) value;
value = new BytesRef(p.getData()).toString();
if (value instanceof BytesRef) {
final BytesRef p = (BytesRef) value;
value = p.toString();
}
tokenNamedList.add(k, value);


@ -19,7 +19,6 @@ import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.Payload;
import org.apache.lucene.util.Attribute;
import org.apache.lucene.util.AttributeSource;
import org.apache.lucene.util.AttributeSource.State;
@ -171,7 +170,7 @@ public class JsonPreAnalyzedParser implements PreAnalyzedParser {
byte[] data = Base64.base64ToByteArray(str);
PayloadAttribute p = parent.addAttribute(PayloadAttribute.class);
if (data != null && data.length > 0) {
p.setPayload(new Payload(data));
p.setPayload(new BytesRef(data));
}
}
} else if (key.equals(FLAGS_KEY)) {
@ -248,9 +247,9 @@ public class JsonPreAnalyzedParser implements PreAnalyzedParser {
tok.put(OFFSET_START_KEY, ((OffsetAttribute)att).startOffset());
tok.put(OFFSET_END_KEY, ((OffsetAttribute)att).endOffset());
} else if (cl.isAssignableFrom(PayloadAttribute.class)) {
Payload p = ((PayloadAttribute)att).getPayload();
if (p != null && p.length() > 0) {
tok.put(PAYLOAD_KEY, Base64.byteArrayToBase64(p.getData(), p.getOffset(), p.length()));
BytesRef p = ((PayloadAttribute)att).getPayload();
if (p != null && p.length > 0) {
tok.put(PAYLOAD_KEY, Base64.byteArrayToBase64(p.bytes, p.offset, p.length));
}
} else if (cl.isAssignableFrom(PositionIncrementAttribute.class)) {
tok.put(POSINCR_KEY, ((PositionIncrementAttribute)att).getPositionIncrement());


@ -33,10 +33,10 @@ import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.Payload;
import org.apache.lucene.util.Attribute;
import org.apache.lucene.util.AttributeSource;
import org.apache.lucene.util.AttributeSource.State;
import org.apache.lucene.util.BytesRef;
import org.apache.solr.schema.PreAnalyzedField.ParseResult;
import org.apache.solr.schema.PreAnalyzedField.PreAnalyzedParser;
@ -437,7 +437,7 @@ public final class SimplePreAnalyzedParser implements PreAnalyzedParser {
PayloadAttribute p = a.addAttribute(PayloadAttribute.class);
byte[] data = hexToBytes(e.getValue());
if (data != null && data.length > 0) {
p.setPayload(new Payload(data));
p.setPayload(new BytesRef(data));
}
} else {
// unknown attribute
@ -498,9 +498,9 @@ public final class SimplePreAnalyzedParser implements PreAnalyzedParser {
} else if (cl.isAssignableFrom(OffsetAttribute.class)) {
tok.append("s=" + ((OffsetAttribute)att).startOffset() + ",e=" + ((OffsetAttribute)att).endOffset());
} else if (cl.isAssignableFrom(PayloadAttribute.class)) {
Payload p = ((PayloadAttribute)att).getPayload();
if (p != null && p.length() > 0) {
tok.append("p=" + bytesToHex(p.getData(), p.getOffset(), p.length()));
BytesRef p = ((PayloadAttribute)att).getPayload();
if (p != null && p.length > 0) {
tok.append("p=" + bytesToHex(p.bytes, p.offset, p.length));
} else if (tok.length() > 0) {
tok.setLength(tok.length() - 1); // remove the last comma
}


@ -47,7 +47,7 @@ public class TestDelimitedPayloadTokenFilterFactory extends BaseTokenStreamTestC
while (tf.incrementToken()){
PayloadAttribute payAttr = tf.getAttribute(PayloadAttribute.class);
assertTrue("payAttr is null and it shouldn't be", payAttr != null);
byte[] payData = payAttr.getPayload().getData();
byte[] payData = payAttr.getPayload().bytes;
assertTrue("payData is null and it shouldn't be", payData != null);
assertTrue("payData is null and it shouldn't be", payData != null);
float payFloat = PayloadHelper.decodeFloat(payData);
@ -70,7 +70,7 @@ public class TestDelimitedPayloadTokenFilterFactory extends BaseTokenStreamTestC
while (tf.incrementToken()){
PayloadAttribute payAttr = tf.getAttribute(PayloadAttribute.class);
assertTrue("payAttr is null and it shouldn't be", payAttr != null);
byte[] payData = payAttr.getPayload().getData();
byte[] payData = payAttr.getPayload().bytes;
assertTrue("payData is null and it shouldn't be", payData != null);
float payFloat = PayloadHelper.decodeFloat(payData);
assertTrue(payFloat + " does not equal: " + 0.1f, payFloat == 0.1f);