No need for deepCopy on makeSafe for pages field data
Since it's a reference to a buffer in the PagedBytes, we don't need to deep copy it in makeSafe; a shallow copy is enough.
parent 2be23d2427
commit 455b5da52f
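
For context, a minimal self-contained sketch (not part of the commit) contrasting the old deep copy with the new shallow copy. Only org.apache.lucene.util.BytesRef is real API here; the class name MakeSafeSketch and the pagedBuffer array are made up to stand in for a slice handed out by PagedBytes.

    import org.apache.lucene.util.BytesRef;

    // Minimal sketch (not from the commit): 'pagedBuffer' stands in for a slice
    // handed out by PagedBytes; the filled BytesRef only points into it.
    public class MakeSafeSketch {
        public static void main(String[] args) {
            byte[] pagedBuffer = {'f', 'o', 'o', 'b', 'a', 'r'};
            BytesRef term = new BytesRef(pagedBuffer, 3, 3); // points at "bar" inside the buffer

            // Old behaviour: copy the referenced bytes into a fresh, private array.
            BytesRef deep = BytesRef.deepCopyOf(term);

            // New behaviour for paged bytes field data: copy only the
            // (bytes, offset, length) pointer. The backing buffer is untouched, so
            // this is safe for reads (e.g. using the ref as a map key) as long as
            // the buffer itself is never overwritten, which per the commit message
            // is the case for buffers held by PagedBytes once field data is loaded.
            BytesRef shallow = new BytesRef(term.bytes, term.offset, term.length);

            System.out.println(deep.utf8ToString());          // bar
            System.out.println(shallow.utf8ToString());       // bar
            System.out.println(deep.bytes == pagedBuffer);    // false
            System.out.println(shallow.bytes == pagedBuffer); // true
        }
    }
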
BytesValues.java

@@ -49,7 +49,9 @@ public abstract class BytesValues {
     public abstract boolean hasValue(int docId);
 
     /**
-     * Converts the provided bytes to "safe" ones from a "non" safe call made (if needed).
+     * Converts the provided bytes to "safe" ones from a "non" safe call made (if needed). Note,
+     * this calls makes the bytes safe for *reads*, not writes (into the same BytesRef). For example,
+     * it makes it safe to be placed in a map.
      */
     public BytesRef makeSafe(BytesRef bytes) {
         return BytesRef.deepCopyOf(bytes);
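
To make the reworded contract concrete, a small illustrative sketch, separate from the commit and using only a plain Lucene BytesRef and a java.util.HashMap: a ref that merely points into a shared buffer works as a map key for reads, but a write through the shared bytes corrupts the stored key, which is why the guarantee is for reads only.

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.lucene.util.BytesRef;

    // Illustrative only: shows why a shallow, "safe for reads" ref can be used
    // as a map key, and why writes through the shared bytes would break that use.
    public class ReadSafeKeySketch {
        public static void main(String[] args) {
            byte[] shared = {'a', 'b', 'c', 'x', 'y', 'z'};
            Map<BytesRef, Integer> counts = new HashMap<BytesRef, Integer>();

            // Shallow ref into the shared buffer: equals()/hashCode() only read
            // bytes[offset..offset+length), so lookups work as expected.
            BytesRef key = new BytesRef(shared, 0, 3); // "abc"
            counts.put(key, 1);
            System.out.println(counts.get(new BytesRef("abc"))); // 1

            // Writing into the shared buffer silently changes the stored key, so
            // the entry effectively becomes unreachable: the ref is only "safe"
            // while nobody writes through the shared bytes.
            shared[0] = 'q';
            System.out.println(counts.get(new BytesRef("abc"))); // null
            System.out.println(counts.size());                   // 1 (orphaned entry)
        }
    }
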
@@ -152,6 +154,7 @@ public abstract class BytesValues {
         protected BytesValues.WithOrdinals withOrds;
         protected Ordinals.Docs.Iter ordsIter;
         protected final BytesRef scratch = new BytesRef();
+
         public Multi(WithOrdinals withOrds) {
             this.withOrds = withOrds;
             assert withOrds.isMultiValued();
FSTBytesAtomicFieldData.java

@@ -19,8 +19,6 @@
 
 package org.elasticsearch.index.fielddata.plain;
 
-import java.io.IOException;
-
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.IntsRef;
 import org.apache.lucene.util.fst.BytesRefFSTEnum;
@@ -35,6 +33,8 @@ import org.elasticsearch.index.fielddata.ordinals.EmptyOrdinals;
 import org.elasticsearch.index.fielddata.ordinals.Ordinals;
 import org.elasticsearch.index.fielddata.ordinals.Ordinals.Docs;
 
+import java.io.IOException;
+
 /**
  */
 public class FSTBytesAtomicFieldData implements AtomicFieldData.WithOrdinals<ScriptDocValues.Strings> {
@@ -93,7 +93,6 @@ public class FSTBytesAtomicFieldData implements AtomicFieldData.WithOrdinals<ScriptDocValues.Strings> {
     }
 
 
-
     @Override
     public ScriptDocValues.Strings getScriptValues() {
         assert fst != null;
@@ -128,7 +127,6 @@ public class FSTBytesAtomicFieldData implements AtomicFieldData.WithOrdinals<ScriptDocValues.Strings> {
         protected final FST<Long> fst;
         protected final Ordinals.Docs ordinals;
 
-        protected final BytesRef scratch = new BytesRef();
         // per-thread resources
         protected final BytesReader in;
         protected final Arc<Long> firstArc = new Arc<Long>();
@@ -180,6 +178,7 @@ public class FSTBytesAtomicFieldData implements AtomicFieldData.WithOrdinals<ScriptDocValues.Strings> {
 
     static final class SingleHashed extends Single {
         private final int[] hashes;
+
         SingleHashed(FST<Long> fst, Docs ordinals, int[] hashes) {
             super(fst, ordinals);
             this.hashes = hashes;
@@ -247,7 +246,6 @@ public class FSTBytesAtomicFieldData implements AtomicFieldData.WithOrdinals<ScriptDocValues.Strings> {
         }
 
 
-
     static class Empty extends FSTBytesAtomicFieldData {
 
         Empty(int numDocs) {
@@ -286,7 +284,4 @@ public class FSTBytesAtomicFieldData implements AtomicFieldData.WithOrdinals<ScriptDocValues.Strings> {
     }
 
 
-
-
-
 }
PagedBytesAtomicFieldData.java

@@ -133,6 +133,13 @@ public class PagedBytesAtomicFieldData implements AtomicFieldData.WithOrdinals<ScriptDocValues.Strings> {
             this.ordinals = ordinals;
         }
 
+        @Override
+        public BytesRef makeSafe(BytesRef bytes) {
+            // when we fill from the pages bytes, we just reference an existing buffer slice, its enough
+            // to create a shallow copy of the bytes to be safe for "reads".
+            return new BytesRef(bytes.bytes, bytes.offset, bytes.length);
+        }
+
         @Override
         public Ordinals.Docs ordinals() {
             return this.ordinals;
HashedAggregator.java

@@ -18,10 +18,8 @@
  */
 package org.elasticsearch.search.facet.terms.strings;
 
+import com.google.common.collect.ImmutableList;
 import gnu.trove.map.hash.TObjectIntHashMap;
-
-import java.util.Arrays;
-
 import org.apache.lucene.util.ArrayUtil;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefHash;
@@ -33,7 +31,7 @@ import org.elasticsearch.search.facet.InternalFacet;
 import org.elasticsearch.search.facet.terms.TermsFacet;
 import org.elasticsearch.search.facet.terms.support.EntryPriorityQueue;
 
-import com.google.common.collect.ImmutableList;
+import java.util.Arrays;
 
 public class HashedAggregator {
     private int missing;
@@ -57,10 +55,6 @@ public class HashedAggregator {
         }
     }
 
-    protected BytesRef makesSafe(BytesRef ref, BytesValues values) {
-        return values.makeSafe(ref);
-    }
-
    public void addValue(BytesRef value, int hashCode, BytesValues values) {
         final boolean added = hash.addNoCount(value, hashCode, values);
         assert assertHash.addNoCount(value, hashCode, values) == added : "asserting counter diverged from current counter - value: "
TermsStatsStringFacetExecutor.java

@@ -19,11 +19,8 @@
 
 package org.elasticsearch.search.facet.termsstats.strings;
 
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.Comparator;
-import java.util.List;
-
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Lists;
 import org.apache.lucene.index.AtomicReaderContext;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.util.BytesRef;
@@ -42,8 +39,10 @@ import org.elasticsearch.search.facet.terms.strings.HashedAggregator;
 import org.elasticsearch.search.facet.termsstats.TermsStatsFacet;
 import org.elasticsearch.search.internal.SearchContext;
 
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.Lists;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.List;
 
 public class TermsStatsStringFacetExecutor extends FacetExecutor {
 
@@ -158,7 +157,7 @@ public class TermsStatsStringFacetExecutor extends FacetExecutor {
             spare.reset(value, hashCode);
             InternalTermsStatsStringFacet.StringEntry stringEntry = entries.get(spare);
             if (stringEntry == null) {
-                HashedBytesRef theValue = new HashedBytesRef(makesSafe(value, values), hashCode);
+                HashedBytesRef theValue = new HashedBytesRef(values.makeSafe(value), hashCode);
                 stringEntry = new InternalTermsStatsStringFacet.StringEntry(theValue, 0, 0, 0, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY);
                 entries.put(theValue, stringEntry);
             }
@@ -198,7 +197,7 @@ public class TermsStatsStringFacetExecutor extends FacetExecutor {
             spare.reset(value, hashCode);
             InternalTermsStatsStringFacet.StringEntry stringEntry = entries.get(spare);
             if (stringEntry == null) {
-                HashedBytesRef theValue = new HashedBytesRef(makesSafe(value, values), hashCode);
+                HashedBytesRef theValue = new HashedBytesRef(values.makeSafe(value), hashCode);
                 stringEntry = new InternalTermsStatsStringFacet.StringEntry(theValue, 1, 0, 0, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY);
                 entries.put(theValue, stringEntry);
             } else {