remove RamUsage and use Lucene one

Shay Banon 2013-07-29 00:02:42 +02:00
parent 22085cab0b
commit e4ad92203f
14 changed files with 69 additions and 114 deletions

View File (org.elasticsearch.common.RamUsage, deleted)

@@ -1,53 +0,0 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common;

/**
 *
 */
public class RamUsage {

    private static final String OS_ARCH = System.getProperty("os.arch");

    private static final boolean JRE_IS_64BIT;

    static {
        String x = System.getProperty("sun.arch.data.model");
        if (x != null) {
            JRE_IS_64BIT = x.indexOf("64") != -1;
        } else {
            if (OS_ARCH != null && OS_ARCH.indexOf("64") != -1) {
                JRE_IS_64BIT = true;
            } else {
                JRE_IS_64BIT = false;
            }
        }
    }

    public final static int NUM_BYTES_SHORT = 2;
    public final static int NUM_BYTES_INT = 4;
    public final static int NUM_BYTES_LONG = 8;
    public final static int NUM_BYTES_FLOAT = 4;
    public final static int NUM_BYTES_DOUBLE = 8;
    public final static int NUM_BYTES_CHAR = 2;
    public final static int NUM_BYTES_OBJECT_HEADER = 8;
    public final static int NUM_BYTES_OBJECT_REF = JRE_IS_64BIT ? 8 : 4;
    public final static int NUM_BYTES_ARRAY_HEADER = NUM_BYTES_OBJECT_HEADER + NUM_BYTES_INT + NUM_BYTES_OBJECT_REF;
}
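For reference, everything the deleted class provided is already exposed by Lucene's org.apache.lucene.util.RamUsageEstimator, which the files below switch to. A minimal sketch of the mapping; the class name, array size, and printed output are illustrative only, not part of the commit:

import org.apache.lucene.util.RamUsageEstimator;

public class RamUsageMigrationSketch {
    public static void main(String[] args) {
        // Per-type constants, same names and meanings as the removed RamUsage class.
        int bytesPerInt = RamUsageEstimator.NUM_BYTES_INT;          // 4
        int bytesPerLong = RamUsageEstimator.NUM_BYTES_LONG;        // 8
        int bytesPerRef = RamUsageEstimator.NUM_BYTES_OBJECT_REF;   // 4 or 8 depending on the JVM

        // Old pattern: array header plus length times element width, computed by hand.
        int[] ords = new int[1024];
        long byHand = RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + (long) ords.length * bytesPerInt;

        // New pattern used in several hunks below: let Lucene measure the array directly,
        // which also rounds up to the JVM's object alignment.
        long measured = RamUsageEstimator.sizeOf(ords);

        System.out.println(bytesPerInt + " " + bytesPerLong + " " + bytesPerRef + " " + byHand + " " + measured);
    }
}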

View File (org.elasticsearch.common.util.BigDoubleArrayList)

@@ -20,15 +20,19 @@
package org.elasticsearch.common.util;
import org.apache.lucene.util.ArrayUtil;
-import org.elasticsearch.common.RamUsage;
+import org.apache.lucene.util.RamUsageEstimator;
import java.util.Arrays;
-/** Float array abstraction able to support more than 2B values. This implementation slices data into fixed-sized blocks of
- * configurable length. */
+/**
+ * Float array abstraction able to support more than 2B values. This implementation slices data into fixed-sized blocks of
+ * configurable length.
+ */
public final class BigDoubleArrayList extends AbstractBigArray {
-    /** Default page size, 16KB of memory per page. */
+    /**
+     * Default page size, 16KB of memory per page.
+     */
    private static final int DEFAULT_PAGE_SIZE = 1 << 11;
    private double[][] pages;
@@ -69,7 +73,7 @@ public final class BigDoubleArrayList extends AbstractBigArray {
    @Override
    protected int numBytesPerElement() {
-        return RamUsage.NUM_BYTES_DOUBLE;
+        return RamUsageEstimator.NUM_BYTES_DOUBLE;
    }
}

View File (org.elasticsearch.common.util.BigFloatArrayList)

@@ -20,13 +20,17 @@
package org.elasticsearch.common.util;
import org.apache.lucene.util.ArrayUtil;
-import org.elasticsearch.common.RamUsage;
+import org.apache.lucene.util.RamUsageEstimator;
-/** Float array abstraction able to support more than 2B values. This implementation slices data into fixed-sized blocks of
- * configurable length. */
+/**
+ * Float array abstraction able to support more than 2B values. This implementation slices data into fixed-sized blocks of
+ * configurable length.
+ */
public final class BigFloatArrayList extends AbstractBigArray {
-    /** Default page size, 16KB of memory per page. */
+    /**
+     * Default page size, 16KB of memory per page.
+     */
    private static final int DEFAULT_PAGE_SIZE = 1 << 12;
    private float[][] pages;
@@ -64,7 +68,7 @@ public final class BigFloatArrayList extends AbstractBigArray {
    @Override
    protected int numBytesPerElement() {
-        return RamUsage.NUM_BYTES_FLOAT;
+        return RamUsageEstimator.NUM_BYTES_FLOAT;
    }
}

View File (org.elasticsearch.common.util.BigIntArray)

@@ -19,13 +19,17 @@
package org.elasticsearch.common.util;
-import org.elasticsearch.common.RamUsage;
+import org.apache.lucene.util.RamUsageEstimator;
-/** Int array abstraction able to support more than 2B values. This implementation slices data into fixed-sized blocks of
- * configurable length. */
+/**
+ * Int array abstraction able to support more than 2B values. This implementation slices data into fixed-sized blocks of
+ * configurable length.
+ */
public final class BigIntArray extends AbstractBigArray implements IntArray {
-    /** Default page size, 16KB of memory per page. */
+    /**
+     * Default page size, 16KB of memory per page.
+     */
    public static final int DEFAULT_PAGE_SIZE = 1 << 12;
    private int[][] pages;
@@ -63,7 +67,7 @@ public final class BigIntArray extends AbstractBigArray implements IntArray {
    @Override
    protected int numBytesPerElement() {
-        return RamUsage.NUM_BYTES_INT;
+        return RamUsageEstimator.NUM_BYTES_INT;
    }
}

View File (org.elasticsearch.index.cache.id.simple.SimpleIdReaderTypeCache)

@@ -20,7 +20,7 @@
package org.elasticsearch.index.cache.id.simple;
import gnu.trove.impl.hash.TObjectHash;
-import org.elasticsearch.common.RamUsage;
+import org.apache.lucene.util.RamUsageEstimator;
import org.elasticsearch.common.bytes.HashedBytesArray;
import org.elasticsearch.common.trove.ExtTObjectIntHasMap;
import org.elasticsearch.index.cache.id.IdReaderTypeCache;
@@ -86,26 +86,26 @@ public class SimpleIdReaderTypeCache implements IdReaderTypeCache {
        long sizeInBytes = 0;
        // Ignore type field
        // sizeInBytes += ((type.length() * RamUsage.NUM_BYTES_CHAR) + (3 * RamUsage.NUM_BYTES_INT)) + RamUsage.NUM_BYTES_OBJECT_HEADER;
-        sizeInBytes += RamUsage.NUM_BYTES_ARRAY_HEADER + (idToDoc._valuesSize() * RamUsage.NUM_BYTES_INT);
+        sizeInBytes += RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + (idToDoc._valuesSize() * RamUsageEstimator.NUM_BYTES_INT);
        for (Object o : idToDoc._set) {
            if (o == TObjectHash.FREE || o == TObjectHash.REMOVED) {
-                sizeInBytes += RamUsage.NUM_BYTES_OBJECT_REF;
+                sizeInBytes += RamUsageEstimator.NUM_BYTES_OBJECT_REF;
            } else {
                HashedBytesArray bytesArray = (HashedBytesArray) o;
-                sizeInBytes += RamUsage.NUM_BYTES_OBJECT_HEADER + (bytesArray.length() + RamUsage.NUM_BYTES_INT);
+                sizeInBytes += RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + (bytesArray.length() + RamUsageEstimator.NUM_BYTES_INT);
            }
        }
        // The docIdToId array contains references to idToDoc for this segment or other segments, so we can use OBJECT_REF
-        sizeInBytes += RamUsage.NUM_BYTES_ARRAY_HEADER + (RamUsage.NUM_BYTES_OBJECT_REF * docIdToId.length);
+        sizeInBytes += RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + (RamUsageEstimator.NUM_BYTES_OBJECT_REF * docIdToId.length);
        for (HashedBytesArray bytesArray : parentIdsValues) {
            if (bytesArray == null) {
-                sizeInBytes += RamUsage.NUM_BYTES_OBJECT_REF;
+                sizeInBytes += RamUsageEstimator.NUM_BYTES_OBJECT_REF;
            } else {
-                sizeInBytes += RamUsage.NUM_BYTES_OBJECT_HEADER + (bytesArray.length() + RamUsage.NUM_BYTES_INT);
+                sizeInBytes += RamUsageEstimator.NUM_BYTES_OBJECT_HEADER + (bytesArray.length() + RamUsageEstimator.NUM_BYTES_INT);
            }
        }
-        sizeInBytes += RamUsage.NUM_BYTES_ARRAY_HEADER + (RamUsage.NUM_BYTES_INT * parentIdsOrdinals.length);
+        sizeInBytes += RamUsageEstimator.sizeOf(parentIdsOrdinals);
        return sizeInBytes;
    }

View File (org.elasticsearch.index.fielddata.ordinals.DocIdOrdinals)

@@ -20,7 +20,7 @@
package org.elasticsearch.index.fielddata.ordinals;
import org.apache.lucene.util.LongsRef;
-import org.elasticsearch.common.RamUsage;
+import org.apache.lucene.util.RamUsageEstimator;
/**
 * Ordinals that effectively are single valued and map "one to one" to the
@@ -50,7 +50,7 @@ public class DocIdOrdinals implements Ordinals {
    @Override
    public long getMemorySizeInBytes() {
-        return RamUsage.NUM_BYTES_OBJECT_REF;
+        return RamUsageEstimator.NUM_BYTES_OBJECT_REF;
    }
    @Override

View File (org.elasticsearch.index.fielddata.ordinals.MultiOrdinals)

@@ -21,19 +21,23 @@ package org.elasticsearch.index.fielddata.ordinals;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.LongsRef;
+import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.packed.AppendingLongBuffer;
import org.apache.lucene.util.packed.MonotonicAppendingLongBuffer;
import org.apache.lucene.util.packed.PackedInts;
-import org.elasticsearch.common.RamUsage;
import org.elasticsearch.index.fielddata.ordinals.Ordinals.Docs.Iter;
-/** {@link Ordinals} implementation which is efficient at storing field data ordinals for multi-valued or sparse fields. */
+/**
+ * {@link Ordinals} implementation which is efficient at storing field data ordinals for multi-valued or sparse fields.
+ */
public class MultiOrdinals implements Ordinals {
    private static final int OFFSETS_PAGE_SIZE = 1024;
    private static final int OFFSET_INIT_PAGE_COUNT = 16;
-    /** Return true if this impl is going to be smaller than {@link SinglePackedOrdinals} by at least 20%. */
+    /**
+     * Return true if this impl is going to be smaller than {@link SinglePackedOrdinals} by at least 20%.
+     */
    public static boolean significantlySmallerThanSinglePackedOrdinals(int maxDoc, int numDocsWithValue, long numOrds) {
        final int bitsPerOrd = PackedInts.bitsRequired(numOrds);
        // Compute the worst-case number of bits per value for offsets in the worst case, eg. if no docs have a value at the
@@ -168,7 +172,7 @@ public class MultiOrdinals implements Ordinals {
            final long endOffset = endOffsets.get(docId);
            final int numValues = (int) (endOffset - startOffset);
            if (longsScratch.length < numValues) {
-                longsScratch.longs = new long[ArrayUtil.oversize(numValues, RamUsage.NUM_BYTES_LONG)];
+                longsScratch.longs = new long[ArrayUtil.oversize(numValues, RamUsageEstimator.NUM_BYTES_LONG)];
            }
            for (int i = 0; i < numValues; ++i) {
                longsScratch.longs[i] = 1L + ords.get(startOffset + i);
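The getOrds change above keeps the existing ArrayUtil.oversize growth pattern and only swaps the element-width constant. As a standalone illustration (the scratch buffer, the value 37, and the class name are hypothetical):

import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.RamUsageEstimator;

public class OversizeSketch {
    public static void main(String[] args) {
        long[] scratch = new long[0];
        int needed = 37; // hypothetical number of ordinals for one document
        if (scratch.length < needed) {
            // Over-allocate so repeated growth stays amortized; the element width lets
            // Lucene round the new length to a memory-friendly size.
            scratch = new long[ArrayUtil.oversize(needed, RamUsageEstimator.NUM_BYTES_LONG)];
        }
        System.out.println(scratch.length); // >= 37
    }
}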

View File (org.elasticsearch.index.fielddata.ordinals.SinglePackedOrdinals)

@@ -20,8 +20,8 @@
package org.elasticsearch.index.fielddata.ordinals;
import org.apache.lucene.util.LongsRef;
+import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.packed.PackedInts;
-import org.elasticsearch.common.RamUsage;
/**
 */
@@ -60,7 +60,7 @@ public class SinglePackedOrdinals implements Ordinals {
    @Override
    public long getMemorySizeInBytes() {
        if (size == -1) {
-            size = RamUsage.NUM_BYTES_OBJECT_REF + reader.ramBytesUsed();
+            size = RamUsageEstimator.NUM_BYTES_OBJECT_REF + reader.ramBytesUsed();
        }
        return size;
    }
@@ -136,7 +136,7 @@
    @Override
    public LongsRef getOrds(int docId) {
        final long ordinal = reader.get(docId);
        if (ordinal == 0) {
            longsScratch.length = 0;
        } else {
            longsScratch.offset = 0;
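The getMemorySizeInBytes hunk above relies on the packed reader reporting its own footprint via ramBytesUsed(), plus one object reference for the field holding it. A rough sketch of that pattern; the value count, bits-per-value, and class name here are made up:

import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.packed.PackedInts;

public class PackedSizeSketch {
    public static void main(String[] args) {
        // Hypothetical: one million docs, ordinals that fit in the bits required for 42.
        PackedInts.Mutable ords = PackedInts.getMutable(1000000, PackedInts.bitsRequired(42), PackedInts.DEFAULT);
        long size = RamUsageEstimator.NUM_BYTES_OBJECT_REF + ords.ramBytesUsed();
        System.out.println(size);
    }
}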

View File (org.elasticsearch.index.fielddata.plain.DoubleArrayAtomicFieldData)

@@ -20,7 +20,7 @@
package org.elasticsearch.index.fielddata.plain;
import org.apache.lucene.util.FixedBitSet;
-import org.elasticsearch.common.RamUsage;
+import org.apache.lucene.util.RamUsageEstimator;
import org.elasticsearch.common.util.BigDoubleArrayList;
import org.elasticsearch.index.fielddata.*;
import org.elasticsearch.index.fielddata.ordinals.Ordinals;
@@ -115,7 +115,7 @@ public abstract class DoubleArrayAtomicFieldData extends AtomicNumericFieldData
    @Override
    public long getMemorySizeInBytes() {
        if (size == -1) {
-            size = RamUsage.NUM_BYTES_INT/*size*/ + RamUsage.NUM_BYTES_INT/*numDocs*/ + values.sizeInBytes() + ordinals.getMemorySizeInBytes();
+            size = RamUsageEstimator.NUM_BYTES_INT/*size*/ + RamUsageEstimator.NUM_BYTES_INT/*numDocs*/ + values.sizeInBytes() + ordinals.getMemorySizeInBytes();
        }
        return size;
    }
@@ -191,7 +191,7 @@ public abstract class DoubleArrayAtomicFieldData extends AtomicNumericFieldData
    @Override
    public long getMemorySizeInBytes() {
        if (size == -1) {
-            size = RamUsage.NUM_BYTES_ARRAY_HEADER + values.sizeInBytes() + (set.getBits().length * RamUsage.NUM_BYTES_LONG);
+            size = RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + values.sizeInBytes() + RamUsageEstimator.sizeOf(set.getBits());
        }
        return size;
    }
@@ -281,7 +281,7 @@ public abstract class DoubleArrayAtomicFieldData extends AtomicNumericFieldData
    @Override
    public long getMemorySizeInBytes() {
        if (size == -1) {
-            size = RamUsage.NUM_BYTES_ARRAY_HEADER + values.sizeInBytes();
+            size = RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + values.sizeInBytes();
        }
        return size;
    }

View File (org.elasticsearch.index.fielddata.plain.DoubleArrayIndexFieldData)

@@ -22,13 +22,9 @@ package org.elasticsearch.index.fielddata.plain;
import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.Terms;
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.BytesRefIterator;
-import org.apache.lucene.util.FixedBitSet;
-import org.apache.lucene.util.NumericUtils;
+import org.apache.lucene.util.*;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.RamUsage;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.BigDoubleArrayList;
import org.elasticsearch.index.Index;
@@ -108,8 +104,8 @@ public class DoubleArrayIndexFieldData extends AbstractIndexFieldData<DoubleArra
        final FixedBitSet set = builder.buildDocsWithValuesSet();
        // there's sweatspot where due to low unique value count, using ordinals will consume less memory
-        long singleValuesArraySize = reader.maxDoc() * RamUsage.NUM_BYTES_DOUBLE + (set == null ? 0 : set.getBits().length * RamUsage.NUM_BYTES_LONG + RamUsage.NUM_BYTES_INT);
-        long uniqueValuesArraySize = values.size() * RamUsage.NUM_BYTES_DOUBLE;
+        long singleValuesArraySize = reader.maxDoc() * RamUsageEstimator.NUM_BYTES_DOUBLE + (set == null ? 0 : RamUsageEstimator.sizeOf(set.getBits()) + RamUsageEstimator.NUM_BYTES_INT);
+        long uniqueValuesArraySize = values.sizeInBytes();
        long ordinalsSize = build.getMemorySizeInBytes();
        if (uniqueValuesArraySize + ordinalsSize < singleValuesArraySize) {
            return new DoubleArrayAtomicFieldData.WithOrdinals(values, reader.maxDoc(), build);
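Besides the constant swap, this hunk changes the unique-values estimate from values.size() * NUM_BYTES_DOUBLE to the paged list's own sizeInBytes(). For illustration only, a self-contained sketch of the comparison being made; the helper name and the sample numbers are invented:

import org.apache.lucene.util.RamUsageEstimator;

public class OrdinalsSweetSpotSketch {
    // Prefer the ordinals-backed layout only when its estimated footprint beats
    // one dense double per document plus the optional "docs with value" bitset.
    static boolean preferOrdinals(int maxDoc, long uniqueValuesBytes, long ordinalsBytes, long docsWithValueBitsetBytes) {
        long singleValuesArraySize = (long) maxDoc * RamUsageEstimator.NUM_BYTES_DOUBLE + docsWithValueBitsetBytes;
        return uniqueValuesBytes + ordinalsBytes < singleValuesArraySize;
    }

    public static void main(String[] args) {
        // Hypothetical numbers: 1M docs, few unique values, compact ordinals.
        System.out.println(preferOrdinals(1000000, 80000L, 1500000L, 125000L));
    }
}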

View File (org.elasticsearch.index.fielddata.plain.FloatArrayAtomicFieldData)

@@ -20,7 +20,7 @@
package org.elasticsearch.index.fielddata.plain;
import org.apache.lucene.util.FixedBitSet;
-import org.elasticsearch.common.RamUsage;
+import org.apache.lucene.util.RamUsageEstimator;
import org.elasticsearch.common.util.BigFloatArrayList;
import org.elasticsearch.index.fielddata.*;
import org.elasticsearch.index.fielddata.ordinals.Ordinals;
@@ -115,7 +115,7 @@ public abstract class FloatArrayAtomicFieldData extends AtomicNumericFieldData {
    @Override
    public long getMemorySizeInBytes() {
        if (size == -1) {
-            size = RamUsage.NUM_BYTES_INT/*size*/ + RamUsage.NUM_BYTES_INT/*numDocs*/ + values.sizeInBytes() + ordinals.getMemorySizeInBytes();
+            size = RamUsageEstimator.NUM_BYTES_INT/*size*/ + RamUsageEstimator.NUM_BYTES_INT/*numDocs*/ + values.sizeInBytes() + ordinals.getMemorySizeInBytes();
        }
        return size;
    }
@@ -189,7 +189,7 @@ public abstract class FloatArrayAtomicFieldData extends AtomicNumericFieldData {
    @Override
    public long getMemorySizeInBytes() {
        if (size == -1) {
-            size = RamUsage.NUM_BYTES_ARRAY_HEADER + values.sizeInBytes() + (set.getBits().length * RamUsage.NUM_BYTES_LONG);
+            size = RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + values.sizeInBytes() + RamUsageEstimator.sizeOf(set.getBits());
        }
        return size;
    }
@@ -281,7 +281,7 @@ public abstract class FloatArrayAtomicFieldData extends AtomicNumericFieldData {
    @Override
    public long getMemorySizeInBytes() {
        if (size == -1) {
-            size = RamUsage.NUM_BYTES_ARRAY_HEADER + values.sizeInBytes();
+            size = RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + values.sizeInBytes();
        }
        return size;
    }

View File (org.elasticsearch.index.fielddata.plain.FloatArrayIndexFieldData)

@@ -22,13 +22,9 @@ package org.elasticsearch.index.fielddata.plain;
import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.Terms;
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.BytesRefIterator;
-import org.apache.lucene.util.FixedBitSet;
-import org.apache.lucene.util.NumericUtils;
+import org.apache.lucene.util.*;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.RamUsage;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.BigFloatArrayList;
import org.elasticsearch.index.Index;
@@ -108,7 +104,7 @@ public class FloatArrayIndexFieldData extends AbstractIndexFieldData<FloatArrayA
        final FixedBitSet set = builder.buildDocsWithValuesSet();
        // there's sweatspot where due to low unique value count, using ordinals will consume less memory
-        long singleValuesArraySize = reader.maxDoc() * RamUsage.NUM_BYTES_FLOAT + (set == null ? 0 : set.getBits().length * RamUsage.NUM_BYTES_LONG + RamUsage.NUM_BYTES_INT);
+        long singleValuesArraySize = reader.maxDoc() * RamUsageEstimator.NUM_BYTES_FLOAT + (set == null ? 0 : RamUsageEstimator.sizeOf(set.getBits()) + RamUsageEstimator.NUM_BYTES_INT);
        long uniqueValuesArraySize = values.sizeInBytes();
        long ordinalsSize = build.getMemorySizeInBytes();
        if (uniqueValuesArraySize + ordinalsSize < singleValuesArraySize) {

View File (org.elasticsearch.index.fielddata.plain.GeoPointDoubleArrayAtomicFieldData)

@@ -20,7 +20,7 @@
package org.elasticsearch.index.fielddata.plain;
import org.apache.lucene.util.FixedBitSet;
-import org.elasticsearch.common.RamUsage;
+import org.apache.lucene.util.RamUsageEstimator;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.util.BigDoubleArrayList;
import org.elasticsearch.index.fielddata.AtomicGeoPointFieldData;
@@ -119,7 +119,7 @@ public abstract class GeoPointDoubleArrayAtomicFieldData extends AtomicGeoPointF
    @Override
    public long getMemorySizeInBytes() {
        if (size == -1) {
-            size = RamUsage.NUM_BYTES_INT/*size*/ + RamUsage.NUM_BYTES_INT/*numDocs*/ + lon.sizeInBytes() + lat.sizeInBytes();
+            size = RamUsageEstimator.NUM_BYTES_INT/*size*/ + RamUsageEstimator.NUM_BYTES_INT/*numDocs*/ + lon.sizeInBytes() + lat.sizeInBytes();
        }
        return size;
    }
@@ -158,7 +158,7 @@ public abstract class GeoPointDoubleArrayAtomicFieldData extends AtomicGeoPointF
    @Override
    public GeoPoint getValue(int docId) {
        long ord = ordinals.getOrd(docId);
        if (ord == 0L) {
            return null;
        }
@@ -278,7 +278,7 @@ public abstract class GeoPointDoubleArrayAtomicFieldData extends AtomicGeoPointF
    @Override
    public long getMemorySizeInBytes() {
        if (size == -1) {
-            size = RamUsage.NUM_BYTES_INT/*size*/ + RamUsage.NUM_BYTES_INT/*numDocs*/ + (RamUsage.NUM_BYTES_ARRAY_HEADER + (lon.length * RamUsage.NUM_BYTES_DOUBLE)) + (RamUsage.NUM_BYTES_ARRAY_HEADER + (lat.length * RamUsage.NUM_BYTES_DOUBLE)) + (set.getBits().length * RamUsage.NUM_BYTES_LONG);
+            size = RamUsageEstimator.NUM_BYTES_INT/*size*/ + RamUsageEstimator.NUM_BYTES_INT/*numDocs*/ + RamUsageEstimator.sizeOf(lon) + RamUsageEstimator.sizeOf(lat) + RamUsageEstimator.sizeOf(set.getBits());
        }
        return size;
    }
@@ -378,7 +378,7 @@ public abstract class GeoPointDoubleArrayAtomicFieldData extends AtomicGeoPointF
    @Override
    public long getMemorySizeInBytes() {
        if (size == -1) {
-            size = RamUsage.NUM_BYTES_INT/*size*/ + RamUsage.NUM_BYTES_INT/*numDocs*/ + (RamUsage.NUM_BYTES_ARRAY_HEADER + (lon.length * RamUsage.NUM_BYTES_DOUBLE)) + (RamUsage.NUM_BYTES_ARRAY_HEADER + (lat.length * RamUsage.NUM_BYTES_DOUBLE));
+            size = RamUsageEstimator.NUM_BYTES_INT/*size*/ + RamUsageEstimator.NUM_BYTES_INT/*numDocs*/ + (RamUsageEstimator.sizeOf(lon) + RamUsageEstimator.sizeOf(lat));
        }
        return size;
    }

View File (org.elasticsearch.index.fielddata.plain.PackedArrayAtomicFieldData)

@@ -19,9 +19,9 @@
package org.elasticsearch.index.fielddata.plain;
+import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.packed.MonotonicAppendingLongBuffer;
import org.apache.lucene.util.packed.PackedInts;
-import org.elasticsearch.common.RamUsage;
import org.elasticsearch.index.fielddata.*;
import org.elasticsearch.index.fielddata.ordinals.Ordinals;
@@ -116,7 +116,7 @@ public abstract class PackedArrayAtomicFieldData extends AtomicNumericFieldData
    @Override
    public long getMemorySizeInBytes() {
        if (size == -1) {
-            size = RamUsage.NUM_BYTES_INT/*size*/ + RamUsage.NUM_BYTES_INT/*numDocs*/ + values.ramBytesUsed() + ordinals.getMemorySizeInBytes();
+            size = RamUsageEstimator.NUM_BYTES_INT/*size*/ + RamUsageEstimator.NUM_BYTES_INT/*numDocs*/ + values.ramBytesUsed() + ordinals.getMemorySizeInBytes();
        }
        return size;
    }
@@ -194,7 +194,7 @@ public abstract class PackedArrayAtomicFieldData extends AtomicNumericFieldData
    @Override
    public long getMemorySizeInBytes() {
        if (size == -1) {
-            size = values.ramBytesUsed() + 2 * RamUsage.NUM_BYTES_LONG;
+            size = values.ramBytesUsed() + 2 * RamUsageEstimator.NUM_BYTES_LONG;
        }
        return size;
    }