Remove the HPPC dependency from all modules and move the HPPC fork to internal. (#13422)

* Remove hppc dependency
* Change fork version to 0.10.0
* Add @lucene.internal
* Move hppc classes to oal.internal.hppc but export it.
* Delete hppc license since it's no longer a dependency.

---------

Co-authored-by: Dawid Weiss <dawid.weiss@carrotsearch.com>
This commit is contained in:
Bruno Roustant 2024-05-27 12:09:25 +02:00 committed by GitHub
parent ddf538d43e
commit f394c9418e
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
136 changed files with 5853 additions and 579 deletions

View File

@@ -240,6 +240,11 @@ API Changes
* GITHUB#13145: Deprecate ByteBufferIndexInput as it will be removed in Lucene 10.0. (Uwe Schindler)
* GITHUB#13422: an explicit dependency on the HPPC library is removed in favor of an internal repackaged copy in
oal.internal.hppc. If you relied on HPPC as a transitive dependency, you'll have to add it to your project explicitly.
The HPPC classes now bundled in Lucene core are internal and will have restricted access in future releases, please do
not use them. (Bruno Roustant, Dawid Weiss, Uwe Schindler, Chris Hegarty)
New Features
---------------------

View File

@@ -20,11 +20,11 @@ import java.io.IOException;
import java.io.Reader; import java.io.Reader;
import org.apache.lucene.analysis.CharFilter; // javadocs import org.apache.lucene.analysis.CharFilter; // javadocs
import org.apache.lucene.analysis.util.RollingCharBuffer; import org.apache.lucene.analysis.util.RollingCharBuffer;
import org.apache.lucene.internal.hppc.CharObjectHashMap;
import org.apache.lucene.util.CharsRef; import org.apache.lucene.util.CharsRef;
import org.apache.lucene.util.fst.CharSequenceOutputs; import org.apache.lucene.util.fst.CharSequenceOutputs;
import org.apache.lucene.util.fst.FST; import org.apache.lucene.util.fst.FST;
import org.apache.lucene.util.fst.Outputs; import org.apache.lucene.util.fst.Outputs;
import org.apache.lucene.util.hppc.CharObjectHashMap;
/** /**
* Simplistic {@link CharFilter} that applies the mappings contained in a {@link NormalizeCharMap} * Simplistic {@link CharFilter} that applies the mappings contained in a {@link NormalizeCharMap}

View File

@@ -19,6 +19,7 @@ package org.apache.lucene.analysis.charfilter;
import java.io.IOException; import java.io.IOException;
import java.util.Map; import java.util.Map;
import java.util.TreeMap; import java.util.TreeMap;
import org.apache.lucene.internal.hppc.CharObjectHashMap;
import org.apache.lucene.util.CharsRef; import org.apache.lucene.util.CharsRef;
import org.apache.lucene.util.IntsRefBuilder; import org.apache.lucene.util.IntsRefBuilder;
import org.apache.lucene.util.fst.CharSequenceOutputs; import org.apache.lucene.util.fst.CharSequenceOutputs;
@ -26,7 +27,6 @@ import org.apache.lucene.util.fst.FST;
import org.apache.lucene.util.fst.FSTCompiler; import org.apache.lucene.util.fst.FSTCompiler;
import org.apache.lucene.util.fst.Outputs; import org.apache.lucene.util.fst.Outputs;
import org.apache.lucene.util.fst.Util; import org.apache.lucene.util.fst.Util;
import org.apache.lucene.util.hppc.CharObjectHashMap;
// TODO: save/load? // TODO: save/load?

View File

@ -26,10 +26,10 @@ import org.apache.lucene.analysis.synonym.SynonymGraphFilter;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute; import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute;
import org.apache.lucene.internal.hppc.IntArrayList;
import org.apache.lucene.internal.hppc.IntCursor;
import org.apache.lucene.util.AttributeSource; import org.apache.lucene.util.AttributeSource;
import org.apache.lucene.util.RollingBuffer; import org.apache.lucene.util.RollingBuffer;
import org.apache.lucene.util.hppc.IntArrayList;
import org.apache.lucene.util.hppc.IntCursor;
/** /**
* Converts an incoming graph token stream, such as one from {@link SynonymGraphFilter}, into a flat * Converts an incoming graph token stream, such as one from {@link SynonymGraphFilter}, into a flat

View File

@ -50,6 +50,9 @@ import java.util.TreeMap;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import org.apache.lucene.analysis.hunspell.SortingStrategy.EntryAccumulator; import org.apache.lucene.analysis.hunspell.SortingStrategy.EntryAccumulator;
import org.apache.lucene.analysis.hunspell.SortingStrategy.EntrySupplier; import org.apache.lucene.analysis.hunspell.SortingStrategy.EntrySupplier;
import org.apache.lucene.internal.hppc.CharHashSet;
import org.apache.lucene.internal.hppc.IntArrayList;
import org.apache.lucene.internal.hppc.IntCursor;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.Directory;
import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.IntsRef; import org.apache.lucene.util.IntsRef;
@ -58,9 +61,6 @@ import org.apache.lucene.util.fst.FST;
import org.apache.lucene.util.fst.FSTCompiler; import org.apache.lucene.util.fst.FSTCompiler;
import org.apache.lucene.util.fst.IntSequenceOutputs; import org.apache.lucene.util.fst.IntSequenceOutputs;
import org.apache.lucene.util.fst.Util; import org.apache.lucene.util.fst.Util;
import org.apache.lucene.util.hppc.CharHashSet;
import org.apache.lucene.util.hppc.IntArrayList;
import org.apache.lucene.util.hppc.IntCursor;
/** In-memory structure for the dictionary (.dic) and affix (.aff) data of a hunspell dictionary. */ /** In-memory structure for the dictionary (.dic) and affix (.aff) data of a hunspell dictionary. */
public class Dictionary { public class Dictionary {

View File

@ -17,10 +17,10 @@
package org.apache.lucene.analysis.hunspell; package org.apache.lucene.analysis.hunspell;
import java.util.function.Consumer; import java.util.function.Consumer;
import org.apache.lucene.internal.hppc.IntObjectHashMap;
import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.CharsRef; import org.apache.lucene.util.CharsRef;
import org.apache.lucene.util.IntsRef; import org.apache.lucene.util.IntsRef;
import org.apache.lucene.util.hppc.IntObjectHashMap;
/** /**
* A cache allowing for CPU-cache-friendlier iteration over {@link WordStorage} entries that can be * A cache allowing for CPU-cache-friendlier iteration over {@link WordStorage} entries that can be

View File

@ -39,11 +39,11 @@ import java.util.function.Function;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import java.util.stream.Stream; import java.util.stream.Stream;
import org.apache.lucene.analysis.hunspell.AffixedWord.Affix; import org.apache.lucene.analysis.hunspell.AffixedWord.Affix;
import org.apache.lucene.internal.hppc.CharHashSet;
import org.apache.lucene.internal.hppc.CharObjectHashMap;
import org.apache.lucene.util.IntsRef; import org.apache.lucene.util.IntsRef;
import org.apache.lucene.util.fst.FST; import org.apache.lucene.util.fst.FST;
import org.apache.lucene.util.fst.IntsRefFSTEnum; import org.apache.lucene.util.fst.IntsRefFSTEnum;
import org.apache.lucene.util.hppc.CharHashSet;
import org.apache.lucene.util.hppc.CharObjectHashMap;
/** /**
* A utility class used for generating possible word forms by adding affixes to stems ({@link * A utility class used for generating possible word forms by adding affixes to stems ({@link

View File

@ -20,6 +20,7 @@ import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.function.Consumer; import java.util.function.Consumer;
import org.apache.lucene.internal.hppc.IntArrayList;
import org.apache.lucene.store.ByteArrayDataInput; import org.apache.lucene.store.ByteArrayDataInput;
import org.apache.lucene.store.ByteArrayDataOutput; import org.apache.lucene.store.ByteArrayDataOutput;
import org.apache.lucene.store.DataOutput; import org.apache.lucene.store.DataOutput;
@ -28,7 +29,6 @@ import org.apache.lucene.util.CharsRef;
import org.apache.lucene.util.IntsRef; import org.apache.lucene.util.IntsRef;
import org.apache.lucene.util.IntsRefBuilder; import org.apache.lucene.util.IntsRefBuilder;
import org.apache.lucene.util.fst.IntSequenceOutputs; import org.apache.lucene.util.fst.IntSequenceOutputs;
import org.apache.lucene.util.hppc.IntArrayList;
/** /**
* A data structure for memory-efficient word storage and fast lookup/enumeration. Each dictionary * A data structure for memory-efficient word storage and fast lookup/enumeration. Each dictionary

View File

@ -20,11 +20,11 @@ import java.io.IOException;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections; import java.util.Collections;
import java.util.EnumMap; import java.util.EnumMap;
import org.apache.lucene.internal.hppc.IntArrayList;
import org.apache.lucene.internal.hppc.IntCursor;
import org.apache.lucene.internal.hppc.IntIntHashMap;
import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.fst.FST; import org.apache.lucene.util.fst.FST;
import org.apache.lucene.util.hppc.IntArrayList;
import org.apache.lucene.util.hppc.IntCursor;
import org.apache.lucene.util.hppc.IntIntHashMap;
/** {@link Viterbi} subclass for n-best path calculation. */ /** {@link Viterbi} subclass for n-best path calculation. */
public abstract class ViterbiNBest<T extends Token, U extends MorphData> public abstract class ViterbiNBest<T extends Token, U extends MorphData>

View File

@ -21,9 +21,9 @@ import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.internal.hppc.IntArrayList;
import org.apache.lucene.util.AttributeFactory; import org.apache.lucene.util.AttributeFactory;
import org.apache.lucene.util.IgnoreRandomChains; import org.apache.lucene.util.IgnoreRandomChains;
import org.apache.lucene.util.hppc.IntArrayList;
/** /**
* Tokenizer for domain-like hierarchies. * Tokenizer for domain-like hierarchies.

View File

@ -26,6 +26,8 @@ import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.internal.hppc.IntArrayList;
import org.apache.lucene.internal.hppc.IntHashSet;
import org.apache.lucene.store.ByteArrayDataOutput; import org.apache.lucene.store.ByteArrayDataOutput;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.BytesRefBuilder;
@ -37,8 +39,6 @@ import org.apache.lucene.util.fst.ByteSequenceOutputs;
import org.apache.lucene.util.fst.FST; import org.apache.lucene.util.fst.FST;
import org.apache.lucene.util.fst.FSTCompiler; import org.apache.lucene.util.fst.FSTCompiler;
import org.apache.lucene.util.fst.Util; import org.apache.lucene.util.fst.Util;
import org.apache.lucene.util.hppc.IntArrayList;
import org.apache.lucene.util.hppc.IntHashSet;
/** /**
* A map of synonyms, keys and values are phrases. * A map of synonyms, keys and values are phrases.

View File

@ -27,11 +27,11 @@ import java.nio.charset.StandardCharsets;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import org.apache.lucene.analysis.TokenizerFactory; import org.apache.lucene.analysis.TokenizerFactory;
import org.apache.lucene.internal.hppc.IntObjectHashMap;
import org.apache.lucene.util.AttributeFactory; import org.apache.lucene.util.AttributeFactory;
import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.ResourceLoader; import org.apache.lucene.util.ResourceLoader;
import org.apache.lucene.util.ResourceLoaderAware; import org.apache.lucene.util.ResourceLoaderAware;
import org.apache.lucene.util.hppc.IntObjectHashMap;
/** /**
* Factory for {@link ICUTokenizer}. Words are broken across script boundaries, then segmented * Factory for {@link ICUTokenizer}. Words are broken across script boundaries, then segmented

View File

@ -17,7 +17,7 @@
package org.apache.lucene.analysis.ja; package org.apache.lucene.analysis.ja;
import java.util.Map; import java.util.Map;
import org.apache.lucene.util.hppc.CharObjectHashMap; import org.apache.lucene.internal.hppc.CharObjectHashMap;
/** Utility methods for Japanese filters. */ /** Utility methods for Japanese filters. */
class JapaneseFilterUtil { class JapaneseFilterUtil {

View File

@ -23,7 +23,7 @@ import java.util.Map;
import org.apache.lucene.analysis.TokenFilter; import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.util.hppc.CharObjectHashMap; import org.apache.lucene.internal.hppc.CharObjectHashMap;
/** /**
* A {@link TokenFilter} that normalizes small letters (捨て仮名) in hiragana into normal letters. For * A {@link TokenFilter} that normalizes small letters (捨て仮名) in hiragana into normal letters. For

View File

@ -23,7 +23,7 @@ import java.util.Map;
import org.apache.lucene.analysis.TokenFilter; import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.util.hppc.CharObjectHashMap; import org.apache.lucene.internal.hppc.CharObjectHashMap;
/** /**
* A {@link TokenFilter} that normalizes small letters (捨て仮名) in katakana into normal letters. For * A {@link TokenFilter} that normalizes small letters (捨て仮名) in katakana into normal letters. For

View File

@ -19,9 +19,9 @@ package org.apache.lucene.analysis.cn.smart.hhmm;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import org.apache.lucene.analysis.cn.smart.Utility; import org.apache.lucene.analysis.cn.smart.Utility;
import org.apache.lucene.util.hppc.IntArrayList; import org.apache.lucene.internal.hppc.IntArrayList;
import org.apache.lucene.util.hppc.IntObjectHashMap; import org.apache.lucene.internal.hppc.IntObjectHashMap;
import org.apache.lucene.util.hppc.ObjectCursor; import org.apache.lucene.internal.hppc.ObjectCursor;
/** /**
* Graph representing possible token pairs (bigrams) at each start offset in the sentence. * Graph representing possible token pairs (bigrams) at each start offset in the sentence.

View File

@ -18,7 +18,7 @@ package org.apache.lucene.analysis.cn.smart.hhmm;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import org.apache.lucene.util.hppc.IntObjectHashMap; import org.apache.lucene.internal.hppc.IntObjectHashMap;
/** /**
* Graph representing possible tokens at each start offset in the sentence. * Graph representing possible tokens at each start offset in the sentence.

View File

@ -58,7 +58,7 @@ import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import org.apache.lucene.util.hppc.ObjectCursor; import org.apache.lucene.internal.hppc.ObjectCursor;
/** /**
* The Gener object helps in the discarding of nodes which break the reduction effort and defend the * The Gener object helps in the discarding of nodes which break the reduction effort and defend the

View File

@ -58,7 +58,7 @@ import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import org.apache.lucene.util.hppc.ObjectCursor; import org.apache.lucene.internal.hppc.ObjectCursor;
/** /**
* The Lift class is a data structure that is a variation of a Patricia trie. * The Lift class is a data structure that is a variation of a Patricia trie.

View File

@ -58,7 +58,7 @@ import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import org.apache.lucene.util.hppc.CharCursor; import org.apache.lucene.internal.hppc.CharCursor;
/** /**
* The Optimizer class is a Trie that will be reduced (have empty rows removed). * The Optimizer class is a Trie that will be reduced (have empty rows removed).

View File

@ -58,8 +58,8 @@ import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import org.apache.lucene.util.hppc.CharCursor; import org.apache.lucene.internal.hppc.CharCursor;
import org.apache.lucene.util.hppc.ObjectCursor; import org.apache.lucene.internal.hppc.ObjectCursor;
/** The Reduce object is used to remove gaps in a Trie which stores a dictionary. */ /** The Reduce object is used to remove gaps in a Trie which stores a dictionary. */
public class Reduce { public class Reduce {

View File

@ -59,9 +59,9 @@ import java.io.DataOutput;
import java.io.IOException; import java.io.IOException;
import java.io.PrintStream; import java.io.PrintStream;
import java.util.Iterator; import java.util.Iterator;
import org.apache.lucene.util.hppc.CharCursor; import org.apache.lucene.internal.hppc.CharCursor;
import org.apache.lucene.util.hppc.CharObjectHashMap; import org.apache.lucene.internal.hppc.CharObjectHashMap;
import org.apache.lucene.util.hppc.ObjectCursor; import org.apache.lucene.internal.hppc.ObjectCursor;
/** The Row class represents a row in a matrix representation of a trie. */ /** The Row class represents a row in a matrix representation of a trie. */
public class Row { public class Row {

View File

@ -23,6 +23,7 @@ import java.util.Set;
import org.apache.lucene.benchmark.byTask.utils.Config; import org.apache.lucene.benchmark.byTask.utils.Config;
import org.apache.lucene.document.Document; import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field; import org.apache.lucene.document.Field;
import org.apache.lucene.internal.hppc.IntObjectHashMap;
import org.apache.lucene.spatial.SpatialStrategy; import org.apache.lucene.spatial.SpatialStrategy;
import org.apache.lucene.spatial.composite.CompositeSpatialStrategy; import org.apache.lucene.spatial.composite.CompositeSpatialStrategy;
import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy; import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy;
@ -30,7 +31,6 @@ import org.apache.lucene.spatial.prefix.tree.PackedQuadPrefixTree;
import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree; import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree;
import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTreeFactory; import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTreeFactory;
import org.apache.lucene.spatial.serialized.SerializedDVStrategy; import org.apache.lucene.spatial.serialized.SerializedDVStrategy;
import org.apache.lucene.util.hppc.IntObjectHashMap;
import org.locationtech.spatial4j.context.SpatialContext; import org.locationtech.spatial4j.context.SpatialContext;
import org.locationtech.spatial4j.context.SpatialContextFactory; import org.locationtech.spatial4j.context.SpatialContextFactory;
import org.locationtech.spatial4j.shape.Point; import org.locationtech.spatial4j.shape.Point;

View File

@ -27,7 +27,7 @@ import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Properties; import java.util.Properties;
import java.util.StringTokenizer; import java.util.StringTokenizer;
import org.apache.lucene.util.hppc.IntArrayList; import org.apache.lucene.internal.hppc.IntArrayList;
/** /**
* Perf run configuration properties. * Perf run configuration properties.

View File

@@ -23,36 +23,39 @@ module org.apache.lucene.core {
requires java.logging; requires java.logging;
requires static jdk.management; // this is optional but explicit declaration is recommended requires static jdk.management; // this is optional but explicit declaration is recommended
exports org.apache.lucene.analysis;
exports org.apache.lucene.analysis.standard; exports org.apache.lucene.analysis.standard;
exports org.apache.lucene.analysis.tokenattributes; exports org.apache.lucene.analysis.tokenattributes;
exports org.apache.lucene.codecs; exports org.apache.lucene.analysis;
exports org.apache.lucene.codecs.compressing; exports org.apache.lucene.codecs.compressing;
exports org.apache.lucene.codecs.lucene90.blocktree;
exports org.apache.lucene.codecs.lucene90.compressing;
exports org.apache.lucene.codecs.lucene90; exports org.apache.lucene.codecs.lucene90;
exports org.apache.lucene.codecs.lucene94; exports org.apache.lucene.codecs.lucene94;
exports org.apache.lucene.codecs.lucene95; exports org.apache.lucene.codecs.lucene95;
exports org.apache.lucene.codecs.lucene99; exports org.apache.lucene.codecs.lucene99;
exports org.apache.lucene.codecs.lucene90.blocktree;
exports org.apache.lucene.codecs.lucene90.compressing;
exports org.apache.lucene.codecs.perfield; exports org.apache.lucene.codecs.perfield;
exports org.apache.lucene.codecs;
exports org.apache.lucene.document; exports org.apache.lucene.document;
exports org.apache.lucene.geo; exports org.apache.lucene.geo;
exports org.apache.lucene.index; exports org.apache.lucene.index;
exports org.apache.lucene.search;
exports org.apache.lucene.search.comparators; exports org.apache.lucene.search.comparators;
exports org.apache.lucene.search.similarities;
exports org.apache.lucene.search.knn; exports org.apache.lucene.search.knn;
exports org.apache.lucene.search.similarities;
exports org.apache.lucene.search;
exports org.apache.lucene.store; exports org.apache.lucene.store;
exports org.apache.lucene.util;
exports org.apache.lucene.util.automaton; exports org.apache.lucene.util.automaton;
exports org.apache.lucene.util.bkd; exports org.apache.lucene.util.bkd;
exports org.apache.lucene.util.compress; exports org.apache.lucene.util.compress;
exports org.apache.lucene.util.fst; exports org.apache.lucene.util.fst;
exports org.apache.lucene.util.graph; exports org.apache.lucene.util.graph;
exports org.apache.lucene.util.hnsw; exports org.apache.lucene.util.hnsw;
exports org.apache.lucene.util.hppc;
exports org.apache.lucene.util.mutable; exports org.apache.lucene.util.mutable;
exports org.apache.lucene.util.packed; exports org.apache.lucene.util.packed;
exports org.apache.lucene.util;
// Temporarily export HPPC to all modules (eventually, this
// should be restricted to only Lucene modules)
exports org.apache.lucene.internal.hppc;
// Only export internal packages to the test framework. // Only export internal packages to the test framework.
exports org.apache.lucene.internal.tests to exports org.apache.lucene.internal.tests to

View File

@ -25,11 +25,11 @@ import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute; import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute;
import org.apache.lucene.internal.hppc.IntArrayList;
import org.apache.lucene.internal.hppc.IntCursor;
import org.apache.lucene.internal.hppc.IntIntHashMap;
import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.Transition; import org.apache.lucene.util.automaton.Transition;
import org.apache.lucene.util.hppc.IntArrayList;
import org.apache.lucene.util.hppc.IntCursor;
import org.apache.lucene.util.hppc.IntIntHashMap;
/** Converts an Automaton into a TokenStream. */ /** Converts an Automaton into a TokenStream. */
public class AutomatonToTokenStream { public class AutomatonToTokenStream {

View File

@ -36,6 +36,8 @@ import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.index.TermsEnum; import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.internal.hppc.LongHashSet;
import org.apache.lucene.internal.hppc.LongIntHashMap;
import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.SortedSetSelector; import org.apache.lucene.search.SortedSetSelector;
import org.apache.lucene.store.ByteArrayDataOutput; import org.apache.lucene.store.ByteArrayDataOutput;
@ -50,8 +52,6 @@ import org.apache.lucene.util.LongsRef;
import org.apache.lucene.util.MathUtil; import org.apache.lucene.util.MathUtil;
import org.apache.lucene.util.StringHelper; import org.apache.lucene.util.StringHelper;
import org.apache.lucene.util.compress.LZ4; import org.apache.lucene.util.compress.LZ4;
import org.apache.lucene.util.hppc.LongHashSet;
import org.apache.lucene.util.hppc.LongIntHashMap;
import org.apache.lucene.util.packed.DirectMonotonicWriter; import org.apache.lucene.util.packed.DirectMonotonicWriter;
import org.apache.lucene.util.packed.DirectWriter; import org.apache.lucene.util.packed.DirectWriter;

View File

@ -29,11 +29,11 @@ import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.SegmentReadState;
import org.apache.lucene.internal.hppc.IntObjectHashMap;
import org.apache.lucene.store.ChecksumIndexInput; import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.RandomAccessInput; import org.apache.lucene.store.RandomAccessInput;
import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.hppc.IntObjectHashMap;
/** Reader for {@link Lucene90NormsFormat} */ /** Reader for {@link Lucene90NormsFormat} */
final class Lucene90NormsProducer extends NormsProducer implements Cloneable { final class Lucene90NormsProducer extends NormsProducer implements Cloneable {

View File

@ -24,12 +24,12 @@ import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.PointValues; import org.apache.lucene.index.PointValues;
import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.SegmentReadState;
import org.apache.lucene.internal.hppc.IntObjectHashMap;
import org.apache.lucene.store.ChecksumIndexInput; import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.ReadAdvice; import org.apache.lucene.store.ReadAdvice;
import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.bkd.BKDReader; import org.apache.lucene.util.bkd.BKDReader;
import org.apache.lucene.util.hppc.IntObjectHashMap;
/** Reads point values previously written with {@link Lucene90PointsWriter} */ /** Reads point values previously written with {@link Lucene90PointsWriter} */
public class Lucene90PointsReader extends PointsReader { public class Lucene90PointsReader extends PointsReader {

View File

@ -31,6 +31,8 @@ import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.SegmentReadState;
import org.apache.lucene.index.Terms; import org.apache.lucene.index.Terms;
import org.apache.lucene.internal.hppc.IntCursor;
import org.apache.lucene.internal.hppc.IntObjectHashMap;
import org.apache.lucene.store.ChecksumIndexInput; import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.ReadAdvice; import org.apache.lucene.store.ReadAdvice;
@ -38,8 +40,6 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.fst.ByteSequenceOutputs; import org.apache.lucene.util.fst.ByteSequenceOutputs;
import org.apache.lucene.util.fst.Outputs; import org.apache.lucene.util.fst.Outputs;
import org.apache.lucene.util.hppc.IntCursor;
import org.apache.lucene.util.hppc.IntObjectHashMap;
/** /**
* A block-based terms index and dictionary that assigns terms to variable length blocks according * A block-based terms index and dictionary that assigns terms to variable length blocks according

View File

@ -39,6 +39,7 @@ import org.apache.lucene.index.Fields;
import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.MergeState; import org.apache.lucene.index.MergeState;
import org.apache.lucene.index.SegmentInfo; import org.apache.lucene.index.SegmentInfo;
import org.apache.lucene.internal.hppc.IntHashSet;
import org.apache.lucene.store.ByteBuffersDataInput; import org.apache.lucene.store.ByteBuffersDataInput;
import org.apache.lucene.store.ByteBuffersDataOutput; import org.apache.lucene.store.ByteBuffersDataOutput;
import org.apache.lucene.store.DataInput; import org.apache.lucene.store.DataInput;
@ -51,7 +52,6 @@ import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.StringHelper; import org.apache.lucene.util.StringHelper;
import org.apache.lucene.util.hppc.IntHashSet;
import org.apache.lucene.util.packed.BlockPackedWriter; import org.apache.lucene.util.packed.BlockPackedWriter;
import org.apache.lucene.util.packed.DirectWriter; import org.apache.lucene.util.packed.DirectWriter;
import org.apache.lucene.util.packed.PackedInts; import org.apache.lucene.util.packed.PackedInts;

View File

@ -48,6 +48,7 @@ import org.apache.lucene.index.SegmentWriteState;
import org.apache.lucene.index.Sorter; import org.apache.lucene.index.Sorter;
import org.apache.lucene.index.VectorEncoding; import org.apache.lucene.index.VectorEncoding;
import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.index.VectorSimilarityFunction;
import org.apache.lucene.internal.hppc.IntArrayList;
import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.VectorScorer; import org.apache.lucene.search.VectorScorer;
import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexInput;
@ -59,7 +60,6 @@ import org.apache.lucene.util.VectorUtil;
import org.apache.lucene.util.hnsw.CloseableRandomVectorScorerSupplier; import org.apache.lucene.util.hnsw.CloseableRandomVectorScorerSupplier;
import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorer;
import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier;
import org.apache.lucene.util.hppc.IntArrayList;
import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import org.apache.lucene.util.quantization.QuantizedByteVectorValues;
import org.apache.lucene.util.quantization.QuantizedVectorsReader; import org.apache.lucene.util.quantization.QuantizedVectorsReader;
import org.apache.lucene.util.quantization.ScalarQuantizer; import org.apache.lucene.util.quantization.ScalarQuantizer;

View File

@ -35,6 +35,7 @@ import org.apache.lucene.geo.Rectangle;
import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.PointValues; import org.apache.lucene.index.PointValues;
import org.apache.lucene.internal.hppc.IntArrayList;
import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BooleanQuery;
@ -53,7 +54,6 @@ import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.util.SloppyMath; import org.apache.lucene.util.SloppyMath;
import org.apache.lucene.util.hppc.IntArrayList;
/** /**
* An indexed location field. * An indexed location field.

View File

@ -28,9 +28,9 @@ import org.apache.lucene.index.PointValues;
import org.apache.lucene.index.PointValues.IntersectVisitor; import org.apache.lucene.index.PointValues.IntersectVisitor;
import org.apache.lucene.index.PointValues.PointTree; import org.apache.lucene.index.PointValues.PointTree;
import org.apache.lucene.index.PointValues.Relation; import org.apache.lucene.index.PointValues.Relation;
import org.apache.lucene.internal.hppc.IntArrayList;
import org.apache.lucene.util.Bits; import org.apache.lucene.util.Bits;
import org.apache.lucene.util.SloppyMath; import org.apache.lucene.util.SloppyMath;
import org.apache.lucene.util.hppc.IntArrayList;
/** KNN search on top of 2D lat/lon indexed points. */ /** KNN search on top of 2D lat/lon indexed points. */
class NearestNeighbor { class NearestNeighbor {

View File

@ -26,13 +26,13 @@ import java.util.Locale;
import java.util.Set; import java.util.Set;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicLong;
import org.apache.lucene.internal.hppc.LongHashSet;
import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IOContext;
import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOConsumer; import org.apache.lucene.util.IOConsumer;
import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.InfoStream; import org.apache.lucene.util.InfoStream;
import org.apache.lucene.util.hppc.LongHashSet;
/** /**
* Tracks the stream of {@link FrozenBufferedUpdates}. When DocumentsWriterPerThread flushes, its * Tracks the stream of {@link FrozenBufferedUpdates}. When DocumentsWriterPerThread flushes, its

View File

@ -33,8 +33,8 @@ import java.util.Objects;
import java.util.Set; import java.util.Set;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import java.util.stream.StreamSupport; import java.util.stream.StreamSupport;
import org.apache.lucene.internal.hppc.IntObjectHashMap;
import org.apache.lucene.util.CollectionUtil; import org.apache.lucene.util.CollectionUtil;
import org.apache.lucene.util.hppc.IntObjectHashMap;
/** /**
* Collection of {@link FieldInfo}s (accessible by number or by name). * Collection of {@link FieldInfo}s (accessible by number or by name).

View File

@ -58,6 +58,8 @@ import org.apache.lucene.index.FieldInfos.FieldNumbers;
import org.apache.lucene.index.IndexWriterConfig.OpenMode; import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.index.MergePolicy.MergeReader; import org.apache.lucene.index.MergePolicy.MergeReader;
import org.apache.lucene.index.Sorter.DocMap; import org.apache.lucene.index.Sorter.DocMap;
import org.apache.lucene.internal.hppc.LongObjectHashMap;
import org.apache.lucene.internal.hppc.ObjectCursor;
import org.apache.lucene.internal.tests.IndexPackageAccess; import org.apache.lucene.internal.tests.IndexPackageAccess;
import org.apache.lucene.internal.tests.IndexWriterAccess; import org.apache.lucene.internal.tests.IndexWriterAccess;
import org.apache.lucene.internal.tests.TestSecrets; import org.apache.lucene.internal.tests.TestSecrets;
@ -90,8 +92,6 @@ import org.apache.lucene.util.StringHelper;
import org.apache.lucene.util.ThreadInterruptedException; import org.apache.lucene.util.ThreadInterruptedException;
import org.apache.lucene.util.UnicodeUtil; import org.apache.lucene.util.UnicodeUtil;
import org.apache.lucene.util.Version; import org.apache.lucene.util.Version;
import org.apache.lucene.util.hppc.LongObjectHashMap;
import org.apache.lucene.util.hppc.ObjectCursor;
/** /**
* An <code>IndexWriter</code> creates and maintains an index. * An <code>IndexWriter</code> creates and maintains an index.

View File

@ -19,12 +19,12 @@ package org.apache.lucene.index;
import java.io.IOException; import java.io.IOException;
import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.DocValuesFormat;
import org.apache.lucene.codecs.DocValuesProducer; import org.apache.lucene.codecs.DocValuesProducer;
import org.apache.lucene.internal.hppc.LongArrayList;
import org.apache.lucene.internal.hppc.LongCursor;
import org.apache.lucene.internal.hppc.LongObjectHashMap;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IOContext;
import org.apache.lucene.util.RefCount; import org.apache.lucene.util.RefCount;
import org.apache.lucene.util.hppc.LongArrayList;
import org.apache.lucene.util.hppc.LongCursor;
import org.apache.lucene.util.hppc.LongObjectHashMap;
/** /**
* Manages the {@link DocValuesProducer} held by {@link SegmentReader} and keeps track of their * Manages the {@link DocValuesProducer} held by {@link SegmentReader} and keeps track of their

View File

@ -23,8 +23,8 @@ import java.util.IdentityHashMap;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import org.apache.lucene.codecs.DocValuesProducer; import org.apache.lucene.codecs.DocValuesProducer;
import org.apache.lucene.internal.hppc.LongArrayList;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.Directory;
import org.apache.lucene.util.hppc.LongArrayList;
/** Encapsulates multiple producers when there are docvalues updates as one producer */ /** Encapsulates multiple producers when there are docvalues updates as one producer */
// TODO: try to clean up close? no-op? // TODO: try to clean up close? no-op?

View File

@ -28,13 +28,13 @@ import org.apache.lucene.codecs.NormsProducer;
import org.apache.lucene.codecs.PointsReader; import org.apache.lucene.codecs.PointsReader;
import org.apache.lucene.codecs.StoredFieldsReader; import org.apache.lucene.codecs.StoredFieldsReader;
import org.apache.lucene.codecs.TermVectorsReader; import org.apache.lucene.codecs.TermVectorsReader;
import org.apache.lucene.internal.hppc.LongArrayList;
import org.apache.lucene.internal.tests.SegmentReaderAccess; import org.apache.lucene.internal.tests.SegmentReaderAccess;
import org.apache.lucene.internal.tests.TestSecrets; import org.apache.lucene.internal.tests.TestSecrets;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IOContext;
import org.apache.lucene.util.Bits; import org.apache.lucene.util.Bits;
import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.hppc.LongArrayList;
/** /**
* IndexReader implementation over a single segment. * IndexReader implementation over a single segment.

View File

@ -15,7 +15,7 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.lucene.util.hppc; package org.apache.lucene.internal.hppc;
import java.util.Iterator; import java.util.Iterator;
import java.util.NoSuchElementException; import java.util.NoSuchElementException;
@ -24,6 +24,8 @@ import java.util.NoSuchElementException;
* Simplifies the implementation of iterators a bit. Modeled loosely after Google Guava's API. * Simplifies the implementation of iterators a bit. Modeled loosely after Google Guava's API.
* *
* <p>Forked from com.carrotsearch.hppc.AbstractIterator * <p>Forked from com.carrotsearch.hppc.AbstractIterator
*
* @lucene.internal
*/ */
public abstract class AbstractIterator<E> implements Iterator<E> { public abstract class AbstractIterator<E> implements Iterator<E> {
private static final int NOT_CACHED = 0; private static final int NOT_CACHED = 0;

View File

@ -15,7 +15,7 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.lucene.util.hppc; package org.apache.lucene.internal.hppc;
/** /**
* Bit mixing utilities. The purpose of these methods is to evenly distribute key space over int32 * Bit mixing utilities. The purpose of these methods is to evenly distribute key space over int32
@ -23,7 +23,9 @@ package org.apache.lucene.util.hppc;
* *
* <p>Forked from com.carrotsearch.hppc.BitMixer * <p>Forked from com.carrotsearch.hppc.BitMixer
* *
* <p>github: https://github.com/carrotsearch/hppc release: 0.9.0 * <p>github: https://github.com/carrotsearch/hppc release: 0.10.0
*
* @lucene.internal
*/ */
public final class BitMixer { public final class BitMixer {

View File

@ -15,12 +15,16 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.lucene.util.hppc; package org.apache.lucene.internal.hppc;
import java.util.IllegalFormatException; import java.util.IllegalFormatException;
import java.util.Locale; import java.util.Locale;
/** BufferAllocationException forked from HPPC */ /**
* BufferAllocationException forked from HPPC.
*
* @lucene.internal
*/
public class BufferAllocationException extends RuntimeException { public class BufferAllocationException extends RuntimeException {
public BufferAllocationException(String message) { public BufferAllocationException(String message) {
super(message); super(message);

View File

@ -15,9 +15,13 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.lucene.util.hppc; package org.apache.lucene.internal.hppc;
/** Forked from HPPC, holding int index and char value */ /**
* Forked from HPPC, holding int index and char value.
*
* @lucene.internal
*/
public final class CharCursor { public final class CharCursor {
/** /**
* The current value's index in the container this cursor belongs to. The meaning of this index is * The current value's index in the container this cursor belongs to. The meaning of this index is

View File

@ -15,18 +15,7 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.lucene.util.hppc; package org.apache.lucene.internal.hppc;
import static org.apache.lucene.util.hppc.HashContainers.DEFAULT_EXPECTED_ELEMENTS;
import static org.apache.lucene.util.hppc.HashContainers.DEFAULT_LOAD_FACTOR;
import static org.apache.lucene.util.hppc.HashContainers.ITERATION_SEED;
import static org.apache.lucene.util.hppc.HashContainers.MAX_LOAD_FACTOR;
import static org.apache.lucene.util.hppc.HashContainers.MIN_LOAD_FACTOR;
import static org.apache.lucene.util.hppc.HashContainers.checkLoadFactor;
import static org.apache.lucene.util.hppc.HashContainers.expandAtCount;
import static org.apache.lucene.util.hppc.HashContainers.iterationIncrement;
import static org.apache.lucene.util.hppc.HashContainers.minBufferSize;
import static org.apache.lucene.util.hppc.HashContainers.nextBufferSize;
import java.util.Arrays; import java.util.Arrays;
import java.util.Iterator; import java.util.Iterator;
@ -39,7 +28,9 @@ import org.apache.lucene.util.RamUsageEstimator;
* *
* <p>Mostly forked and trimmed from com.carrotsearch.hppc.CharHashSet * <p>Mostly forked and trimmed from com.carrotsearch.hppc.CharHashSet
* *
* <p>github: https://github.com/carrotsearch/hppc release 0.9.0 * <p>github: https://github.com/carrotsearch/hppc release 0.10.0
*
* @lucene.internal
*/ */
public class CharHashSet implements Iterable<CharCursor>, Accountable, Cloneable { public class CharHashSet implements Iterable<CharCursor>, Accountable, Cloneable {
@ -76,7 +67,7 @@ public class CharHashSet implements Iterable<CharCursor>, Accountable, Cloneable
/** New instance with sane defaults. */ /** New instance with sane defaults. */
public CharHashSet() { public CharHashSet() {
this(DEFAULT_EXPECTED_ELEMENTS); this(HashContainers.DEFAULT_EXPECTED_ELEMENTS);
} }
/** /**
@ -86,7 +77,7 @@ public class CharHashSet implements Iterable<CharCursor>, Accountable, Cloneable
* (inclusive). * (inclusive).
*/ */
public CharHashSet(int expectedElements) { public CharHashSet(int expectedElements) {
this(expectedElements, DEFAULT_LOAD_FACTOR); this(expectedElements, HashContainers.DEFAULT_LOAD_FACTOR);
} }
/** /**
@ -99,7 +90,7 @@ public class CharHashSet implements Iterable<CharCursor>, Accountable, Cloneable
*/ */
public CharHashSet(int expectedElements, double loadFactor) { public CharHashSet(int expectedElements, double loadFactor) {
this.loadFactor = verifyLoadFactor(loadFactor); this.loadFactor = verifyLoadFactor(loadFactor);
iterationSeed = ITERATION_SEED.incrementAndGet(); iterationSeed = HashContainers.ITERATION_SEED.incrementAndGet();
ensureCapacity(expectedElements); ensureCapacity(expectedElements);
} }
@ -193,7 +184,7 @@ public class CharHashSet implements Iterable<CharCursor>, Accountable, Cloneable
final char[] keys = this.keys; final char[] keys = this.keys;
int seed = nextIterationSeed(); int seed = nextIterationSeed();
int inc = iterationIncrement(seed); int inc = HashContainers.iterationIncrement(seed);
for (int i = 0, mask = this.mask, slot = seed & mask; for (int i = 0, mask = this.mask, slot = seed & mask;
i <= mask; i <= mask;
i++, slot = (slot + inc) & mask) { i++, slot = (slot + inc) & mask) {
@ -292,7 +283,7 @@ public class CharHashSet implements Iterable<CharCursor>, Accountable, Cloneable
assigned = 0; assigned = 0;
hasEmptyKey = false; hasEmptyKey = false;
keys = null; keys = null;
ensureCapacity(DEFAULT_EXPECTED_ELEMENTS); ensureCapacity(HashContainers.DEFAULT_EXPECTED_ELEMENTS);
} }
public boolean isEmpty() { public boolean isEmpty() {
@ -308,7 +299,7 @@ public class CharHashSet implements Iterable<CharCursor>, Accountable, Cloneable
public void ensureCapacity(int expectedElements) { public void ensureCapacity(int expectedElements) {
if (expectedElements > resizeAt || keys == null) { if (expectedElements > resizeAt || keys == null) {
final char[] prevKeys = this.keys; final char[] prevKeys = this.keys;
allocateBuffers(minBufferSize(expectedElements, loadFactor)); allocateBuffers(HashContainers.minBufferSize(expectedElements, loadFactor));
if (prevKeys != null && !isEmpty()) { if (prevKeys != null && !isEmpty()) {
rehash(prevKeys); rehash(prevKeys);
} }
@ -360,7 +351,7 @@ public class CharHashSet implements Iterable<CharCursor>, Accountable, Cloneable
CharHashSet cloned = (CharHashSet) super.clone(); CharHashSet cloned = (CharHashSet) super.clone();
cloned.keys = keys.clone(); cloned.keys = keys.clone();
cloned.hasEmptyKey = hasEmptyKey; cloned.hasEmptyKey = hasEmptyKey;
cloned.iterationSeed = ITERATION_SEED.incrementAndGet(); cloned.iterationSeed = HashContainers.ITERATION_SEED.incrementAndGet();
return cloned; return cloned;
} catch (CloneNotSupportedException e) { } catch (CloneNotSupportedException e) {
throw new RuntimeException(e); throw new RuntimeException(e);
@ -396,7 +387,7 @@ public class CharHashSet implements Iterable<CharCursor>, Accountable, Cloneable
public EntryIterator() { public EntryIterator() {
cursor = new CharCursor(); cursor = new CharCursor();
int seed = nextIterationSeed(); int seed = nextIterationSeed();
increment = iterationIncrement(seed); increment = HashContainers.iterationIncrement(seed);
slot = seed & mask; slot = seed & mask;
} }
@ -589,7 +580,8 @@ public class CharHashSet implements Iterable<CharCursor>, Accountable, Cloneable
* factors. * factors.
*/ */
protected double verifyLoadFactor(double loadFactor) { protected double verifyLoadFactor(double loadFactor) {
checkLoadFactor(loadFactor, MIN_LOAD_FACTOR, MAX_LOAD_FACTOR); HashContainers.checkLoadFactor(
loadFactor, HashContainers.MIN_LOAD_FACTOR, HashContainers.MAX_LOAD_FACTOR);
return loadFactor; return loadFactor;
} }
@ -631,7 +623,7 @@ public class CharHashSet implements Iterable<CharCursor>, Accountable, Cloneable
e, this.keys == null ? 0 : size(), arraySize); e, this.keys == null ? 0 : size(), arraySize);
} }
this.resizeAt = expandAtCount(arraySize, loadFactor); this.resizeAt = HashContainers.expandAtCount(arraySize, loadFactor);
this.mask = arraySize - 1; this.mask = arraySize - 1;
} }
@ -648,7 +640,7 @@ public class CharHashSet implements Iterable<CharCursor>, Accountable, Cloneable
// Try to allocate new buffers first. If we OOM, we leave in a consistent state. // Try to allocate new buffers first. If we OOM, we leave in a consistent state.
final char[] prevKeys = this.keys; final char[] prevKeys = this.keys;
allocateBuffers(nextBufferSize(mask + 1, size(), loadFactor)); allocateBuffers(HashContainers.nextBufferSize(mask + 1, size(), loadFactor));
assert this.keys.length > prevKeys.length; assert this.keys.length > prevKeys.length;
// We have succeeded at allocating new data so insert the pending key/value at // We have succeeded at allocating new data so insert the pending key/value at

View File

@ -15,19 +15,19 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.lucene.util.hppc; package org.apache.lucene.internal.hppc;
import static org.apache.lucene.util.hppc.HashContainers.DEFAULT_EXPECTED_ELEMENTS; import static org.apache.lucene.internal.hppc.HashContainers.DEFAULT_EXPECTED_ELEMENTS;
import static org.apache.lucene.util.hppc.HashContainers.DEFAULT_LOAD_FACTOR; import static org.apache.lucene.internal.hppc.HashContainers.DEFAULT_LOAD_FACTOR;
import static org.apache.lucene.util.hppc.HashContainers.ITERATION_SEED; import static org.apache.lucene.internal.hppc.HashContainers.ITERATION_SEED;
import static org.apache.lucene.util.hppc.HashContainers.MAX_LOAD_FACTOR; import static org.apache.lucene.internal.hppc.HashContainers.MAX_LOAD_FACTOR;
import static org.apache.lucene.util.hppc.HashContainers.MIN_LOAD_FACTOR; import static org.apache.lucene.internal.hppc.HashContainers.MIN_LOAD_FACTOR;
import static org.apache.lucene.util.hppc.HashContainers.checkLoadFactor; import static org.apache.lucene.internal.hppc.HashContainers.checkLoadFactor;
import static org.apache.lucene.util.hppc.HashContainers.checkPowerOfTwo; import static org.apache.lucene.internal.hppc.HashContainers.checkPowerOfTwo;
import static org.apache.lucene.util.hppc.HashContainers.expandAtCount; import static org.apache.lucene.internal.hppc.HashContainers.expandAtCount;
import static org.apache.lucene.util.hppc.HashContainers.iterationIncrement; import static org.apache.lucene.internal.hppc.HashContainers.iterationIncrement;
import static org.apache.lucene.util.hppc.HashContainers.minBufferSize; import static org.apache.lucene.internal.hppc.HashContainers.minBufferSize;
import static org.apache.lucene.util.hppc.HashContainers.nextBufferSize; import static org.apache.lucene.internal.hppc.HashContainers.nextBufferSize;
import java.util.Arrays; import java.util.Arrays;
import java.util.Iterator; import java.util.Iterator;
@ -40,7 +40,9 @@ import org.apache.lucene.util.RamUsageEstimator;
* *
* <p>Mostly forked and trimmed from com.carrotsearch.hppc.CharObjectHashMap * <p>Mostly forked and trimmed from com.carrotsearch.hppc.CharObjectHashMap
* *
* <p>github: https://github.com/carrotsearch/hppc release 0.9.0 * <p>github: https://github.com/carrotsearch/hppc release 0.10.0
*
* @lucene.internal
*/ */
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public class CharObjectHashMap<VType> public class CharObjectHashMap<VType>

View File

@ -0,0 +1,39 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.internal.hppc;
/**
* Forked from HPPC, holding int index and double value.
*
* @lucene.internal
*/
public final class DoubleCursor {

  /**
   * The current value's index in the container this cursor belongs to. The meaning of this index is
   * defined by the container (usually it will be an index in the underlying storage buffer).
   */
  public int index;

  /** The current value. */
  public double value;

  // Renders as "[cursor, index: <index>, value: <value>]"; the double uses Java's
  // default Double.toString conversion.
  @Override
  public String toString() {
    StringBuilder text = new StringBuilder();
    text.append("[cursor, index: ").append(index).append(", value: ").append(value).append("]");
    return text.toString();
  }
}

View File

@ -0,0 +1,446 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.internal.hppc;
import static org.apache.lucene.internal.hppc.HashContainers.DEFAULT_EXPECTED_ELEMENTS;
import java.util.Arrays;
import java.util.Iterator;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.RamUsageEstimator;
/**
* An array-backed list of {@code float}.
*
* <p>Mostly forked and trimmed from com.carrotsearch.hppc.FloatArrayList
*
* <p>github: https://github.com/carrotsearch/hppc release 0.10.0
*
* @lucene.internal
*/
public class FloatArrayList implements Iterable<FloatCursor>, Cloneable, Accountable {
  // Shallow size of an empty instance; used as the fixed term of ramBytesUsed().
  private static final long BASE_RAM_BYTES_USED =
      RamUsageEstimator.shallowSizeOfInstance(FloatArrayList.class);

  /** An immutable empty buffer (array). */
  public static final float[] EMPTY_ARRAY = new float[0];

  /**
   * Internal array for storing the list. The array may be larger than the current size ({@link
   * #size()}).
   */
  public float[] buffer;

  /** Current number of elements stored in {@link #buffer}. */
  public int elementsCount;

  /** New instance with sane defaults. */
  public FloatArrayList() {
    this(DEFAULT_EXPECTED_ELEMENTS);
  }

  /**
   * New instance with sane defaults.
   *
   * @param expectedElements The expected number of elements guaranteed not to cause buffer
   *     expansion (inclusive).
   */
  public FloatArrayList(int expectedElements) {
    buffer = new float[expectedElements];
  }

  /**
   * Creates a new list from the elements of another list in its iteration order.
   *
   * @param list source list to copy; presized so no resize happens during the copy
   */
  public FloatArrayList(FloatArrayList list) {
    this(list.size());
    addAll(list);
  }
/** Appends the given element at the end of the list, growing the buffer if required. */
public void add(float e1) {
  ensureBufferSpace(1);
  buffer[elementsCount] = e1;
  elementsCount++;
}
/**
 * Add all elements from a range of given array to the list.
 *
 * @param elements source array
 * @param start index of the first element of the range to copy
 * @param length number of elements to copy (must be non-negative)
 */
public void add(float[] elements, int start, int length) {
  assert length >= 0 : "Length must be >= 0";

  ensureBufferSpace(length);
  System.arraycopy(elements, start, buffer, elementsCount, length);
  elementsCount += length;
}

/**
 * Vararg-signature method for adding elements at the end of the list.
 *
 * <p><b>This method is handy, but costly if used in tight loops (anonymous array passing)</b>
 */
public final void add(float... elements) {
  add(elements, 0, elements.length);
}

/**
 * Adds all elements from another list.
 *
 * @return the number of elements added
 */
public int addAll(FloatArrayList list) {
  final int size = list.size();
  // Reserve space up front so the per-element add() calls never trigger a resize.
  ensureBufferSpace(size);

  for (FloatCursor cursor : list) {
    add(cursor.value);
  }

  return size;
}

/**
 * Adds all elements from another iterable.
 *
 * @return the number of elements added
 */
public int addAll(Iterable<? extends FloatCursor> iterable) {
  int size = 0;
  for (FloatCursor cursor : iterable) {
    add(cursor.value);
    size++;
  }
  return size;
}
/** Inserts <code>e1</code> at position <code>index</code>, shifting later elements right by one. */
public void insert(int index, float e1) {
  assert (index >= 0 && index <= size())
      : "Index " + index + " out of bounds [" + 0 + ", " + size() + "].";

  ensureBufferSpace(1);
  final int tail = elementsCount - index;
  System.arraycopy(buffer, index, buffer, index + 1, tail);
  buffer[index] = e1;
  elementsCount += 1;
}
/** Returns the element at the given position. */
public float get(int index) {
  assert (index >= 0 && index < size())
      : "Index " + index + " out of bounds [" + 0 + ", " + size() + ").";

  return buffer[index];
}

/**
 * Replaces the element at the given position.
 *
 * @return the value previously stored at <code>index</code>
 */
public float set(int index, float e1) {
  assert (index >= 0 && index < size())
      : "Index " + index + " out of bounds [" + 0 + ", " + size() + ").";

  final float v = buffer[index];
  buffer[index] = e1;
  return v;
}

/** Removes the element at the specified position in this container and returns it. */
public float removeAt(int index) {
  assert (index >= 0 && index < size())
      : "Index " + index + " out of bounds [" + 0 + ", " + size() + ").";

  final float v = buffer[index];
  // Shift the tail left by one; --elementsCount both shrinks the list and sizes the copy.
  System.arraycopy(buffer, index + 1, buffer, index, --elementsCount - index);

  return v;
}

/** Removes and returns the last element of this list. */
public float removeLast() {
  assert !isEmpty() : "List is empty";

  return buffer[--elementsCount];
}

/**
 * Removes from this list all the elements with indexes between <code>fromIndex</code>, inclusive,
 * and <code>toIndex</code>, exclusive.
 */
public void removeRange(int fromIndex, int toIndex) {
  assert (fromIndex >= 0 && fromIndex <= size())
      : "Index " + fromIndex + " out of bounds [" + 0 + ", " + size() + ").";
  assert (toIndex >= 0 && toIndex <= size())
      : "Index " + toIndex + " out of bounds [" + 0 + ", " + size() + "].";
  assert fromIndex <= toIndex : "fromIndex must be <= toIndex: " + fromIndex + ", " + toIndex;

  // Close the gap by copying the tail over the removed range, then shrink the size.
  System.arraycopy(buffer, toIndex, buffer, fromIndex, elementsCount - toIndex);
  final int count = toIndex - fromIndex;
  elementsCount -= count;
}

/**
 * Removes the first element that equals <code>e</code>, returning whether an element has been
 * removed.
 */
public boolean removeElement(float e) {
  return removeFirst(e) != -1;
}

/**
 * Removes the first element that equals <code>e1</code>, returning its deleted position or <code>
 * -1</code> if the element was not found.
 */
public int removeFirst(float e1) {
  final int index = indexOf(e1);
  if (index >= 0) removeAt(index);
  return index;
}

/**
 * Removes the last element that equals <code>e1</code>, returning its deleted position or <code>
 * -1</code> if the element was not found.
 */
public int removeLast(float e1) {
  final int index = lastIndexOf(e1);
  if (index >= 0) removeAt(index);
  return index;
}
/**
 * Removes every occurrence of <code>e</code> from this list, compacting the survivors in place.
 *
 * <p>Note: comparison uses <code>==</code>, so NaN occurrences are never matched.
 *
 * @param e Element to be removed from this collection, if present.
 * @return The number of removed elements as a result of this call.
 */
public int removeAll(float e) {
  int dst = 0;
  final int limit = elementsCount;
  for (int src = 0; src < limit; src++) {
    final float candidate = buffer[src];
    if (candidate == e) {
      continue; // skip (i.e. delete) this occurrence
    }
    if (dst != src) {
      buffer[dst] = candidate;
    }
    dst++;
  }
  final int removed = limit - dst;
  elementsCount = dst;
  return removed;
}
/** Returns <code>true</code> if this list contains an element equal (via <code>==</code>) to <code>e1</code>. */
public boolean contains(float e1) {
  return indexOf(e1) != -1;
}

/** Returns the position of the first occurrence of <code>e1</code>, or -1 if absent. */
public int indexOf(float e1) {
  final int count = elementsCount;
  for (int pos = 0; pos < count; pos++) {
    if (buffer[pos] == e1) {
      return pos;
    }
  }
  return -1;
}

/** Returns the position of the last occurrence of <code>e1</code>, or -1 if absent. */
public int lastIndexOf(float e1) {
  for (int pos = elementsCount; --pos >= 0; ) {
    if (buffer[pos] == e1) {
      return pos;
    }
  }
  return -1;
}

/** Returns <code>true</code> when this list holds no elements. */
public boolean isEmpty() {
  return size() == 0;
}
/**
 * Ensure this container can hold at least the given number of elements without resizing its
 * buffers.
 *
 * @param expectedElements The total number of elements, inclusive.
 */
public void ensureCapacity(int expectedElements) {
  if (expectedElements > buffer.length) {
    ensureBufferSpace(expectedElements - size());
  }
}

/**
 * Ensures the internal buffer has enough free slots to store <code>expectedAdditions</code>.
 * Increases internal buffer size if needed.
 */
protected void ensureBufferSpace(int expectedAdditions) {
  if (elementsCount + expectedAdditions > buffer.length) {
    this.buffer = ArrayUtil.grow(buffer, elementsCount + expectedAdditions);
  }
}

/**
 * Truncate or expand the list to the new size. If the list is truncated, the buffer will not be
 * reallocated (use {@link #trimToSize()} if you need a truncated buffer), but the truncated
 * values will be reset to the default value (zero). If the list is expanded, the elements beyond
 * the current size are initialized with JVM-defaults (zero or <code>null</code> values).
 */
public void resize(int newSize) {
  if (newSize <= buffer.length) {
    if (newSize < elementsCount) {
      // Truncating: zero out the abandoned tail so stale values are not retained.
      Arrays.fill(buffer, newSize, elementsCount, 0f);
    } else {
      // Expanding within capacity: zero the newly exposed slots.
      Arrays.fill(buffer, elementsCount, newSize, 0f);
    }
  } else {
    // Growing beyond capacity: ArrayUtil.grow yields a zero-filled tail already.
    ensureCapacity(newSize);
  }
  this.elementsCount = newSize;
}

/** Returns the current number of elements stored in this list. */
public int size() {
  return elementsCount;
}
/** Shrinks the internal storage so its length matches the current element count exactly. */
public void trimToSize() {
  if (buffer.length != elementsCount) {
    buffer = toArray();
  }
}
/**
 * Sets the number of stored elements to zero. Releases and initializes the internal storage array
 * to default values. To clear the list without cleaning the buffer, simply set the {@link
 * #elementsCount} field to zero.
 */
public void clear() {
  Arrays.fill(buffer, 0, elementsCount, 0f);
  this.elementsCount = 0;
}

/** Sets the number of stored elements to zero and releases the internal storage array. */
public void release() {
  this.buffer = EMPTY_ARRAY;
  this.elementsCount = 0;
}

/** The returned array is sized to match exactly the number of elements of the stack. */
public float[] toArray() {
  return ArrayUtil.copyOfSubArray(buffer, 0, elementsCount);
}

/**
 * Clone this object. The buffer is deep-copied so the clone is fully independent of this list.
 */
@Override
public FloatArrayList clone() {
  try {
    final FloatArrayList cloned = (FloatArrayList) super.clone();
    cloned.buffer = buffer.clone();
    return cloned;
  } catch (CloneNotSupportedException e) {
    // Cannot happen: this class implements Cloneable.
    throw new RuntimeException(e);
  }
}
/** Returns an order-dependent hash of the stored elements (31-based polynomial over mixed bits). */
@Override
public int hashCode() {
  int h = 1, max = elementsCount;
  for (int i = 0; i < max; i++) {
    h = 31 * h + BitMixer.mix(this.buffer[i]);
  }
  return h;
}
/**
 * Returns <code>true</code> only if the other object is an instance of the same class and with
 * the same elements.
 */
@Override
public boolean equals(Object obj) {
  // Exact class match (not instanceof) keeps equals symmetric across subclasses.
  return (this == obj)
      || (obj != null && getClass() == obj.getClass() && equalElements(getClass().cast(obj)));
}
/** Compare index-aligned elements against another {@link FloatArrayList}. */
protected boolean equalElements(FloatArrayList other) {
  final int size = size();
  if (size != other.size()) {
    return false;
  }
  // Primitive float comparison (==): NaN elements never compare equal, matching the original.
  for (int i = 0; i < size; i++) {
    if (get(i) != other.get(i)) {
      return false;
    }
  }
  return true;
}
/** Convert the contents of this list to a human-friendly string. */
@Override
public String toString() {
  // Delegates to Arrays.toString over an exact-size copy, e.g. "[1.0, 2.0]".
  return Arrays.toString(this.toArray());
}
/** Returns the memory usage of this object's shallow shell plus its backing buffer. */
@Override
public long ramBytesUsed() {
  return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(buffer);
}
/** Sorts the elements in this list and returns this list. */
public FloatArrayList sort() {
  // Only the occupied prefix [0, elementsCount) is sorted.
  Arrays.sort(buffer, 0, elementsCount);
  return this;
}
/** Reverses the elements in this list and returns this list. */
public FloatArrayList reverse() {
  // Classic two-pointer in-place reversal over the occupied prefix.
  int left = 0;
  int right = elementsCount - 1;
  while (left < right) {
    final float tmp = buffer[left];
    buffer[left] = buffer[right];
    buffer[right] = tmp;
    left++;
    right--;
  }
  return this;
}
/** An iterator implementation for {@link FloatArrayList#iterator}. */
static final class ValueIterator extends AbstractIterator<FloatCursor> {
  // Single cursor instance is reused across next() calls (HPPC convention).
  private final FloatCursor cursor;

  private final float[] buffer;
  private final int size;

  public ValueIterator(float[] buffer, int size) {
    this.cursor = new FloatCursor();
    // Start one before the first element; fetch() pre-increments.
    this.cursor.index = -1;
    this.size = size;
    this.buffer = buffer;
  }

  @Override
  protected FloatCursor fetch() {
    if (cursor.index + 1 == size) return done();
    cursor.value = buffer[++cursor.index];
    return cursor;
  }
}
/** Returns a cursor-based iterator over the elements; the cursor object is reused per call. */
@Override
public Iterator<FloatCursor> iterator() {
  return new ValueIterator(buffer, size());
}
/**
 * Create a list from a variable number of arguments or an array of <code>float</code>. The
 * elements are copied from the argument to the internal buffer.
 */
public static FloatArrayList from(float... elements) {
  final FloatArrayList list = new FloatArrayList(elements.length);
  list.add(elements);
  return list;
}
}

View File

@ -0,0 +1,39 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.internal.hppc;
/**
* Forked from HPPC, holding int index and float value.
*
* @lucene.internal
*/
public final class FloatCursor {
  /**
   * The current value's index in the container this cursor belongs to. The meaning of this index
   * is defined by the container (usually it will be an index in the underlying storage buffer).
   */
  public int index;

  /** The current value. */
  public float value;

  @Override
  public String toString() {
    // Same rendering as the original string concatenation, built explicitly.
    return new StringBuilder()
        .append("[cursor, index: ")
        .append(index)
        .append(", value: ")
        .append(value)
        .append("]")
        .toString();
  }
}

View File

@ -15,13 +15,17 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.lucene.util.hppc; package org.apache.lucene.internal.hppc;
import static org.apache.lucene.util.BitUtil.nextHighestPowerOfTwo; import static org.apache.lucene.util.BitUtil.nextHighestPowerOfTwo;
import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicInteger;
/** Constants for primitive maps. */ /**
* Constants for primitive maps.
*
* @lucene.internal
*/
class HashContainers { class HashContainers {
static final int DEFAULT_EXPECTED_ELEMENTS = 4; static final int DEFAULT_EXPECTED_ELEMENTS = 4;

View File

@ -15,9 +15,9 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.lucene.util.hppc; package org.apache.lucene.internal.hppc;
import static org.apache.lucene.util.hppc.HashContainers.DEFAULT_EXPECTED_ELEMENTS; import static org.apache.lucene.internal.hppc.HashContainers.DEFAULT_EXPECTED_ELEMENTS;
import java.util.Arrays; import java.util.Arrays;
import java.util.Iterator; import java.util.Iterator;
@ -31,7 +31,9 @@ import org.apache.lucene.util.RamUsageEstimator;
* *
* <p>Mostly forked and trimmed from com.carrotsearch.hppc.IntArrayList * <p>Mostly forked and trimmed from com.carrotsearch.hppc.IntArrayList
* *
* <p>github: https://github.com/carrotsearch/hppc release 0.9.0 * <p>github: https://github.com/carrotsearch/hppc release 0.10.0
*
* @lucene.internal
*/ */
public class IntArrayList implements Iterable<IntCursor>, Cloneable, Accountable { public class IntArrayList implements Iterable<IntCursor>, Cloneable, Accountable {
private static final long BASE_RAM_BYTES_USED = private static final long BASE_RAM_BYTES_USED =
@ -40,13 +42,11 @@ public class IntArrayList implements Iterable<IntCursor>, Cloneable, Accountable
/** An immutable empty buffer (array). */ /** An immutable empty buffer (array). */
public static final int[] EMPTY_ARRAY = new int[0]; public static final int[] EMPTY_ARRAY = new int[0];
;
/** /**
* Internal array for storing the list. The array may be larger than the current size ({@link * Internal array for storing the list. The array may be larger than the current size ({@link
* #size()}). * #size()}).
*/ */
public int[] buffer = EMPTY_ARRAY; public int[] buffer;
/** Current number of elements stored in {@link #buffer}. */ /** Current number of elements stored in {@link #buffer}. */
public int elementsCount; public int elementsCount;
@ -262,8 +262,7 @@ public class IntArrayList implements Iterable<IntCursor>, Cloneable, Accountable
* @param expectedElements The total number of elements, inclusive. * @param expectedElements The total number of elements, inclusive.
*/ */
public void ensureCapacity(int expectedElements) { public void ensureCapacity(int expectedElements) {
final int bufferLen = (buffer == null ? 0 : buffer.length); if (expectedElements > buffer.length) {
if (expectedElements > bufferLen) {
ensureBufferSpace(expectedElements - size()); ensureBufferSpace(expectedElements - size());
} }
} }
@ -273,8 +272,7 @@ public class IntArrayList implements Iterable<IntCursor>, Cloneable, Accountable
* Increases internal buffer size if needed. * Increases internal buffer size if needed.
*/ */
protected void ensureBufferSpace(int expectedAdditions) { protected void ensureBufferSpace(int expectedAdditions) {
final int bufferLen = (buffer == null ? 0 : buffer.length); if (elementsCount + expectedAdditions > buffer.length) {
if (elementsCount + expectedAdditions > bufferLen) {
this.buffer = ArrayUtil.grow(buffer, elementsCount + expectedAdditions); this.buffer = ArrayUtil.grow(buffer, elementsCount + expectedAdditions);
} }
} }

View File

@ -15,9 +15,13 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.lucene.util.hppc; package org.apache.lucene.internal.hppc;
/** Forked from HPPC, holding int index and int value */ /**
* Forked from HPPC, holding int index and int value.
*
* @lucene.internal
*/
public final class IntCursor { public final class IntCursor {
/** /**
* The current value's index in the container this cursor belongs to. The meaning of this index is * The current value's index in the container this cursor belongs to. The meaning of this index is

View File

@ -0,0 +1,853 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.internal.hppc;
import static org.apache.lucene.internal.hppc.HashContainers.DEFAULT_EXPECTED_ELEMENTS;
import static org.apache.lucene.internal.hppc.HashContainers.DEFAULT_LOAD_FACTOR;
import static org.apache.lucene.internal.hppc.HashContainers.ITERATION_SEED;
import static org.apache.lucene.internal.hppc.HashContainers.MAX_LOAD_FACTOR;
import static org.apache.lucene.internal.hppc.HashContainers.MIN_LOAD_FACTOR;
import static org.apache.lucene.internal.hppc.HashContainers.checkLoadFactor;
import static org.apache.lucene.internal.hppc.HashContainers.checkPowerOfTwo;
import static org.apache.lucene.internal.hppc.HashContainers.expandAtCount;
import static org.apache.lucene.internal.hppc.HashContainers.iterationIncrement;
import static org.apache.lucene.internal.hppc.HashContainers.minBufferSize;
import static org.apache.lucene.internal.hppc.HashContainers.nextBufferSize;
import java.util.Arrays;
import java.util.Iterator;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.RamUsageEstimator;
/**
* A hash map of <code>int</code> to <code>double</code>, implemented using open addressing with
* linear probing for collision resolution.
*
* <p>Mostly forked and trimmed from com.carrotsearch.hppc.IntDoubleHashMap
*
* <p>github: https://github.com/carrotsearch/hppc release 0.10.0
*
* @lucene.internal
*/
public class IntDoubleHashMap
implements Iterable<IntDoubleHashMap.IntDoubleCursor>, Accountable, Cloneable {
private static final long BASE_RAM_BYTES_USED =
RamUsageEstimator.shallowSizeOfInstance(IntDoubleHashMap.class);
/** The array holding keys. */
public int[] keys;
/** The array holding values. */
public double[] values;
/**
* The number of stored keys (assigned key slots), excluding the special "empty" key, if any (use
* {@link #size()} instead).
*
* @see #size()
*/
protected int assigned;
/** Mask for slot scans in {@link #keys}. */
protected int mask;
/** Expand (rehash) {@link #keys} when {@link #assigned} hits this value. */
protected int resizeAt;
/** Special treatment for the "empty slot" key marker. */
protected boolean hasEmptyKey;
/** The load factor for {@link #keys}. */
protected double loadFactor;
/** Seed used to ensure the hash iteration order is different from an iteration to another. */
protected int iterationSeed;
/** New instance with sane defaults. */
public IntDoubleHashMap() {
  // Delegates to the sizing constructor with the default expected element count.
  this(DEFAULT_EXPECTED_ELEMENTS);
}
/**
 * New instance with sane defaults.
 *
 * @param expectedElements The expected number of elements guaranteed not to cause buffer
 *     expansion (inclusive).
 */
public IntDoubleHashMap(int expectedElements) {
  this(expectedElements, DEFAULT_LOAD_FACTOR);
}
/**
 * New instance with the provided defaults.
 *
 * @param expectedElements The expected number of elements guaranteed not to cause a rehash
 *     (inclusive).
 * @param loadFactor The load factor for internal buffers. Insane load factors (zero, full
 *     capacity) are rejected by {@link #verifyLoadFactor(double)}.
 */
public IntDoubleHashMap(int expectedElements, double loadFactor) {
  this.loadFactor = verifyLoadFactor(loadFactor);
  // Per-instance seed randomizes iteration order between iterations/instances.
  iterationSeed = ITERATION_SEED.incrementAndGet();
  ensureCapacity(expectedElements);
}
/** Create a hash map from all key-value pairs of another map. */
public IntDoubleHashMap(IntDoubleHashMap map) {
  this(map.size());
  putAll(map);
}
/**
 * Associates {@code value} with {@code key}.
 *
 * @return The previous value associated with {@code key}, or {@code 0} if there was none.
 */
public double put(int key, double value) {
  assert assigned < mask + 1;
  final int mask = this.mask;
  if (((key) == 0)) {
    // Key 0 is the empty-slot marker; its value lives in the dedicated slot values[mask + 1].
    double previousValue = hasEmptyKey ? values[mask + 1] : 0;
    hasEmptyKey = true;
    values[mask + 1] = value;
    return previousValue;
  } else {
    final int[] keys = this.keys;
    // Linear probing from the hashed slot.
    int slot = hashKey(key) & mask;

    int existing;
    while (!((existing = keys[slot]) == 0)) {
      if (((existing) == (key))) {
        final double previousValue = values[slot];
        values[slot] = value;
        return previousValue;
      }
      slot = (slot + 1) & mask;
    }

    if (assigned == resizeAt) {
      // At capacity threshold: grow first, then insert + rehash.
      allocateThenInsertThenRehash(slot, key, value);
    } else {
      keys[slot] = key;
      values[slot] = value;
    }

    assigned++;
    return 0;
  }
}
/**
 * Puts all key/value pairs from the given iterable into this map.
 *
 * @return The number of keys added (i.e. keys that did not exist before this call).
 */
public int putAll(Iterable<? extends IntDoubleCursor> iterable) {
  final int sizeBefore = size();
  for (IntDoubleCursor cursor : iterable) {
    put(cursor.key, cursor.value);
  }
  return size() - sizeBefore;
}
/**
 * <a href="http://trove4j.sourceforge.net">Trove</a>-inspired API method. An equivalent of the
 * following code:
 *
 * <pre>
 * if (!map.containsKey(key)) map.put(value);
 * </pre>
 *
 * @param key The key of the value to check.
 * @param value The value to put if <code>key</code> does not exist.
 * @return <code>true</code> if <code>key</code> did not exist and <code>value</code> was placed
 *     in the map.
 */
public boolean putIfAbsent(int key, double value) {
  // Guard clause: a non-negative index means the key is already present.
  final int keyIndex = indexOf(key);
  if (indexExists(keyIndex)) {
    return false;
  }
  indexInsert(keyIndex, key, value);
  return true;
}
/**
 * If <code>key</code> does not exist, <code>putValue</code> is inserted into the map, otherwise
 * the existing value is incremented by <code>incrementValue</code>.
 *
 * @param key The key of the value to adjust.
 * @param putValue The value to put if <code>key</code> does not exist.
 * @param incrementValue The value to add to the existing value if <code>key</code> exists.
 * @return Returns the current value associated with <code>key</code> (after changes).
 */
public double putOrAdd(int key, double putValue, double incrementValue) {
  assert assigned < mask + 1;
  int keyIndex = indexOf(key);
  if (indexExists(keyIndex)) {
    // Key present: increment in place.
    putValue = values[keyIndex] + incrementValue;
    indexReplace(keyIndex, putValue);
  } else {
    indexInsert(keyIndex, key, putValue);
  }
  return putValue;
}
/**
 * Adds <code>incrementValue</code> to any existing value for the given <code>key</code> or
 * inserts <code>incrementValue</code> if <code>key</code> did not previously exist.
 *
 * @param key The key of the value to adjust.
 * @param incrementValue The value to put or add to the existing value if <code>key</code> exists.
 * @return Returns the current value associated with <code>key</code> (after changes).
 */
public double addTo(int key, double incrementValue) {
  // Special case of putOrAdd where the inserted and added values coincide.
  return putOrAdd(key, incrementValue, incrementValue);
}
/**
 * Removes the mapping for {@code key}.
 *
 * @return The value previously associated with {@code key}, or {@code 0} if there was none.
 */
public double remove(int key) {
  final int mask = this.mask;
  if (((key) == 0)) {
    // Empty-key marker: stored out-of-band in the extra slot values[mask + 1].
    if (!hasEmptyKey) {
      return 0;
    }
    hasEmptyKey = false;
    double previousValue = values[mask + 1];
    values[mask + 1] = 0;
    return previousValue;
  } else {
    final int[] keys = this.keys;
    int slot = hashKey(key) & mask;

    int existing;
    while (!((existing = keys[slot]) == 0)) {
      if (((existing) == (key))) {
        final double previousValue = values[slot];
        // Open addressing requires re-packing the probe chain after removal.
        shiftConflictingKeys(slot);
        return previousValue;
      }
      slot = (slot + 1) & mask;
    }

    return 0;
  }
}
/**
 * Returns the value associated with {@code key}, or {@code 0} if the key is absent. Note that
 * {@code 0} is ambiguous; use {@link #getOrDefault} or {@link #containsKey} to disambiguate.
 */
public double get(int key) {
  if (((key) == 0)) {
    return hasEmptyKey ? values[mask + 1] : 0;
  } else {
    final int[] keys = this.keys;
    final int mask = this.mask;
    // Linear probing until we hit the key or an empty slot.
    int slot = hashKey(key) & mask;

    int existing;
    while (!((existing = keys[slot]) == 0)) {
      if (((existing) == (key))) {
        return values[slot];
      }
      slot = (slot + 1) & mask;
    }

    return 0;
  }
}
/** Returns the value associated with {@code key}, or {@code defaultValue} if the key is absent. */
public double getOrDefault(int key, double defaultValue) {
  if (((key) == 0)) {
    return hasEmptyKey ? values[mask + 1] : defaultValue;
  } else {
    final int[] keys = this.keys;
    final int mask = this.mask;
    int slot = hashKey(key) & mask;

    int existing;
    while (!((existing = keys[slot]) == 0)) {
      if (((existing) == (key))) {
        return values[slot];
      }
      slot = (slot + 1) & mask;
    }

    return defaultValue;
  }
}
/** Returns {@code true} if {@code key} exists in this map. */
public boolean containsKey(int key) {
  if (((key) == 0)) {
    return hasEmptyKey;
  } else {
    final int[] keys = this.keys;
    final int mask = this.mask;
    int slot = hashKey(key) & mask;

    int existing;
    while (!((existing = keys[slot]) == 0)) {
      if (((existing) == (key))) {
        return true;
      }
      slot = (slot + 1) & mask;
    }

    return false;
  }
}
/**
 * Returns the internal slot index for {@code key} if present, otherwise the one's complement
 * ({@code ~slot}) of the slot where it would be inserted (always negative).
 */
public int indexOf(int key) {
  final int mask = this.mask;
  if (((key) == 0)) {
    // The empty key maps to the dedicated slot mask + 1.
    return hasEmptyKey ? mask + 1 : ~(mask + 1);
  } else {
    final int[] keys = this.keys;
    int slot = hashKey(key) & mask;

    int existing;
    while (!((existing = keys[slot]) == 0)) {
      if (((existing) == (key))) {
        return slot;
      }
      slot = (slot + 1) & mask;
    }

    return ~slot;
  }
}
/** Returns {@code true} if the index (from {@link #indexOf}) points at an existing key. */
public boolean indexExists(int index) {
  assert index < 0 || index <= mask || (index == mask + 1 && hasEmptyKey);

  // indexOf returns a non-negative slot only for present keys.
  return index >= 0;
}
/** Returns the value at an index previously obtained from {@link #indexOf} for an existing key. */
public double indexGet(int index) {
  assert index >= 0 : "The index must point at an existing key.";
  assert index <= mask || (index == mask + 1 && hasEmptyKey);

  return values[index];
}
/**
 * Replaces the value at an existing-key index (from {@link #indexOf}).
 *
 * @return The previous value at that index.
 */
public double indexReplace(int index, double newValue) {
  assert index >= 0 : "The index must point at an existing key.";
  assert index <= mask || (index == mask + 1 && hasEmptyKey);

  double previousValue = values[index];
  values[index] = newValue;
  return previousValue;
}
/**
 * Inserts a key/value pair at an absent-key index (the negative value returned by {@link
 * #indexOf}).
 */
public void indexInsert(int index, int key, double value) {
  assert index < 0 : "The index must not point at an existing key.";

  // Decode the insertion slot from the one's complement encoding.
  index = ~index;
  if (((key) == 0)) {
    assert index == mask + 1;
    values[index] = value;
    hasEmptyKey = true;
  } else {
    assert ((keys[index]) == 0);

    if (assigned == resizeAt) {
      // At capacity threshold: grow first, then insert + rehash.
      allocateThenInsertThenRehash(index, key, value);
    } else {
      keys[index] = key;
      values[index] = value;
    }

    assigned++;
  }
}
/**
 * Removes the entry at an existing-key index (from {@link #indexOf}).
 *
 * @return The value previously stored at that index.
 */
public double indexRemove(int index) {
  assert index >= 0 : "The index must point at an existing key.";
  assert index <= mask || (index == mask + 1 && hasEmptyKey);

  double previousValue = values[index];
  if (index > mask) {
    // index == mask + 1 is the dedicated empty-key slot.
    assert index == mask + 1;
    hasEmptyKey = false;
    values[index] = 0;
  } else {
    // Re-pack the probe chain around the freed slot.
    shiftConflictingKeys(index);
  }
  return previousValue;
}
/** Removes all entries but keeps the allocated buffers for reuse. */
public void clear() {
  assigned = 0;
  hasEmptyKey = false;

  Arrays.fill(keys, 0);

  // values need not be cleared: slots are unreachable once their keys are zeroed.
}
/** Removes all entries and releases the internal buffers, then reallocates minimal ones. */
public void release() {
  assigned = 0;
  hasEmptyKey = false;

  keys = null;
  values = null;
  ensureCapacity(DEFAULT_EXPECTED_ELEMENTS);
}
/** Returns the number of entries, counting the empty key if present. */
public int size() {
  return assigned + (hasEmptyKey ? 1 : 0);
}
/** Returns {@code true} if this map contains no entries. */
public boolean isEmpty() {
  return size() == 0;
}
/** Order-independent hash: sums mixed key/value hashes over all entries. */
@Override
public int hashCode() {
  // 0xDEADBEEF distinguishes maps that differ only by the presence of the empty key.
  int h = hasEmptyKey ? 0xDEADBEEF : 0;
  for (IntDoubleCursor c : this) {
    h += BitMixer.mix(c.key) + BitMixer.mix(c.value);
  }
  return h;
}
/** Returns {@code true} if the other object is a map of the same class with equal entries. */
@Override
public boolean equals(Object obj) {
  return (this == obj)
      || (obj != null && getClass() == obj.getClass() && equalElements(getClass().cast(obj)));
}
/** Return true if all keys of some other container exist in this container. */
protected boolean equalElements(IntDoubleHashMap other) {
  if (size() != other.size()) {
    return false;
  }

  for (IntDoubleCursor cursor : other) {
    final int key = cursor.key;
    if (!containsKey(key)) {
      return false;
    }
    // Bitwise comparison so NaN == NaN and +0.0 != -0.0, matching the original semantics.
    if (Double.doubleToLongBits(get(key)) != Double.doubleToLongBits(cursor.value)) {
      return false;
    }
  }

  return true;
}
/**
 * Ensure this container can hold at least the given number of keys (entries) without resizing its
 * buffers.
 *
 * @param expectedElements The total number of keys, inclusive.
 */
public void ensureCapacity(int expectedElements) {
  // Also handles first-time allocation (keys == null after construction or release()).
  if (expectedElements > resizeAt || keys == null) {
    final int[] prevKeys = this.keys;
    final double[] prevValues = this.values;
    allocateBuffers(minBufferSize(expectedElements, loadFactor));
    if (prevKeys != null && !isEmpty()) {
      rehash(prevKeys, prevValues);
    }
  }
}
/**
 * Provides the next iteration seed used to build the iteration starting slot and offset
 * increment. This method does not need to be synchronized, what matters is that each thread gets
 * a sequence of varying seeds.
 */
protected int nextIterationSeed() {
  return iterationSeed = BitMixer.mixPhi(iterationSeed);
}
/** Returns the memory usage of this object's shallow shell plus its key and value buffers. */
@Override
public long ramBytesUsed() {
  return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(keys) + RamUsageEstimator.sizeOf(values);
}
/** An iterator implementation for {@link #iterator}. */
private final class EntryIterator extends AbstractIterator<IntDoubleCursor> {
  // A single cursor instance is reused for every returned entry (HPPC convention).
  private final IntDoubleCursor cursor;

  // Seeded start slot and step randomize iteration order between iterations.
  private final int increment;
  private int index;
  private int slot;

  public EntryIterator() {
    cursor = new IntDoubleCursor();
    int seed = nextIterationSeed();
    increment = iterationIncrement(seed);
    slot = seed & mask;
  }

  @Override
  protected IntDoubleCursor fetch() {
    final int mask = IntDoubleHashMap.this.mask;
    while (index <= mask) {
      int existing;
      index++;
      slot = (slot + increment) & mask;
      if (!((existing = keys[slot]) == 0)) {
        cursor.index = slot;
        cursor.key = existing;
        cursor.value = values[slot];
        return cursor;
      }
    }

    // Finally emit the out-of-band empty-key entry, if present.
    if (index == mask + 1 && hasEmptyKey) {
      cursor.index = index;
      cursor.key = 0;
      cursor.value = values[index++];
      return cursor;
    }

    return done();
  }
}
/** Returns a cursor-based iterator over entries; iteration order varies between iterations. */
@Override
public Iterator<IntDoubleCursor> iterator() {
  return new EntryIterator();
}
/** Returns a specialized view of the keys of this associated container. */
public KeysContainer keys() {
  // A live view, not a copy: it reads through to this map.
  return new KeysContainer();
}
/** A view of the keys inside this hash map. */
public final class KeysContainer implements Iterable<IntCursor> {
  @Override
  public Iterator<IntCursor> iterator() {
    return new KeysIterator();
  }

  public int size() {
    return IntDoubleHashMap.this.size();
  }

  public int[] toArray() {
    int[] array = new int[size()];
    int i = 0;
    for (IntCursor cursor : this) {
      // KeysIterator stores each key in IntCursor.value.
      array[i++] = cursor.value;
    }
    return array;
  }
}
/** An iterator over the set of assigned keys. */
private final class KeysIterator extends AbstractIterator<IntCursor> {
  // Reused cursor: value carries the key, index carries the slot.
  private final IntCursor cursor;
  private final int increment;
  private int index;
  private int slot;

  public KeysIterator() {
    cursor = new IntCursor();
    int seed = nextIterationSeed();
    increment = iterationIncrement(seed);
    slot = seed & mask;
  }

  @Override
  protected IntCursor fetch() {
    final int mask = IntDoubleHashMap.this.mask;
    while (index <= mask) {
      int existing;
      index++;
      slot = (slot + increment) & mask;
      if (!((existing = keys[slot]) == 0)) {
        cursor.index = slot;
        cursor.value = existing;
        return cursor;
      }
    }

    // The empty key (0) is emitted last, if present.
    if (index == mask + 1 && hasEmptyKey) {
      cursor.index = index++;
      cursor.value = 0;
      return cursor;
    }

    return done();
  }
}
/**
 * @return Returns a container with all values stored in this map.
 */
public ValuesContainer values() {
  // A live view, not a copy: it reads through to this map.
  return new ValuesContainer();
}
/** A view over the set of values of this map. */
public final class ValuesContainer implements Iterable<DoubleCursor> {
  @Override
  public Iterator<DoubleCursor> iterator() {
    return new ValuesIterator();
  }

  public int size() {
    return IntDoubleHashMap.this.size();
  }

  public double[] toArray() {
    double[] array = new double[size()];
    int i = 0;
    for (DoubleCursor cursor : this) {
      array[i++] = cursor.value;
    }
    return array;
  }
}
/** An iterator over the set of assigned values. */
private final class ValuesIterator extends AbstractIterator<DoubleCursor> {
  // Reused cursor: value carries the map value, index carries the slot.
  private final DoubleCursor cursor;
  private final int increment;
  private int index;
  private int slot;

  public ValuesIterator() {
    cursor = new DoubleCursor();
    int seed = nextIterationSeed();
    increment = iterationIncrement(seed);
    slot = seed & mask;
  }

  @Override
  protected DoubleCursor fetch() {
    final int mask = IntDoubleHashMap.this.mask;
    while (index <= mask) {
      index++;
      slot = (slot + increment) & mask;
      if (!((keys[slot]) == 0)) {
        cursor.index = slot;
        cursor.value = values[slot];
        return cursor;
      }
    }

    // The empty key's value is emitted last, if present.
    if (index == mask + 1 && hasEmptyKey) {
      cursor.index = index;
      cursor.value = values[index++];
      return cursor;
    }

    return done();
  }
}
/** Returns a deep copy of this map (buffers are cloned; iteration order reseeded). */
@Override
public IntDoubleHashMap clone() {
  try {
    IntDoubleHashMap cloned = (IntDoubleHashMap) super.clone();
    cloned.keys = keys.clone();
    cloned.values = values.clone();
    cloned.hasEmptyKey = hasEmptyKey;
    // Fresh seed so the clone does not mirror this instance's iteration order.
    cloned.iterationSeed = ITERATION_SEED.incrementAndGet();
    return cloned;
  } catch (CloneNotSupportedException e) {
    // Cannot happen: this class implements Cloneable.
    throw new RuntimeException(e);
  }
}
/** Convert the contents of this map to a human-friendly string. */
@Override
public String toString() {
  final StringBuilder sb = new StringBuilder();
  sb.append("[");

  // Emit the separator before every entry except the first.
  String separator = "";
  for (IntDoubleCursor cursor : this) {
    sb.append(separator).append(cursor.key).append("=>").append(cursor.value);
    separator = ", ";
  }
  sb.append("]");
  return sb.toString();
}
/** Creates a hash map from two index-aligned arrays of key-value pairs. */
public static IntDoubleHashMap from(int[] keys, double[] values) {
  if (keys.length != values.length) {
    throw new IllegalArgumentException("Arrays of keys and values must have an identical length.");
  }

  final IntDoubleHashMap map = new IntDoubleHashMap(keys.length);
  int i = 0;
  for (int key : keys) {
    map.put(key, values[i++]);
  }
  return map;
}
/**
 * Returns a hash code for the given key.
 *
 * <p>The output from this function should evenly distribute keys across the entire integer range.
 */
protected int hashKey(int key) {
  assert !((key) == 0); // Handled as a special case (empty slot marker).
  return BitMixer.mixPhi(key);
}
/**
 * Validate load factor range and return it. Override and suppress if you need insane load
 * factors.
 */
protected double verifyLoadFactor(double loadFactor) {
  checkLoadFactor(loadFactor, MIN_LOAD_FACTOR, MAX_LOAD_FACTOR);
  return loadFactor;
}
/** Rehash from old buffers to new buffers. */
protected void rehash(int[] fromKeys, double[] fromValues) {
  assert fromKeys.length == fromValues.length && checkPowerOfTwo(fromKeys.length - 1);

  // Rehash all stored key/value pairs into the new buffers.
  final int[] keys = this.keys;
  final double[] values = this.values;
  final int mask = this.mask;
  int existing;

  // Copy the zero element's slot, then rehash everything else.
  // (The last slot of each buffer is the dedicated empty-key slot.)
  int from = fromKeys.length - 1;
  keys[keys.length - 1] = fromKeys[from];
  values[values.length - 1] = fromValues[from];
  while (--from >= 0) {
    if (!((existing = fromKeys[from]) == 0)) {
      int slot = hashKey(existing) & mask;
      // Linear-probe for the first free slot in the new buffers.
      while (!((keys[slot]) == 0)) {
        slot = (slot + 1) & mask;
      }
      keys[slot] = existing;
      values[slot] = fromValues[from];
    }
  }
}
/**
 * Allocate new internal buffers. This method attempts to allocate and assign internal buffers
 * atomically (either allocations succeed or not).
 */
protected void allocateBuffers(int arraySize) {
  assert Integer.bitCount(arraySize) == 1;

  // Ensure no change is done if we hit an OOM.
  int[] prevKeys = this.keys;
  double[] prevValues = this.values;
  try {
    // One extra slot at the end is reserved for the empty key's value.
    int emptyElementSlot = 1;
    this.keys = (new int[arraySize + emptyElementSlot]);
    this.values = (new double[arraySize + emptyElementSlot]);
  } catch (OutOfMemoryError e) {
    // Restore the previous buffers so the map remains usable.
    this.keys = prevKeys;
    this.values = prevValues;
    throw new BufferAllocationException(
        "Not enough memory to allocate buffers for rehashing: %,d -> %,d",
        e, this.mask + 1, arraySize);
  }

  this.resizeAt = expandAtCount(arraySize, loadFactor);
  this.mask = arraySize - 1;
}
/**
 * This method is invoked when there is a new key/ value pair to be inserted into the buffers but
 * there is not enough empty slots to do so.
 *
 * <p>New buffers are allocated. If this succeeds, we know we can proceed with rehashing so we
 * assign the pending element to the previous buffer (possibly violating the invariant of having
 * at least one empty slot) and rehash all keys, substituting new buffers at the end.
 */
protected void allocateThenInsertThenRehash(int slot, int pendingKey, double pendingValue) {
  assert assigned == resizeAt && ((keys[slot]) == 0) && !((pendingKey) == 0);

  // Try to allocate new buffers first. If we OOM, we leave in a consistent state.
  final int[] prevKeys = this.keys;
  final double[] prevValues = this.values;
  allocateBuffers(nextBufferSize(mask + 1, size(), loadFactor));
  assert this.keys.length > prevKeys.length;

  // We have succeeded at allocating new data so insert the pending key/value at
  // the free slot in the old arrays before rehashing.
  prevKeys[slot] = pendingKey;
  prevValues[slot] = pendingValue;

  // Rehash old keys, including the pending key.
  rehash(prevKeys, prevValues);
}
/**
 * Shift all the slot-conflicting keys and values allocated to (and including) <code>slot</code>.
 */
protected void shiftConflictingKeys(int gapSlot) {
  final int[] keys = this.keys;
  final double[] values = this.values;
  final int mask = this.mask;

  // Perform shifts of conflicting keys to fill in the gap.
  int distance = 0;
  while (true) {
    final int slot = (gapSlot + (++distance)) & mask;
    final int existing = keys[slot];
    if (((existing) == 0)) {
      break;
    }

    // Note: masking happens in the shift computation below, not here.
    final int idealSlot = hashKey(existing);
    final int shift = (slot - idealSlot) & mask;
    if (shift >= distance) {
      // Entry at this position was originally at or before the gap slot.
      // Move the conflict-shifted entry to the gap's position and repeat the procedure
      // for any entries to the right of the current position, treating it
      // as the new gap.
      keys[gapSlot] = existing;
      values[gapSlot] = values[slot];
      gapSlot = slot;
      distance = 0;
    }
  }

  // Mark the last found gap slot without a conflict as empty.
  keys[gapSlot] = 0;
  values[gapSlot] = 0;
  assigned--;
}
/** Forked from HPPC, holding int index, key and value */
public static final class IntDoubleCursor {
  /**
   * The current key and value's index in the container this cursor belongs to. The meaning of
   * this index is defined by the container (usually it will be an index in the underlying storage
   * buffer).
   */
  public int index;

  /** The current key. */
  public int key;

  /** The current value. */
  public double value;

  @Override
  public String toString() {
    return "[cursor, index: " + index + ", key: " + key + ", value: " + value + "]";
  }
}
}

View File

@ -0,0 +1,842 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.internal.hppc;
import java.util.Arrays;
import java.util.Iterator;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.RamUsageEstimator;
/**
* A hash map of <code>int</code> to <code>float</code>, implemented using open addressing with
* linear probing for collision resolution.
*
* <p>Mostly forked and trimmed from com.carrotsearch.hppc.IntFloatHashMap
*
* <p>github: https://github.com/carrotsearch/hppc release 0.10.0
*
* @lucene.internal
*/
public class IntFloatHashMap
implements Iterable<IntFloatHashMap.IntFloatCursor>, Accountable, Cloneable {
private static final long BASE_RAM_BYTES_USED =
RamUsageEstimator.shallowSizeOfInstance(IntFloatHashMap.class);
/** The array holding keys. */
public int[] keys;
/** The array holding values. */
public float[] values;
/**
* The number of stored keys (assigned key slots), excluding the special "empty" key, if any (use
* {@link #size()} instead).
*
* @see #size()
*/
protected int assigned;
/** Mask for slot scans in {@link #keys}. */
protected int mask;
/** Expand (rehash) {@link #keys} when {@link #assigned} hits this value. */
protected int resizeAt;
/** Special treatment for the "empty slot" key marker. */
protected boolean hasEmptyKey;
/** The load factor for {@link #keys}. */
protected double loadFactor;
/** Seed used to ensure the hash iteration order is different from an iteration to another. */
protected int iterationSeed;
/** New instance with sane defaults. */
public IntFloatHashMap() {
  // Delegates to the sizing constructor with the default expected element count.
  this(HashContainers.DEFAULT_EXPECTED_ELEMENTS);
}
/**
 * New instance with sane defaults.
 *
 * @param expectedElements The expected number of elements guaranteed not to cause buffer
 *     expansion (inclusive).
 */
public IntFloatHashMap(int expectedElements) {
  this(expectedElements, HashContainers.DEFAULT_LOAD_FACTOR);
}
/**
* New instance with the provided defaults.
*
* @param expectedElements The expected number of elements guaranteed not to cause a rehash
* (inclusive).
* @param loadFactor The load factor for internal buffers. Insane load factors (zero, full
* capacity) are rejected by {@link #verifyLoadFactor(double)}.
*/
public IntFloatHashMap(int expectedElements, double loadFactor) {
this.loadFactor = verifyLoadFactor(loadFactor);
iterationSeed = HashContainers.ITERATION_SEED.incrementAndGet();
ensureCapacity(expectedElements);
}
/** Create a hash map from all key-value pairs of another map. */
public IntFloatHashMap(IntFloatHashMap map) {
this(map.size());
putAll(map);
}
public float put(int key, float value) {
assert assigned < mask + 1;
final int mask = this.mask;
if (((key) == 0)) {
float previousValue = hasEmptyKey ? values[mask + 1] : 0;
hasEmptyKey = true;
values[mask + 1] = value;
return previousValue;
} else {
final int[] keys = this.keys;
int slot = hashKey(key) & mask;
int existing;
while (!((existing = keys[slot]) == 0)) {
if (((existing) == (key))) {
final float previousValue = values[slot];
values[slot] = value;
return previousValue;
}
slot = (slot + 1) & mask;
}
if (assigned == resizeAt) {
allocateThenInsertThenRehash(slot, key, value);
} else {
keys[slot] = key;
values[slot] = value;
}
assigned++;
return 0;
}
}
public int putAll(Iterable<? extends IntFloatCursor> iterable) {
final int count = size();
for (IntFloatCursor c : iterable) {
put(c.key, c.value);
}
return size() - count;
}
/**
* <a href="http://trove4j.sourceforge.net">Trove</a>-inspired API method. An equivalent of the
* following code:
*
* <pre>
* if (!map.containsKey(key)) map.put(value);
* </pre>
*
* @param key The key of the value to check.
* @param value The value to put if <code>key</code> does not exist.
* @return <code>true</code> if <code>key</code> did not exist and <code>value</code> was placed
* in the map.
*/
public boolean putIfAbsent(int key, float value) {
int keyIndex = indexOf(key);
if (!indexExists(keyIndex)) {
indexInsert(keyIndex, key, value);
return true;
} else {
return false;
}
}
/**
* If <code>key</code> exists, <code>putValue</code> is inserted into the map, otherwise any
* existing value is incremented by <code>additionValue</code>.
*
* @param key The key of the value to adjust.
* @param putValue The value to put if <code>key</code> does not exist.
* @param incrementValue The value to add to the existing value if <code>key</code> exists.
* @return Returns the current value associated with <code>key</code> (after changes).
*/
public float putOrAdd(int key, float putValue, float incrementValue) {
assert assigned < mask + 1;
int keyIndex = indexOf(key);
if (indexExists(keyIndex)) {
putValue = values[keyIndex] + incrementValue;
indexReplace(keyIndex, putValue);
} else {
indexInsert(keyIndex, key, putValue);
}
return putValue;
}
/**
* Adds <code>incrementValue</code> to any existing value for the given <code>key</code> or
* inserts <code>incrementValue</code> if <code>key</code> did not previously exist.
*
* @param key The key of the value to adjust.
* @param incrementValue The value to put or add to the existing value if <code>key</code> exists.
* @return Returns the current value associated with <code>key</code> (after changes).
*/
public float addTo(int key, float incrementValue) {
return putOrAdd(key, incrementValue, incrementValue);
}
public float remove(int key) {
final int mask = this.mask;
if (((key) == 0)) {
if (!hasEmptyKey) {
return 0;
}
hasEmptyKey = false;
float previousValue = values[mask + 1];
values[mask + 1] = 0;
return previousValue;
} else {
final int[] keys = this.keys;
int slot = hashKey(key) & mask;
int existing;
while (!((existing = keys[slot]) == 0)) {
if (((existing) == (key))) {
final float previousValue = values[slot];
shiftConflictingKeys(slot);
return previousValue;
}
slot = (slot + 1) & mask;
}
return 0;
}
}
public float get(int key) {
if (((key) == 0)) {
return hasEmptyKey ? values[mask + 1] : 0;
} else {
final int[] keys = this.keys;
final int mask = this.mask;
int slot = hashKey(key) & mask;
int existing;
while (!((existing = keys[slot]) == 0)) {
if (((existing) == (key))) {
return values[slot];
}
slot = (slot + 1) & mask;
}
return 0;
}
}
public float getOrDefault(int key, float defaultValue) {
if (((key) == 0)) {
return hasEmptyKey ? values[mask + 1] : defaultValue;
} else {
final int[] keys = this.keys;
final int mask = this.mask;
int slot = hashKey(key) & mask;
int existing;
while (!((existing = keys[slot]) == 0)) {
if (((existing) == (key))) {
return values[slot];
}
slot = (slot + 1) & mask;
}
return defaultValue;
}
}
public boolean containsKey(int key) {
if (((key) == 0)) {
return hasEmptyKey;
} else {
final int[] keys = this.keys;
final int mask = this.mask;
int slot = hashKey(key) & mask;
int existing;
while (!((existing = keys[slot]) == 0)) {
if (((existing) == (key))) {
return true;
}
slot = (slot + 1) & mask;
}
return false;
}
}
public int indexOf(int key) {
final int mask = this.mask;
if (((key) == 0)) {
return hasEmptyKey ? mask + 1 : ~(mask + 1);
} else {
final int[] keys = this.keys;
int slot = hashKey(key) & mask;
int existing;
while (!((existing = keys[slot]) == 0)) {
if (((existing) == (key))) {
return slot;
}
slot = (slot + 1) & mask;
}
return ~slot;
}
}
public boolean indexExists(int index) {
assert index < 0 || index <= mask || (index == mask + 1 && hasEmptyKey);
return index >= 0;
}
public float indexGet(int index) {
assert index >= 0 : "The index must point at an existing key.";
assert index <= mask || (index == mask + 1 && hasEmptyKey);
return values[index];
}
public float indexReplace(int index, float newValue) {
assert index >= 0 : "The index must point at an existing key.";
assert index <= mask || (index == mask + 1 && hasEmptyKey);
float previousValue = values[index];
values[index] = newValue;
return previousValue;
}
public void indexInsert(int index, int key, float value) {
assert index < 0 : "The index must not point at an existing key.";
index = ~index;
if (((key) == 0)) {
assert index == mask + 1;
values[index] = value;
hasEmptyKey = true;
} else {
assert ((keys[index]) == 0);
if (assigned == resizeAt) {
allocateThenInsertThenRehash(index, key, value);
} else {
keys[index] = key;
values[index] = value;
}
assigned++;
}
}
public float indexRemove(int index) {
assert index >= 0 : "The index must point at an existing key.";
assert index <= mask || (index == mask + 1 && hasEmptyKey);
float previousValue = values[index];
if (index > mask) {
assert index == mask + 1;
hasEmptyKey = false;
values[index] = 0;
} else {
shiftConflictingKeys(index);
}
return previousValue;
}
public void clear() {
assigned = 0;
hasEmptyKey = false;
Arrays.fill(keys, 0);
/* */
}
public void release() {
assigned = 0;
hasEmptyKey = false;
keys = null;
values = null;
ensureCapacity(HashContainers.DEFAULT_EXPECTED_ELEMENTS);
}
public int size() {
return assigned + (hasEmptyKey ? 1 : 0);
}
public boolean isEmpty() {
return size() == 0;
}
@Override
public int hashCode() {
int h = hasEmptyKey ? 0xDEADBEEF : 0;
for (IntFloatCursor c : this) {
h += BitMixer.mix(c.key) + BitMixer.mix(c.value);
}
return h;
}
@Override
public boolean equals(Object obj) {
return (this == obj)
|| (obj != null && getClass() == obj.getClass() && equalElements(getClass().cast(obj)));
}
/** Return true if all keys of some other container exist in this container. */
protected boolean equalElements(IntFloatHashMap other) {
if (other.size() != size()) {
return false;
}
for (IntFloatCursor c : other) {
int key = c.key;
if (!containsKey(key) || !(Float.floatToIntBits(c.value) == Float.floatToIntBits(get(key)))) {
return false;
}
}
return true;
}
/**
* Ensure this container can hold at least the given number of keys (entries) without resizing its
* buffers.
*
* @param expectedElements The total number of keys, inclusive.
*/
public void ensureCapacity(int expectedElements) {
if (expectedElements > resizeAt || keys == null) {
final int[] prevKeys = this.keys;
final float[] prevValues = this.values;
allocateBuffers(HashContainers.minBufferSize(expectedElements, loadFactor));
if (prevKeys != null && !isEmpty()) {
rehash(prevKeys, prevValues);
}
}
}
/**
* Provides the next iteration seed used to build the iteration starting slot and offset
* increment. This method does not need to be synchronized, what matters is that each thread gets
* a sequence of varying seeds.
*/
protected int nextIterationSeed() {
return iterationSeed = BitMixer.mixPhi(iterationSeed);
}
@Override
public long ramBytesUsed() {
return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(keys) + RamUsageEstimator.sizeOf(values);
}
/** An iterator implementation for {@link #iterator}. */
private final class EntryIterator extends AbstractIterator<IntFloatCursor> {
private final IntFloatCursor cursor;
private final int increment;
private int index;
private int slot;
public EntryIterator() {
cursor = new IntFloatCursor();
int seed = nextIterationSeed();
increment = HashContainers.iterationIncrement(seed);
slot = seed & mask;
}
@Override
protected IntFloatCursor fetch() {
final int mask = IntFloatHashMap.this.mask;
while (index <= mask) {
int existing;
index++;
slot = (slot + increment) & mask;
if (!((existing = keys[slot]) == 0)) {
cursor.index = slot;
cursor.key = existing;
cursor.value = values[slot];
return cursor;
}
}
if (index == mask + 1 && hasEmptyKey) {
cursor.index = index;
cursor.key = 0;
cursor.value = values[index++];
return cursor;
}
return done();
}
}
@Override
public Iterator<IntFloatCursor> iterator() {
return new EntryIterator();
}
/** Returns a specialized view of the keys of this associated container. */
public KeysContainer keys() {
return new KeysContainer();
}
/** A view of the keys inside this hash map. */
public final class KeysContainer implements Iterable<IntCursor> {
@Override
public Iterator<IntCursor> iterator() {
return new KeysIterator();
}
public int size() {
return IntFloatHashMap.this.size();
}
public int[] toArray() {
int[] array = new int[size()];
int i = 0;
for (IntCursor cursor : this) {
array[i++] = cursor.value;
}
return array;
}
}
/** An iterator over the set of assigned keys. */
private final class KeysIterator extends AbstractIterator<IntCursor> {
private final IntCursor cursor;
private final int increment;
private int index;
private int slot;
public KeysIterator() {
cursor = new IntCursor();
int seed = nextIterationSeed();
increment = HashContainers.iterationIncrement(seed);
slot = seed & mask;
}
@Override
protected IntCursor fetch() {
final int mask = IntFloatHashMap.this.mask;
while (index <= mask) {
int existing;
index++;
slot = (slot + increment) & mask;
if (!((existing = keys[slot]) == 0)) {
cursor.index = slot;
cursor.value = existing;
return cursor;
}
}
if (index == mask + 1 && hasEmptyKey) {
cursor.index = index++;
cursor.value = 0;
return cursor;
}
return done();
}
}
/**
* @return Returns a container with all values stored in this map.
*/
public ValuesContainer values() {
return new ValuesContainer();
}
/** A view over the set of values of this map. */
public final class ValuesContainer implements Iterable<FloatCursor> {
@Override
public Iterator<FloatCursor> iterator() {
return new ValuesIterator();
}
public int size() {
return IntFloatHashMap.this.size();
}
public float[] toArray() {
float[] array = new float[size()];
int i = 0;
for (FloatCursor cursor : this) {
array[i++] = cursor.value;
}
return array;
}
}
/** An iterator over the set of assigned values. */
private final class ValuesIterator extends AbstractIterator<FloatCursor> {
private final FloatCursor cursor;
private final int increment;
private int index;
private int slot;
public ValuesIterator() {
cursor = new FloatCursor();
int seed = nextIterationSeed();
increment = HashContainers.iterationIncrement(seed);
slot = seed & mask;
}
@Override
protected FloatCursor fetch() {
final int mask = IntFloatHashMap.this.mask;
while (index <= mask) {
index++;
slot = (slot + increment) & mask;
if (!((keys[slot]) == 0)) {
cursor.index = slot;
cursor.value = values[slot];
return cursor;
}
}
if (index == mask + 1 && hasEmptyKey) {
cursor.index = index;
cursor.value = values[index++];
return cursor;
}
return done();
}
}
@Override
public IntFloatHashMap clone() {
try {
/* */
IntFloatHashMap cloned = (IntFloatHashMap) super.clone();
cloned.keys = keys.clone();
cloned.values = values.clone();
cloned.hasEmptyKey = hasEmptyKey;
cloned.iterationSeed = HashContainers.ITERATION_SEED.incrementAndGet();
return cloned;
} catch (CloneNotSupportedException e) {
throw new RuntimeException(e);
}
}
/** Convert the contents of this map to a human-friendly string. */
@Override
public String toString() {
final StringBuilder buffer = new StringBuilder();
buffer.append("[");
boolean first = true;
for (IntFloatCursor cursor : this) {
if (!first) {
buffer.append(", ");
}
buffer.append(cursor.key);
buffer.append("=>");
buffer.append(cursor.value);
first = false;
}
buffer.append("]");
return buffer.toString();
}
/** Creates a hash map from two index-aligned arrays of key-value pairs. */
public static IntFloatHashMap from(int[] keys, float[] values) {
if (keys.length != values.length) {
throw new IllegalArgumentException(
"Arrays of keys and values must have an identical length.");
}
IntFloatHashMap map = new IntFloatHashMap(keys.length);
for (int i = 0; i < keys.length; i++) {
map.put(keys[i], values[i]);
}
return map;
}
/**
* Returns a hash code for the given key.
*
* <p>The output from this function should evenly distribute keys across the entire integer range.
*/
protected int hashKey(int key) {
assert !((key) == 0); // Handled as a special case (empty slot marker).
return BitMixer.mixPhi(key);
}
/**
* Validate load factor range and return it. Override and suppress if you need insane load
* factors.
*/
protected double verifyLoadFactor(double loadFactor) {
HashContainers.checkLoadFactor(
loadFactor, HashContainers.MIN_LOAD_FACTOR, HashContainers.MAX_LOAD_FACTOR);
return loadFactor;
}
/** Rehash from old buffers to new buffers. */
protected void rehash(int[] fromKeys, float[] fromValues) {
assert fromKeys.length == fromValues.length
&& HashContainers.checkPowerOfTwo(fromKeys.length - 1);
// Rehash all stored key/value pairs into the new buffers.
final int[] keys = this.keys;
final float[] values = this.values;
final int mask = this.mask;
int existing;
// Copy the zero element's slot, then rehash everything else.
int from = fromKeys.length - 1;
keys[keys.length - 1] = fromKeys[from];
values[values.length - 1] = fromValues[from];
while (--from >= 0) {
if (!((existing = fromKeys[from]) == 0)) {
int slot = hashKey(existing) & mask;
while (!((keys[slot]) == 0)) {
slot = (slot + 1) & mask;
}
keys[slot] = existing;
values[slot] = fromValues[from];
}
}
}
/**
* Allocate new internal buffers. This method attempts to allocate and assign internal buffers
* atomically (either allocations succeed or not).
*/
protected void allocateBuffers(int arraySize) {
assert Integer.bitCount(arraySize) == 1;
// Ensure no change is done if we hit an OOM.
int[] prevKeys = this.keys;
float[] prevValues = this.values;
try {
int emptyElementSlot = 1;
this.keys = (new int[arraySize + emptyElementSlot]);
this.values = (new float[arraySize + emptyElementSlot]);
} catch (OutOfMemoryError e) {
this.keys = prevKeys;
this.values = prevValues;
throw new BufferAllocationException(
"Not enough memory to allocate buffers for rehashing: %,d -> %,d",
e, this.mask + 1, arraySize);
}
this.resizeAt = HashContainers.expandAtCount(arraySize, loadFactor);
this.mask = arraySize - 1;
}
/**
* This method is invoked when there is a new key/ value pair to be inserted into the buffers but
* there is not enough empty slots to do so.
*
* <p>New buffers are allocated. If this succeeds, we know we can proceed with rehashing so we
* assign the pending element to the previous buffer (possibly violating the invariant of having
* at least one empty slot) and rehash all keys, substituting new buffers at the end.
*/
protected void allocateThenInsertThenRehash(int slot, int pendingKey, float pendingValue) {
assert assigned == resizeAt && ((keys[slot]) == 0) && !((pendingKey) == 0);
// Try to allocate new buffers first. If we OOM, we leave in a consistent state.
final int[] prevKeys = this.keys;
final float[] prevValues = this.values;
allocateBuffers(HashContainers.nextBufferSize(mask + 1, size(), loadFactor));
assert this.keys.length > prevKeys.length;
// We have succeeded at allocating new data so insert the pending key/value at
// the free slot in the old arrays before rehashing.
prevKeys[slot] = pendingKey;
prevValues[slot] = pendingValue;
// Rehash old keys, including the pending key.
rehash(prevKeys, prevValues);
}
/**
* Shift all the slot-conflicting keys and values allocated to (and including) <code>slot</code>.
*/
protected void shiftConflictingKeys(int gapSlot) {
final int[] keys = this.keys;
final float[] values = this.values;
final int mask = this.mask;
// Perform shifts of conflicting keys to fill in the gap.
int distance = 0;
while (true) {
final int slot = (gapSlot + (++distance)) & mask;
final int existing = keys[slot];
if (((existing) == 0)) {
break;
}
final int idealSlot = hashKey(existing);
final int shift = (slot - idealSlot) & mask;
if (shift >= distance) {
// Entry at this position was originally at or before the gap slot.
// Move the conflict-shifted entry to the gap's position and repeat the procedure
// for any entries to the right of the current position, treating it
// as the new gap.
keys[gapSlot] = existing;
values[gapSlot] = values[slot];
gapSlot = slot;
distance = 0;
}
}
// Mark the last found gap slot without a conflict as empty.
keys[gapSlot] = 0;
values[gapSlot] = 0;
assigned--;
}
/** Forked from HPPC, holding int index,key and value */
public static final class IntFloatCursor {
/**
* The current key and value's index in the container this cursor beints to. The meaning of this
* index is defined by the container (usually it will be an index in the underlying storage
* buffer).
*/
public int index;
/** The current key. */
public int key;
/** The current value. */
public float value;
@Override
public String toString() {
return "[cursor, index: " + index + ", key: " + key + ", value: " + value + "]";
}
}
}

View File

@ -15,9 +15,9 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.lucene.util.hppc; package org.apache.lucene.internal.hppc;
import static org.apache.lucene.util.hppc.HashContainers.*; import static org.apache.lucene.internal.hppc.HashContainers.*;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collection; import java.util.Collection;
@ -31,7 +31,9 @@ import org.apache.lucene.util.RamUsageEstimator;
* *
* <p>Mostly forked and trimmed from com.carrotsearch.hppc.IntHashSet * <p>Mostly forked and trimmed from com.carrotsearch.hppc.IntHashSet
* *
* <p>github: https://github.com/carrotsearch/hppc release 0.9.0 * <p>github: https://github.com/carrotsearch/hppc release 0.10.0
*
* @lucene.internal
*/ */
public class IntHashSet implements Iterable<IntCursor>, Accountable, Cloneable { public class IntHashSet implements Iterable<IntCursor>, Accountable, Cloneable {

View File

@ -15,9 +15,9 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.lucene.util.hppc; package org.apache.lucene.internal.hppc;
import static org.apache.lucene.util.hppc.HashContainers.*; import static org.apache.lucene.internal.hppc.HashContainers.*;
import java.util.Arrays; import java.util.Arrays;
import java.util.Iterator; import java.util.Iterator;
@ -30,7 +30,9 @@ import org.apache.lucene.util.RamUsageEstimator;
* *
* <p>Mostly forked and trimmed from com.carrotsearch.hppc.IntIntHashMap * <p>Mostly forked and trimmed from com.carrotsearch.hppc.IntIntHashMap
* *
* <p>github: https://github.com/carrotsearch/hppc release 0.9.0 * <p>github: https://github.com/carrotsearch/hppc release 0.10.0
*
* @lucene.internal
*/ */
public class IntIntHashMap implements Iterable<IntIntHashMap.IntIntCursor>, Accountable, Cloneable { public class IntIntHashMap implements Iterable<IntIntHashMap.IntIntCursor>, Accountable, Cloneable {
@ -310,7 +312,7 @@ public class IntIntHashMap implements Iterable<IntIntHashMap.IntIntCursor>, Acco
} }
public boolean indexExists(int index) { public boolean indexExists(int index) {
assert index < 0 || (index >= 0 && index <= mask) || (index == mask + 1 && hasEmptyKey); assert index < 0 || index <= mask || (index == mask + 1 && hasEmptyKey);
return index >= 0; return index >= 0;
} }
@ -806,7 +808,7 @@ public class IntIntHashMap implements Iterable<IntIntHashMap.IntIntCursor>, Acco
} }
/** Forked from HPPC, holding int index,key and value */ /** Forked from HPPC, holding int index,key and value */
public final class IntIntCursor { public static final class IntIntCursor {
/** /**
* The current key and value's index in the container this cursor belongs to. The meaning of * The current key and value's index in the container this cursor belongs to. The meaning of
* this index is defined by the container (usually it will be an index in the underlying storage * this index is defined by the container (usually it will be an index in the underlying storage

View File

@ -15,9 +15,9 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.lucene.util.hppc; package org.apache.lucene.internal.hppc;
import static org.apache.lucene.util.hppc.HashContainers.*; import static org.apache.lucene.internal.hppc.HashContainers.*;
import java.util.Arrays; import java.util.Arrays;
import java.util.Iterator; import java.util.Iterator;
@ -30,7 +30,9 @@ import org.apache.lucene.util.RamUsageEstimator;
* *
* <p>Mostly forked and trimmed from com.carrotsearch.hppc.IntObjectHashMap * <p>Mostly forked and trimmed from com.carrotsearch.hppc.IntObjectHashMap
* *
* <p>github: https://github.com/carrotsearch/hppc release 0.9.0 * <p>github: https://github.com/carrotsearch/hppc release 0.10.0
*
* @lucene.internal
*/ */
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public class IntObjectHashMap<VType> public class IntObjectHashMap<VType>
@ -278,7 +280,7 @@ public class IntObjectHashMap<VType>
} }
public boolean indexExists(int index) { public boolean indexExists(int index) {
assert index < 0 || (index >= 0 && index <= mask) || (index == mask + 1 && hasEmptyKey); assert index < 0 || index <= mask || (index == mask + 1 && hasEmptyKey);
return index >= 0; return index >= 0;
} }

View File

@ -15,9 +15,9 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.lucene.util.hppc; package org.apache.lucene.internal.hppc;
import static org.apache.lucene.util.hppc.HashContainers.DEFAULT_EXPECTED_ELEMENTS; import static org.apache.lucene.internal.hppc.HashContainers.DEFAULT_EXPECTED_ELEMENTS;
import java.util.Arrays; import java.util.Arrays;
import java.util.Iterator; import java.util.Iterator;
@ -31,7 +31,9 @@ import org.apache.lucene.util.RamUsageEstimator;
* *
* <p>Mostly forked and trimmed from com.carrotsearch.hppc.LongArrayList * <p>Mostly forked and trimmed from com.carrotsearch.hppc.LongArrayList
* *
* <p>github: https://github.com/carrotsearch/hppc release 0.9.0 * <p>github: https://github.com/carrotsearch/hppc release 0.10.0
*
* @lucene.internal
*/ */
public class LongArrayList implements Iterable<LongCursor>, Cloneable, Accountable { public class LongArrayList implements Iterable<LongCursor>, Cloneable, Accountable {
private static final long BASE_RAM_BYTES_USED = private static final long BASE_RAM_BYTES_USED =
@ -40,13 +42,11 @@ public class LongArrayList implements Iterable<LongCursor>, Cloneable, Accountab
/** An immutable empty buffer (array). */ /** An immutable empty buffer (array). */
public static final long[] EMPTY_ARRAY = new long[0]; public static final long[] EMPTY_ARRAY = new long[0];
;
/** /**
* Internal array for storing the list. The array may be larger than the current size ({@link * Internal array for storing the list. The array may be larger than the current size ({@link
* #size()}). * #size()}).
*/ */
public long[] buffer = EMPTY_ARRAY; public long[] buffer;
/** Current number of elements stored in {@link #buffer}. */ /** Current number of elements stored in {@link #buffer}. */
public int elementsCount; public int elementsCount;
@ -262,8 +262,7 @@ public class LongArrayList implements Iterable<LongCursor>, Cloneable, Accountab
* @param expectedElements The total number of elements, inclusive. * @param expectedElements The total number of elements, inclusive.
*/ */
public void ensureCapacity(int expectedElements) { public void ensureCapacity(int expectedElements) {
final int bufferLen = (buffer == null ? 0 : buffer.length); if (expectedElements > buffer.length) {
if (expectedElements > bufferLen) {
ensureBufferSpace(expectedElements - size()); ensureBufferSpace(expectedElements - size());
} }
} }
@ -273,8 +272,7 @@ public class LongArrayList implements Iterable<LongCursor>, Cloneable, Accountab
* Increases internal buffer size if needed. * Increases internal buffer size if needed.
*/ */
protected void ensureBufferSpace(int expectedAdditions) { protected void ensureBufferSpace(int expectedAdditions) {
final int bufferLen = (buffer == null ? 0 : buffer.length); if (elementsCount + expectedAdditions > buffer.length) {
if (elementsCount + expectedAdditions > bufferLen) {
this.buffer = ArrayUtil.grow(buffer, elementsCount + expectedAdditions); this.buffer = ArrayUtil.grow(buffer, elementsCount + expectedAdditions);
} }
} }
@ -315,7 +313,7 @@ public class LongArrayList implements Iterable<LongCursor>, Cloneable, Accountab
* #elementsCount} field to zero. * #elementsCount} field to zero.
*/ */
public void clear() { public void clear() {
Arrays.fill(buffer, 0, elementsCount, 0); Arrays.fill(buffer, 0, elementsCount, 0L);
this.elementsCount = 0; this.elementsCount = 0;
} }

View File

@ -15,9 +15,13 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.lucene.util.hppc; package org.apache.lucene.internal.hppc;
/** Forked from HPPC, holding int index and long value */ /**
* Forked from HPPC, holding int index and long value.
*
* @lucene.internal
*/
public final class LongCursor { public final class LongCursor {
/** /**
* The current value's index in the container this cursor belongs to. The meaning of this index is * The current value's index in the container this cursor belongs to. The meaning of this index is

View File

@ -0,0 +1,852 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.internal.hppc;
import static org.apache.lucene.internal.hppc.HashContainers.DEFAULT_EXPECTED_ELEMENTS;
import static org.apache.lucene.internal.hppc.HashContainers.DEFAULT_LOAD_FACTOR;
import static org.apache.lucene.internal.hppc.HashContainers.ITERATION_SEED;
import static org.apache.lucene.internal.hppc.HashContainers.MAX_LOAD_FACTOR;
import static org.apache.lucene.internal.hppc.HashContainers.MIN_LOAD_FACTOR;
import static org.apache.lucene.internal.hppc.HashContainers.checkLoadFactor;
import static org.apache.lucene.internal.hppc.HashContainers.checkPowerOfTwo;
import static org.apache.lucene.internal.hppc.HashContainers.expandAtCount;
import static org.apache.lucene.internal.hppc.HashContainers.iterationIncrement;
import static org.apache.lucene.internal.hppc.HashContainers.minBufferSize;
import static org.apache.lucene.internal.hppc.HashContainers.nextBufferSize;
import java.util.Arrays;
import java.util.Iterator;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.RamUsageEstimator;
/**
* A hash map of <code>long</code> to <code>float</code>, implemented using open addressing with
* linear probing for collision resolution.
*
* <p>Mostly forked and trimmed from com.carrotsearch.hppc.LongFloatHashMap
*
* <p>github: https://github.com/carrotsearch/hppc release 0.10.0
*
* @lucene.internal
*/
public class LongFloatHashMap
implements Iterable<LongFloatHashMap.LongFloatCursor>, Accountable, Cloneable {
private static final long BASE_RAM_BYTES_USED =
RamUsageEstimator.shallowSizeOfInstance(LongFloatHashMap.class);
/** The array holding keys. */
public long[] keys;
/** The array holding values. */
public float[] values;
/**
* The number of stored keys (assigned key slots), excluding the special "empty" key, if any (use
* {@link #size()} instead).
*
* @see #size()
*/
protected int assigned;
/** Mask for slot scans in {@link #keys}. */
protected int mask;
/** Expand (rehash) {@link #keys} when {@link #assigned} hits this value. */
protected int resizeAt;
/** Special treatment for the "empty slot" key marker. */
protected boolean hasEmptyKey;
/** The load factor for {@link #keys}. */
protected double loadFactor;
/** Seed used to ensure the hash iteration order is different from an iteration to another. */
protected int iterationSeed;
/** New instance with sane defaults. */
public LongFloatHashMap() {
this(DEFAULT_EXPECTED_ELEMENTS);
}
/**
* New instance with sane defaults.
*
* @param expectedElements The expected number of elements guaranteed not to cause buffer
* expansion (inclusive).
*/
public LongFloatHashMap(int expectedElements) {
this(expectedElements, DEFAULT_LOAD_FACTOR);
}
/**
* New instance with the provided defaults.
*
* @param expectedElements The expected number of elements guaranteed not to cause a rehash
* (inclusive).
* @param loadFactor The load factor for internal buffers. Insane load factors (zero, full
* capacity) are rejected by {@link #verifyLoadFactor(double)}.
*/
public LongFloatHashMap(int expectedElements, double loadFactor) {
this.loadFactor = verifyLoadFactor(loadFactor);
iterationSeed = ITERATION_SEED.incrementAndGet();
ensureCapacity(expectedElements);
}
/** Create a hash map from all key-value pairs of another map. */
public LongFloatHashMap(LongFloatHashMap map) {
this(map.size());
putAll(map);
}
public float put(long key, float value) {
assert assigned < mask + 1;
final int mask = this.mask;
if (((key) == 0)) {
float previousValue = hasEmptyKey ? values[mask + 1] : 0;
hasEmptyKey = true;
values[mask + 1] = value;
return previousValue;
} else {
final long[] keys = this.keys;
int slot = hashKey(key) & mask;
long existing;
while (!((existing = keys[slot]) == 0)) {
if (((existing) == (key))) {
final float previousValue = values[slot];
values[slot] = value;
return previousValue;
}
slot = (slot + 1) & mask;
}
if (assigned == resizeAt) {
allocateThenInsertThenRehash(slot, key, value);
} else {
keys[slot] = key;
values[slot] = value;
}
assigned++;
return 0;
}
}
public int putAll(Iterable<? extends LongFloatCursor> iterable) {
final int count = size();
for (LongFloatCursor c : iterable) {
put(c.key, c.value);
}
return size() - count;
}
/**
 * <a href="http://trove4j.sourceforge.net">Trove</a>-inspired API method. An equivalent of the
 * following code:
 *
 * <pre>
 * if (!map.containsKey(key)) map.put(value);
 * </pre>
 *
 * @param key The key of the value to check.
 * @param value The value to put if <code>key</code> does not exist.
 * @return <code>true</code> if <code>key</code> did not exist and <code>value</code> was placed
 *     in the map.
 */
public boolean putIfAbsent(long key, float value) {
  final int keyIndex = indexOf(key);
  if (indexExists(keyIndex)) {
    // Key already present: leave the existing value untouched.
    return false;
  }
  indexInsert(keyIndex, key, value);
  return true;
}
/**
 * If <code>key</code> exists, <code>putValue</code> is inserted into the map, otherwise any
 * existing value is incremented by <code>additionValue</code>.
 *
 * @param key The key of the value to adjust.
 * @param putValue The value to put if <code>key</code> does not exist.
 * @param incrementValue The value to add to the existing value if <code>key</code> exists.
 * @return Returns the current value associated with <code>key</code> (after changes).
 */
public float putOrAdd(long key, float putValue, float incrementValue) {
  assert assigned < mask + 1;
  // indexOf gives a reusable slot handle, avoiding a second probe for the write below.
  int keyIndex = indexOf(key);
  if (indexExists(keyIndex)) {
    putValue = values[keyIndex] + incrementValue;
    indexReplace(keyIndex, putValue);
  } else {
    indexInsert(keyIndex, key, putValue);
  }
  return putValue;
}
/**
 * Adds <code>incrementValue</code> to any existing value for the given <code>key</code> or
 * inserts <code>incrementValue</code> if <code>key</code> did not previously exist.
 *
 * @param key The key of the value to adjust.
 * @param incrementValue The value to put or add to the existing value if <code>key</code> exists.
 * @return Returns the current value associated with <code>key</code> (after changes).
 */
public float addTo(long key, float incrementValue) {
  // Equivalent to putOrAdd with the initial value equal to the increment.
  return putOrAdd(key, incrementValue, incrementValue);
}
/**
 * Removes the mapping for {@code key}.
 *
 * @return The value previously associated with {@code key}, or 0 if there was no mapping.
 */
public float remove(long key) {
  final int mask = this.mask;
  if (((key) == 0)) {
    // The empty key (0) is tracked by a flag; its value lives at slot mask + 1.
    if (!hasEmptyKey) {
      return 0;
    }
    hasEmptyKey = false;
    float previousValue = values[mask + 1];
    values[mask + 1] = 0;
    return previousValue;
  } else {
    final long[] keys = this.keys;
    int slot = hashKey(key) & mask;
    long existing;
    while (!((existing = keys[slot]) == 0)) {
      if (((existing) == (key))) {
        final float previousValue = values[slot];
        // Close the gap so downstream entries in the same probe chain stay reachable.
        shiftConflictingKeys(slot);
        return previousValue;
      }
      slot = (slot + 1) & mask;
    }
    return 0;
  }
}
/**
 * Returns the value associated with {@code key}, or 0 if the key is absent. Since 0 is also a
 * valid stored value, use {@link #containsKey} or {@link #getOrDefault} to disambiguate.
 */
public float get(long key) {
  if (((key) == 0)) {
    // Empty key's value is stored out of band at slot mask + 1.
    return hasEmptyKey ? values[mask + 1] : 0;
  } else {
    final long[] keys = this.keys;
    final int mask = this.mask;
    int slot = hashKey(key) & mask;
    long existing;
    // Linear probe until the key or an empty slot is found.
    while (!((existing = keys[slot]) == 0)) {
      if (((existing) == (key))) {
        return values[slot];
      }
      slot = (slot + 1) & mask;
    }
    return 0;
  }
}
/**
 * Returns the value associated with {@code key}, or {@code defaultValue} if the key is absent.
 */
public float getOrDefault(long key, float defaultValue) {
  if (((key) == 0)) {
    return hasEmptyKey ? values[mask + 1] : defaultValue;
  } else {
    final long[] keys = this.keys;
    final int mask = this.mask;
    int slot = hashKey(key) & mask;
    long existing;
    while (!((existing = keys[slot]) == 0)) {
      if (((existing) == (key))) {
        return values[slot];
      }
      slot = (slot + 1) & mask;
    }
    return defaultValue;
  }
}
/** Returns {@code true} if this map contains a mapping for {@code key}. */
public boolean containsKey(long key) {
  if (((key) == 0)) {
    // The empty key (0) is tracked separately by a flag.
    return hasEmptyKey;
  }
  final long[] keys = this.keys;
  final int mask = this.mask;
  long stored;
  // Same linear-probe order as get(): stop at the key or at the first empty slot.
  for (int slot = hashKey(key) & mask; (stored = keys[slot]) != 0; slot = (slot + 1) & mask) {
    if (stored == key) {
      return true;
    }
  }
  return false;
}
/**
 * Looks up the slot index for {@code key}. A non-negative result points at the slot holding the
 * key (usable with {@link #indexGet}/{@link #indexReplace}); a negative result is the one's
 * complement (~slot) of the slot where the key would be inserted (usable with
 * {@link #indexInsert}).
 */
public int indexOf(long key) {
  final int mask = this.mask;
  if (((key) == 0)) {
    // Empty key maps to the out-of-band slot mask + 1, present or not.
    return hasEmptyKey ? mask + 1 : ~(mask + 1);
  } else {
    final long[] keys = this.keys;
    int slot = hashKey(key) & mask;
    long existing;
    while (!((existing = keys[slot]) == 0)) {
      if (((existing) == (key))) {
        return slot;
      }
      slot = (slot + 1) & mask;
    }
    return ~slot;
  }
}
/** Returns {@code true} if the index returned by {@link #indexOf} points at an existing entry. */
public boolean indexExists(int index) {
  assert index < 0 || index <= mask || (index == mask + 1 && hasEmptyKey);
  return index >= 0;
}
/** Returns the value at {@code index}, which must be an existing-key index from {@link #indexOf}. */
public float indexGet(int index) {
  assert index >= 0 : "The index must point at an existing key.";
  assert index <= mask || (index == mask + 1 && hasEmptyKey);
  return values[index];
}
/**
 * Replaces the value at {@code index} (an existing-key index from {@link #indexOf}).
 *
 * @return The value previously stored at that index.
 */
public float indexReplace(int index, float newValue) {
  assert index >= 0 : "The index must point at an existing key.";
  assert index <= mask || (index == mask + 1 && hasEmptyKey);
  final float oldValue = values[index];
  values[index] = newValue;
  return oldValue;
}
/**
 * Inserts a key/value pair at {@code index}, which must be a negative (absent-key) index
 * previously returned by {@link #indexOf} for the same {@code key}.
 */
public void indexInsert(int index, long key, float value) {
  assert index < 0 : "The index must not point at an existing key.";
  // Negative indexes encode the insertion slot as its one's complement.
  index = ~index;
  if (((key) == 0)) {
    assert index == mask + 1;
    values[index] = value;
    hasEmptyKey = true;
  } else {
    assert ((keys[index]) == 0);
    if (assigned == resizeAt) {
      // No free headroom left: grow, insert into the old buffers, then rehash everything.
      allocateThenInsertThenRehash(index, key, value);
    } else {
      keys[index] = key;
      values[index] = value;
    }
    assigned++;
  }
}
/**
 * Removes the entry at {@code index} (an existing-key index from {@link #indexOf}).
 *
 * @return The value previously stored at that index.
 */
public float indexRemove(int index) {
  assert index >= 0 : "The index must point at an existing key.";
  assert index <= mask || (index == mask + 1 && hasEmptyKey);
  float previousValue = values[index];
  if (index > mask) {
    // index == mask + 1 is the out-of-band slot of the empty key.
    assert index == mask + 1;
    hasEmptyKey = false;
    values[index] = 0;
  } else {
    // Regular slot: close the probe-chain gap left by the removal.
    shiftConflictingKeys(index);
  }
  return previousValue;
}
/** Removes all mappings but keeps the allocated buffers for reuse. */
public void clear() {
  assigned = 0;
  hasEmptyKey = false;
  Arrays.fill(keys, 0L);
  // Values are intentionally not cleared: stale floats are unreachable once keys are zeroed.
}
/** Removes all mappings and releases the internal buffers, then reallocates minimal ones. */
public void release() {
  assigned = 0;
  hasEmptyKey = false;
  keys = null;
  values = null;
  // Reallocate the smallest buffers so the map remains usable after release.
  ensureCapacity(DEFAULT_EXPECTED_ELEMENTS);
}
/** Returns the number of mappings, counting the out-of-band empty key when present. */
public int size() {
  return hasEmptyKey ? assigned + 1 : assigned;
}
/** Returns {@code true} if this map contains no mappings. */
public boolean isEmpty() {
  return assigned == 0 && !hasEmptyKey;
}
/** Order-independent hash: a commutative sum of per-entry mixed hashes. */
@Override
public int hashCode() {
  int hash = hasEmptyKey ? 0xDEADBEEF : 0;
  for (LongFloatCursor cursor : this) {
    hash += BitMixer.mix(cursor.key) + BitMixer.mix(cursor.value);
  }
  return hash;
}
/** Two maps are equal when they have the exact same class and the same key/value pairs. */
@Override
public boolean equals(Object obj) {
  if (this == obj) {
    return true;
  }
  if (obj == null || getClass() != obj.getClass()) {
    return false;
  }
  return equalElements(getClass().cast(obj));
}
/** Return true if all keys of some other container exist in this container. */
protected boolean equalElements(LongFloatHashMap other) {
  if (other.size() != size()) {
    return false;
  }
  for (LongFloatCursor c : other) {
    long key = c.key;
    // Bit-level float comparison: NaN == NaN here, and 0.0f != -0.0f.
    if (!containsKey(key) || !(Float.floatToIntBits(c.value) == Float.floatToIntBits(get(key)))) {
      return false;
    }
  }
  return true;
}
/**
 * Ensure this container can hold at least the given number of keys (entries) without resizing its
 * buffers.
 *
 * @param expectedElements The total number of keys, inclusive.
 */
public void ensureCapacity(int expectedElements) {
  if (expectedElements > resizeAt || keys == null) {
    final long[] prevKeys = this.keys;
    final float[] prevValues = this.values;
    allocateBuffers(minBufferSize(expectedElements, loadFactor));
    // Re-insert existing entries into the freshly-sized buffers.
    if (prevKeys != null && !isEmpty()) {
      rehash(prevKeys, prevValues);
    }
  }
}
/**
 * Provides the next iteration seed used to build the iteration starting slot and offset
 * increment. This method does not need to be synchronized, what matters is that each thread gets
 * a sequence of varying seeds.
 */
protected int nextIterationSeed() {
  iterationSeed = BitMixer.mixPhi(iterationSeed);
  return iterationSeed;
}
/** Estimated heap usage: fixed object overhead plus both backing arrays. */
@Override
public long ramBytesUsed() {
  final long keyBytes = RamUsageEstimator.sizeOf(keys);
  final long valueBytes = RamUsageEstimator.sizeOf(values);
  return BASE_RAM_BYTES_USED + keyBytes + valueBytes;
}
/** An iterator implementation for {@link #iterator}. */
private final class EntryIterator extends AbstractIterator<LongFloatCursor> {
  // Single cursor instance, mutated and re-returned on every fetch() — callers must not retain it.
  private final LongFloatCursor cursor;
  private final int increment;
  private int index;
  private int slot;
  public EntryIterator() {
    cursor = new LongFloatCursor();
    // Randomized start slot and odd step so iteration order differs between iterators.
    int seed = nextIterationSeed();
    increment = iterationIncrement(seed);
    slot = seed & mask;
  }
  @Override
  protected LongFloatCursor fetch() {
    final int mask = LongFloatHashMap.this.mask;
    // Visit every regular slot exactly once.
    while (index <= mask) {
      long existing;
      index++;
      slot = (slot + increment) & mask;
      if (!((existing = keys[slot]) == 0)) {
        cursor.index = slot;
        cursor.key = existing;
        cursor.value = values[slot];
        return cursor;
      }
    }
    // Emit the out-of-band empty-key entry last, if present.
    if (index == mask + 1 && hasEmptyKey) {
      cursor.index = index;
      cursor.key = 0;
      cursor.value = values[index++];
      return cursor;
    }
    return done();
  }
}
/** Returns a cursor iterator over entries; the returned cursor object is reused across calls. */
@Override
public Iterator<LongFloatCursor> iterator() {
  return new EntryIterator();
}
/** Returns a specialized view of the keys of this associated container. The view is live. */
public KeysContainer keys() {
  return new KeysContainer();
}
/** A view of the keys inside this hash map. */
public final class KeysContainer implements Iterable<LongCursor> {
  @Override
  public Iterator<LongCursor> iterator() {
    return new KeysIterator();
  }
  public int size() {
    return LongFloatHashMap.this.size();
  }
  /** Copies all keys into a new array (iteration order, which is randomized per iterator). */
  public long[] toArray() {
    long[] array = new long[size()];
    int i = 0;
    for (LongCursor cursor : this) {
      // For LongCursor the key is carried in the cursor's value field.
      array[i++] = cursor.value;
    }
    return array;
  }
}
/** An iterator over the set of assigned keys. */
private final class KeysIterator extends AbstractIterator<LongCursor> {
  // Reused cursor instance; callers must not hold on to it between next() calls.
  private final LongCursor cursor;
  private final int increment;
  private int index;
  private int slot;
  public KeysIterator() {
    cursor = new LongCursor();
    // Randomized start slot and step, same scheme as EntryIterator.
    int seed = nextIterationSeed();
    increment = iterationIncrement(seed);
    slot = seed & mask;
  }
  @Override
  protected LongCursor fetch() {
    final int mask = LongFloatHashMap.this.mask;
    while (index <= mask) {
      long existing;
      index++;
      slot = (slot + increment) & mask;
      if (!((existing = keys[slot]) == 0)) {
        cursor.index = slot;
        cursor.value = existing;
        return cursor;
      }
    }
    // The empty key (0), if present, is reported last.
    if (index == mask + 1 && hasEmptyKey) {
      cursor.index = index++;
      cursor.value = 0;
      return cursor;
    }
    return done();
  }
}
/**
 * @return Returns a live container view with all values stored in this map.
 */
public ValuesContainer values() {
  return new ValuesContainer();
}
/** A view over the set of values of this map. */
public final class ValuesContainer implements Iterable<FloatCursor> {
  @Override
  public Iterator<FloatCursor> iterator() {
    return new ValuesIterator();
  }
  public int size() {
    return LongFloatHashMap.this.size();
  }
  /** Copies all values into a new array (per-iterator randomized order). */
  public float[] toArray() {
    float[] array = new float[size()];
    int i = 0;
    for (FloatCursor cursor : this) {
      array[i++] = cursor.value;
    }
    return array;
  }
}
/** An iterator over the set of assigned values. */
private final class ValuesIterator extends AbstractIterator<FloatCursor> {
  // Reused cursor instance, mutated on each fetch().
  private final FloatCursor cursor;
  private final int increment;
  private int index;
  private int slot;
  public ValuesIterator() {
    cursor = new FloatCursor();
    // Randomized start slot and step, same scheme as EntryIterator.
    int seed = nextIterationSeed();
    increment = iterationIncrement(seed);
    slot = seed & mask;
  }
  @Override
  protected FloatCursor fetch() {
    final int mask = LongFloatHashMap.this.mask;
    while (index <= mask) {
      index++;
      slot = (slot + increment) & mask;
      if (!((keys[slot]) == 0)) {
        cursor.index = slot;
        cursor.value = values[slot];
        return cursor;
      }
    }
    // The empty key's value, if present, is reported last.
    if (index == mask + 1 && hasEmptyKey) {
      cursor.index = index;
      cursor.value = values[index++];
      return cursor;
    }
    return done();
  }
}
/** Returns a deep copy: the backing arrays are cloned, so changes do not propagate. */
@Override
public LongFloatHashMap clone() {
  try {
    // Shallow clone via Object.clone(), then replace the shared arrays with copies.
    LongFloatHashMap cloned = (LongFloatHashMap) super.clone();
    cloned.keys = keys.clone();
    cloned.values = values.clone();
    cloned.hasEmptyKey = hasEmptyKey;
    // Fresh seed so the clone's iteration order is independent of the original's.
    cloned.iterationSeed = ITERATION_SEED.incrementAndGet();
    return cloned;
  } catch (CloneNotSupportedException e) {
    // Cannot happen: this class implements Cloneable.
    throw new RuntimeException(e);
  }
}
/** Convert the contents of this map to a human-friendly string, e.g. {@code [1=>2.0, 3=>4.0]}. */
@Override
public String toString() {
  final StringBuilder sb = new StringBuilder();
  sb.append("[");
  String separator = "";
  for (LongFloatCursor cursor : this) {
    sb.append(separator).append(cursor.key).append("=>").append(cursor.value);
    separator = ", ";
  }
  return sb.append("]").toString();
}
/** Creates a hash map from two index-aligned arrays of key-value pairs. */
public static LongFloatHashMap from(long[] keys, float[] values) {
  if (keys.length != values.length) {
    throw new IllegalArgumentException("Arrays of keys and values must have an identical length.");
  }
  final LongFloatHashMap map = new LongFloatHashMap(keys.length);
  for (int i = 0, length = keys.length; i < length; i++) {
    map.put(keys[i], values[i]);
  }
  return map;
}
/**
 * Returns a hash code for the given key.
 *
 * <p>The output from this function should evenly distribute keys across the entire integer range.
 */
protected int hashKey(long key) {
  assert !((key) == 0); // Handled as a special case (empty slot marker).
  return BitMixer.mixPhi(key);
}
/**
 * Validate load factor range and return it. Override and suppress if you need insane load
 * factors.
 */
protected double verifyLoadFactor(double loadFactor) {
  checkLoadFactor(loadFactor, MIN_LOAD_FACTOR, MAX_LOAD_FACTOR);
  return loadFactor;
}
/** Rehash from old buffers to new buffers. */
protected void rehash(long[] fromKeys, float[] fromValues) {
  assert fromKeys.length == fromValues.length && checkPowerOfTwo(fromKeys.length - 1);
  // Rehash all stored key/value pairs into the new buffers.
  final long[] keys = this.keys;
  final float[] values = this.values;
  final int mask = this.mask;
  long existing;
  // Copy the zero element's slot, then rehash everything else.
  int from = fromKeys.length - 1;
  keys[keys.length - 1] = fromKeys[from];
  values[values.length - 1] = fromValues[from];
  while (--from >= 0) {
    if (!((existing = fromKeys[from]) == 0)) {
      // Probe for the first free slot in the new buffers; no duplicates can exist.
      int slot = hashKey(existing) & mask;
      while (!((keys[slot]) == 0)) {
        slot = (slot + 1) & mask;
      }
      keys[slot] = existing;
      values[slot] = fromValues[from];
    }
  }
}
/**
 * Allocate new internal buffers. This method attempts to allocate and assign internal buffers
 * atomically (either allocations succeed or not).
 */
protected void allocateBuffers(int arraySize) {
  assert Integer.bitCount(arraySize) == 1; // Must be a power of two.
  // Ensure no change is done if we hit an OOM.
  long[] prevKeys = this.keys;
  float[] prevValues = this.values;
  try {
    // One extra slot holds the value of the empty key (0) out of band.
    int emptyElementSlot = 1;
    this.keys = (new long[arraySize + emptyElementSlot]);
    this.values = (new float[arraySize + emptyElementSlot]);
  } catch (OutOfMemoryError e) {
    // Roll back so the map stays usable at its previous capacity.
    this.keys = prevKeys;
    this.values = prevValues;
    throw new BufferAllocationException(
        "Not enough memory to allocate buffers for rehashing: %,d -> %,d",
        e, this.mask + 1, arraySize);
  }
  this.resizeAt = expandAtCount(arraySize, loadFactor);
  this.mask = arraySize - 1;
}
/**
 * This method is invoked when there is a new key/ value pair to be inserted into the buffers but
 * there is not enough empty slots to do so.
 *
 * <p>New buffers are allocated. If this succeeds, we know we can proceed with rehashing so we
 * assign the pending element to the previous buffer (possibly violating the invariant of having
 * at least one empty slot) and rehash all keys, substituting new buffers at the end.
 */
protected void allocateThenInsertThenRehash(int slot, long pendingKey, float pendingValue) {
  assert assigned == resizeAt && ((keys[slot]) == 0) && !((pendingKey) == 0);
  // Try to allocate new buffers first. If we OOM, we leave in a consistent state.
  final long[] prevKeys = this.keys;
  final float[] prevValues = this.values;
  allocateBuffers(nextBufferSize(mask + 1, size(), loadFactor));
  assert this.keys.length > prevKeys.length;
  // We have succeeded at allocating new data so insert the pending key/value at
  // the free slot in the old arrays before rehashing.
  prevKeys[slot] = pendingKey;
  prevValues[slot] = pendingValue;
  // Rehash old keys, including the pending key.
  rehash(prevKeys, prevValues);
}
/**
 * Shift all the slot-conflicting keys and values allocated to (and including) <code>slot</code>.
 *
 * <p>This is the classic open-addressing deletion: instead of tombstones, entries displaced past
 * the gap are moved back so every probe chain stays unbroken.
 */
protected void shiftConflictingKeys(int gapSlot) {
  final long[] keys = this.keys;
  final float[] values = this.values;
  final int mask = this.mask;
  // Perform shifts of conflicting keys to fill in the gap.
  int distance = 0;
  while (true) {
    final int slot = (gapSlot + (++distance)) & mask;
    final long existing = keys[slot];
    if (((existing) == 0)) {
      // End of the probe chain: no more entries can depend on the gap.
      break;
    }
    final int idealSlot = hashKey(existing);
    final int shift = (slot - idealSlot) & mask;
    if (shift >= distance) {
      // Entry at this position was originally at or before the gap slot.
      // Move the conflict-shifted entry to the gap's position and repeat the procedure
      // for any entries to the right of the current position, treating it
      // as the new gap.
      keys[gapSlot] = existing;
      values[gapSlot] = values[slot];
      gapSlot = slot;
      distance = 0;
    }
  }
  // Mark the last found gap slot without a conflict as empty.
  keys[gapSlot] = 0;
  values[gapSlot] = 0;
  assigned--;
}
/**
 * Forked from HPPC, holding int index, key and value.
 *
 * <p>Iterators of this map reuse a single cursor instance; copy the fields out if you need to
 * retain them beyond one iteration step.
 */
public static final class LongFloatCursor {
  /**
   * The current key and value's index in the container this cursor belongs to. The meaning of
   * this index is defined by the container (usually it will be an index in the underlying storage
   * buffer).
   */
  public int index;
  /** The current key. */
  public long key;
  /** The current value. */
  public float value;
  @Override
  public String toString() {
    return "[cursor, index: " + index + ", key: " + key + ", value: " + value + "]";
  }
}
}

View File

@ -15,9 +15,9 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.lucene.util.hppc; package org.apache.lucene.internal.hppc;
import static org.apache.lucene.util.hppc.HashContainers.*; import static org.apache.lucene.internal.hppc.HashContainers.*;
import java.util.Arrays; import java.util.Arrays;
import java.util.Iterator; import java.util.Iterator;
@ -30,7 +30,9 @@ import org.apache.lucene.util.RamUsageEstimator;
* *
* <p>Mostly forked and trimmed from com.carrotsearch.hppc.LongHashSet * <p>Mostly forked and trimmed from com.carrotsearch.hppc.LongHashSet
* *
* <p>github: https://github.com/carrotsearch/hppc release 0.9.0 * <p>github: https://github.com/carrotsearch/hppc release 0.10.0
*
* @lucene.internal
*/ */
public class LongHashSet implements Iterable<LongCursor>, Accountable, Cloneable { public class LongHashSet implements Iterable<LongCursor>, Accountable, Cloneable {

View File

@ -15,9 +15,7 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.lucene.util.hppc; package org.apache.lucene.internal.hppc;
import static org.apache.lucene.util.hppc.HashContainers.*;
import java.util.Arrays; import java.util.Arrays;
import java.util.Iterator; import java.util.Iterator;
@ -30,7 +28,9 @@ import org.apache.lucene.util.RamUsageEstimator;
* *
* <p>Mostly forked and trimmed from com.carrotsearch.hppc.LongIntHashMap * <p>Mostly forked and trimmed from com.carrotsearch.hppc.LongIntHashMap
* *
* <p>github: https://github.com/carrotsearch/hppc release 0.9.0 * <p>github: https://github.com/carrotsearch/hppc release 0.10.0
*
* @lucene.internal
*/ */
public class LongIntHashMap public class LongIntHashMap
implements Iterable<LongIntHashMap.LongIntCursor>, Accountable, Cloneable { implements Iterable<LongIntHashMap.LongIntCursor>, Accountable, Cloneable {
@ -69,7 +69,7 @@ public class LongIntHashMap
/** New instance with sane defaults. */ /** New instance with sane defaults. */
public LongIntHashMap() { public LongIntHashMap() {
this(DEFAULT_EXPECTED_ELEMENTS); this(HashContainers.DEFAULT_EXPECTED_ELEMENTS);
} }
/** /**
@ -79,7 +79,7 @@ public class LongIntHashMap
* expansion (inclusive). * expansion (inclusive).
*/ */
public LongIntHashMap(int expectedElements) { public LongIntHashMap(int expectedElements) {
this(expectedElements, DEFAULT_LOAD_FACTOR); this(expectedElements, HashContainers.DEFAULT_LOAD_FACTOR);
} }
/** /**
@ -92,7 +92,7 @@ public class LongIntHashMap
*/ */
public LongIntHashMap(int expectedElements, double loadFactor) { public LongIntHashMap(int expectedElements, double loadFactor) {
this.loadFactor = verifyLoadFactor(loadFactor); this.loadFactor = verifyLoadFactor(loadFactor);
iterationSeed = ITERATION_SEED.incrementAndGet(); iterationSeed = HashContainers.ITERATION_SEED.incrementAndGet();
ensureCapacity(expectedElements); ensureCapacity(expectedElements);
} }
@ -311,7 +311,7 @@ public class LongIntHashMap
} }
public boolean indexExists(int index) { public boolean indexExists(int index) {
assert index < 0 || (index >= 0 && index <= mask) || (index == mask + 1 && hasEmptyKey); assert index < 0 || index <= mask || (index == mask + 1 && hasEmptyKey);
return index >= 0; return index >= 0;
} }
@ -373,7 +373,7 @@ public class LongIntHashMap
assigned = 0; assigned = 0;
hasEmptyKey = false; hasEmptyKey = false;
Arrays.fill(keys, 0); Arrays.fill(keys, 0L);
/* */ /* */
} }
@ -384,7 +384,7 @@ public class LongIntHashMap
keys = null; keys = null;
values = null; values = null;
ensureCapacity(DEFAULT_EXPECTED_ELEMENTS); ensureCapacity(HashContainers.DEFAULT_EXPECTED_ELEMENTS);
} }
public int size() { public int size() {
@ -436,7 +436,7 @@ public class LongIntHashMap
if (expectedElements > resizeAt || keys == null) { if (expectedElements > resizeAt || keys == null) {
final long[] prevKeys = this.keys; final long[] prevKeys = this.keys;
final int[] prevValues = this.values; final int[] prevValues = this.values;
allocateBuffers(minBufferSize(expectedElements, loadFactor)); allocateBuffers(HashContainers.minBufferSize(expectedElements, loadFactor));
if (prevKeys != null && !isEmpty()) { if (prevKeys != null && !isEmpty()) {
rehash(prevKeys, prevValues); rehash(prevKeys, prevValues);
} }
@ -467,7 +467,7 @@ public class LongIntHashMap
public EntryIterator() { public EntryIterator() {
cursor = new LongIntCursor(); cursor = new LongIntCursor();
int seed = nextIterationSeed(); int seed = nextIterationSeed();
increment = iterationIncrement(seed); increment = HashContainers.iterationIncrement(seed);
slot = seed & mask; slot = seed & mask;
} }
@ -539,7 +539,7 @@ public class LongIntHashMap
public KeysIterator() { public KeysIterator() {
cursor = new LongCursor(); cursor = new LongCursor();
int seed = nextIterationSeed(); int seed = nextIterationSeed();
increment = iterationIncrement(seed); increment = HashContainers.iterationIncrement(seed);
slot = seed & mask; slot = seed & mask;
} }
@ -606,7 +606,7 @@ public class LongIntHashMap
public ValuesIterator() { public ValuesIterator() {
cursor = new IntCursor(); cursor = new IntCursor();
int seed = nextIterationSeed(); int seed = nextIterationSeed();
increment = iterationIncrement(seed); increment = HashContainers.iterationIncrement(seed);
slot = seed & mask; slot = seed & mask;
} }
@ -641,7 +641,7 @@ public class LongIntHashMap
cloned.keys = keys.clone(); cloned.keys = keys.clone();
cloned.values = values.clone(); cloned.values = values.clone();
cloned.hasEmptyKey = hasEmptyKey; cloned.hasEmptyKey = hasEmptyKey;
cloned.iterationSeed = ITERATION_SEED.incrementAndGet(); cloned.iterationSeed = HashContainers.ITERATION_SEED.incrementAndGet();
return cloned; return cloned;
} catch (CloneNotSupportedException e) { } catch (CloneNotSupportedException e) {
throw new RuntimeException(e); throw new RuntimeException(e);
@ -698,13 +698,15 @@ public class LongIntHashMap
* factors. * factors.
*/ */
protected double verifyLoadFactor(double loadFactor) { protected double verifyLoadFactor(double loadFactor) {
checkLoadFactor(loadFactor, MIN_LOAD_FACTOR, MAX_LOAD_FACTOR); HashContainers.checkLoadFactor(
loadFactor, HashContainers.MIN_LOAD_FACTOR, HashContainers.MAX_LOAD_FACTOR);
return loadFactor; return loadFactor;
} }
/** Rehash from old buffers to new buffers. */ /** Rehash from old buffers to new buffers. */
protected void rehash(long[] fromKeys, int[] fromValues) { protected void rehash(long[] fromKeys, int[] fromValues) {
assert fromKeys.length == fromValues.length && checkPowerOfTwo(fromKeys.length - 1); assert fromKeys.length == fromValues.length
&& HashContainers.checkPowerOfTwo(fromKeys.length - 1);
// Rehash all stored key/value pairs into the new buffers. // Rehash all stored key/value pairs into the new buffers.
final long[] keys = this.keys; final long[] keys = this.keys;
@ -750,7 +752,7 @@ public class LongIntHashMap
e, this.mask + 1, arraySize); e, this.mask + 1, arraySize);
} }
this.resizeAt = expandAtCount(arraySize, loadFactor); this.resizeAt = HashContainers.expandAtCount(arraySize, loadFactor);
this.mask = arraySize - 1; this.mask = arraySize - 1;
} }
@ -768,7 +770,7 @@ public class LongIntHashMap
// Try to allocate new buffers first. If we OOM, we leave in a consistent state. // Try to allocate new buffers first. If we OOM, we leave in a consistent state.
final long[] prevKeys = this.keys; final long[] prevKeys = this.keys;
final int[] prevValues = this.values; final int[] prevValues = this.values;
allocateBuffers(nextBufferSize(mask + 1, size(), loadFactor)); allocateBuffers(HashContainers.nextBufferSize(mask + 1, size(), loadFactor));
assert this.keys.length > prevKeys.length; assert this.keys.length > prevKeys.length;
// We have succeeded at allocating new data so insert the pending key/value at // We have succeeded at allocating new data so insert the pending key/value at
@ -818,7 +820,7 @@ public class LongIntHashMap
} }
/** Forked from HPPC, holding int index,key and value */ /** Forked from HPPC, holding int index,key and value */
public final class LongIntCursor { public static final class LongIntCursor {
/** /**
* The current key and value's index in the container this cursor belongs to. The meaning of * The current key and value's index in the container this cursor belongs to. The meaning of
* this index is defined by the container (usually it will be an index in the underlying storage * this index is defined by the container (usually it will be an index in the underlying storage

View File

@ -15,9 +15,9 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.lucene.util.hppc; package org.apache.lucene.internal.hppc;
import static org.apache.lucene.util.hppc.HashContainers.*; import static org.apache.lucene.internal.hppc.HashContainers.*;
import java.util.Arrays; import java.util.Arrays;
import java.util.Iterator; import java.util.Iterator;
@ -30,7 +30,9 @@ import org.apache.lucene.util.RamUsageEstimator;
* *
* <p>Mostly forked and trimmed from com.carrotsearch.hppc.LongObjectHashMap * <p>Mostly forked and trimmed from com.carrotsearch.hppc.LongObjectHashMap
* *
* <p>github: https://github.com/carrotsearch/hppc release 0.9.0 * <p>github: https://github.com/carrotsearch/hppc release 0.10.0
*
* @lucene.internal
*/ */
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public class LongObjectHashMap<VType> public class LongObjectHashMap<VType>
@ -278,7 +280,7 @@ public class LongObjectHashMap<VType>
} }
public boolean indexExists(int index) { public boolean indexExists(int index) {
assert index < 0 || (index >= 0 && index <= mask) || (index == mask + 1 && hasEmptyKey); assert index < 0 || index <= mask || (index == mask + 1 && hasEmptyKey);
return index >= 0; return index >= 0;
} }
@ -340,7 +342,7 @@ public class LongObjectHashMap<VType>
assigned = 0; assigned = 0;
hasEmptyKey = false; hasEmptyKey = false;
Arrays.fill(keys, 0); Arrays.fill(keys, 0L);
/* */ /* */
} }

View File

@ -15,9 +15,13 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.lucene.util.hppc; package org.apache.lucene.internal.hppc;
/** Forked from HPPC, holding int index and Object value */ /**
* Forked from HPPC, holding int index and Object value.
*
* @lucene.internal
*/
public final class ObjectCursor<VType> { public final class ObjectCursor<VType> {
/** /**
* The current value's index in the container this cursor belongs to. The meaning of this index is * The current value's index in the container this cursor belongs to. The meaning of this index is

View File

@ -15,5 +15,8 @@
* limitations under the License. * limitations under the License.
*/ */
/** package holding hppc related classes. */ /**
package org.apache.lucene.util.hppc; * Internal copy of a subset of classes from the <a
* href="https://github.com/carrotsearch/hppc/">HPPC</a> library.
*/
package org.apache.lucene.internal.hppc;

View File

@ -33,12 +33,12 @@ import org.apache.lucene.index.TermState;
import org.apache.lucene.index.TermStates; import org.apache.lucene.index.TermStates;
import org.apache.lucene.index.Terms; import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum; import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.internal.hppc.IntArrayList;
import org.apache.lucene.search.similarities.Similarity; import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.search.similarities.Similarity.SimScorer; import org.apache.lucene.search.similarities.Similarity.SimScorer;
import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.PriorityQueue; import org.apache.lucene.util.PriorityQueue;
import org.apache.lucene.util.hppc.IntArrayList;
/** /**
* A generalized version of {@link PhraseQuery}, with the possibility of adding more than one term * A generalized version of {@link PhraseQuery}, with the possibility of adding more than one term

View File

@ -33,11 +33,11 @@ import org.apache.lucene.index.TermState;
import org.apache.lucene.index.TermStates; import org.apache.lucene.index.TermStates;
import org.apache.lucene.index.Terms; import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum; import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.internal.hppc.IntArrayList;
import org.apache.lucene.search.similarities.Similarity; import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.search.similarities.Similarity.SimScorer; import org.apache.lucene.search.similarities.Similarity.SimScorer;
import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.hppc.IntArrayList;
/** /**
* A Query that matches documents containing a particular sequence of terms. A PhraseQuery is built * A Query that matches documents containing a particular sequence of terms. A PhraseQuery is built

View File

@ -29,9 +29,9 @@ import org.apache.lucene.index.Impact;
import org.apache.lucene.index.Impacts; import org.apache.lucene.index.Impacts;
import org.apache.lucene.index.ImpactsSource; import org.apache.lucene.index.ImpactsSource;
import org.apache.lucene.index.Term; import org.apache.lucene.index.Term;
import org.apache.lucene.internal.hppc.IntHashSet;
import org.apache.lucene.search.similarities.Similarity.SimScorer; import org.apache.lucene.search.similarities.Similarity.SimScorer;
import org.apache.lucene.util.FixedBitSet; import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.hppc.IntHashSet;
/** /**
* Find all slop-valid position-combinations (matches) encountered while traversing/hopping the * Find all slop-valid position-combinations (matches) encountered while traversing/hopping the

View File

@ -19,12 +19,12 @@ package org.apache.lucene.util.automaton;
import java.util.Arrays; import java.util.Arrays;
import java.util.BitSet; import java.util.BitSet;
import java.util.Objects; import java.util.Objects;
import org.apache.lucene.internal.hppc.IntHashSet;
import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.InPlaceMergeSorter; import org.apache.lucene.util.InPlaceMergeSorter;
import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.Sorter; import org.apache.lucene.util.Sorter;
import org.apache.lucene.util.hppc.IntHashSet;
// TODO // TODO
// - could use packed int arrays instead // - could use packed int arrays instead

View File

@ -22,6 +22,7 @@ import org.apache.lucene.index.SingleTermsEnum;
import org.apache.lucene.index.Term; import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms; import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum; import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.internal.hppc.IntArrayList;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.QueryVisitor;
import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountable;
@ -31,7 +32,6 @@ import org.apache.lucene.util.IntsRef;
import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.StringHelper; import org.apache.lucene.util.StringHelper;
import org.apache.lucene.util.UnicodeUtil; import org.apache.lucene.util.UnicodeUtil;
import org.apache.lucene.util.hppc.IntArrayList;
/** /**
* Immutable class holding compiled details for a given Automaton. The Automaton could either be * Immutable class holding compiled details for a given Automaton. The Automaton could either be

View File

@ -17,8 +17,8 @@
package org.apache.lucene.util.automaton; package org.apache.lucene.util.automaton;
import java.util.Arrays; import java.util.Arrays;
import org.apache.lucene.internal.hppc.IntHashSet;
import org.apache.lucene.util.UnicodeUtil; import org.apache.lucene.util.UnicodeUtil;
import org.apache.lucene.util.hppc.IntHashSet;
/** /**
* Class to construct DFAs that match a word within some edit distance. * Class to construct DFAs that match a word within some edit distance.

View File

@ -20,8 +20,8 @@ package org.apache.lucene.util.automaton;
import java.util.Arrays; import java.util.Arrays;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import org.apache.lucene.internal.hppc.BitMixer;
import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.hppc.BitMixer;
/** /**
* A RunAutomaton that does not require DFA. It will lazily determinize on-demand, memorizing the * A RunAutomaton that does not require DFA. It will lazily determinize on-demand, memorizing the

View File

@ -39,6 +39,10 @@ import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import org.apache.lucene.internal.hppc.BitMixer;
import org.apache.lucene.internal.hppc.IntCursor;
import org.apache.lucene.internal.hppc.IntHashSet;
import org.apache.lucene.internal.hppc.IntObjectHashMap;
import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
@ -47,10 +51,6 @@ import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.IntsRef; import org.apache.lucene.util.IntsRef;
import org.apache.lucene.util.IntsRefBuilder; import org.apache.lucene.util.IntsRefBuilder;
import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.hppc.BitMixer;
import org.apache.lucene.util.hppc.IntCursor;
import org.apache.lucene.util.hppc.IntHashSet;
import org.apache.lucene.util.hppc.IntObjectHashMap;
/** /**
* Automata operations. * Automata operations.

View File

@ -18,9 +18,9 @@
package org.apache.lucene.util.automaton; package org.apache.lucene.util.automaton;
import java.util.Arrays; import java.util.Arrays;
import org.apache.lucene.util.hppc.BitMixer; import org.apache.lucene.internal.hppc.BitMixer;
import org.apache.lucene.util.hppc.IntCursor; import org.apache.lucene.internal.hppc.IntCursor;
import org.apache.lucene.util.hppc.IntIntHashMap; import org.apache.lucene.internal.hppc.IntIntHashMap;
/** /**
* A thin wrapper of {@link IntIntHashMap} Maps from state in integer representation to its * A thin wrapper of {@link IntIntHashMap} Maps from state in integer representation to its

View File

@ -17,7 +17,7 @@
package org.apache.lucene.util.automaton; package org.apache.lucene.util.automaton;
import java.util.Arrays; import java.util.Arrays;
import org.apache.lucene.util.hppc.IntArrayList; import org.apache.lucene.internal.hppc.IntArrayList;
// TODO // TODO
// - do we really need the .bits...? if not we can make util in UnicodeUtil to convert 1 char // - do we really need the .bits...? if not we can make util in UnicodeUtil to convert 1 char

View File

@ -28,6 +28,7 @@ import org.apache.lucene.index.MergeState;
import org.apache.lucene.index.PointValues; import org.apache.lucene.index.PointValues;
import org.apache.lucene.index.PointValues.IntersectVisitor; import org.apache.lucene.index.PointValues.IntersectVisitor;
import org.apache.lucene.index.PointValues.Relation; import org.apache.lucene.index.PointValues.Relation;
import org.apache.lucene.internal.hppc.LongArrayList;
import org.apache.lucene.store.ByteBuffersDataOutput; import org.apache.lucene.store.ByteBuffersDataOutput;
import org.apache.lucene.store.ChecksumIndexInput; import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.DataOutput; import org.apache.lucene.store.DataOutput;
@ -44,7 +45,6 @@ import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.util.PriorityQueue; import org.apache.lucene.util.PriorityQueue;
import org.apache.lucene.util.bkd.BKDUtil.ByteArrayPredicate; import org.apache.lucene.util.bkd.BKDUtil.ByteArrayPredicate;
import org.apache.lucene.util.hppc.LongArrayList;
// TODO // TODO
// - allow variable length byte[] (across docs and dims), but this is quite a bit more hairy // - allow variable length byte[] (across docs and dims), but this is quite a bit more hairy

View File

@ -26,14 +26,14 @@ import java.util.Comparator;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.TreeSet; import java.util.TreeSet;
import org.apache.lucene.internal.hppc.IntArrayList;
import org.apache.lucene.internal.hppc.IntCursor;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.IntsRef; import org.apache.lucene.util.IntsRef;
import org.apache.lucene.util.IntsRefBuilder; import org.apache.lucene.util.IntsRefBuilder;
import org.apache.lucene.util.fst.FST.Arc; import org.apache.lucene.util.fst.FST.Arc;
import org.apache.lucene.util.fst.FST.BytesReader; import org.apache.lucene.util.fst.FST.BytesReader;
import org.apache.lucene.util.hppc.IntArrayList;
import org.apache.lucene.util.hppc.IntCursor;
/** /**
* Static helper methods. * Static helper methods.

View File

@ -30,6 +30,7 @@ import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute; import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute;
import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute; import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
import org.apache.lucene.index.Term; import org.apache.lucene.index.Term;
import org.apache.lucene.internal.hppc.IntArrayList;
import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.AttributeSource; import org.apache.lucene.util.AttributeSource;
import org.apache.lucene.util.IntsRef; import org.apache.lucene.util.IntsRef;
@ -37,7 +38,6 @@ import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.FiniteStringsIterator; import org.apache.lucene.util.automaton.FiniteStringsIterator;
import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.Operations;
import org.apache.lucene.util.automaton.Transition; import org.apache.lucene.util.automaton.Transition;
import org.apache.lucene.util.hppc.IntArrayList;
/** /**
* Consumes a TokenStream and creates an {@link Automaton} where the transition labels are terms * Consumes a TokenStream and creates an {@link Automaton} where the transition labels are terms

View File

@ -25,8 +25,8 @@ import java.util.Iterator;
import java.util.NoSuchElementException; import java.util.NoSuchElementException;
import java.util.PrimitiveIterator; import java.util.PrimitiveIterator;
import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.FloatVectorValues;
import org.apache.lucene.util.hppc.IntArrayList; import org.apache.lucene.internal.hppc.IntArrayList;
import org.apache.lucene.util.hppc.IntCursor; import org.apache.lucene.internal.hppc.IntCursor;
/** /**
* Hierarchical Navigable Small World graph. Provides efficient approximate nearest neighbor search * Hierarchical Navigable Small World graph. Provides efficient approximate nearest neighbor search

View File

@ -26,12 +26,12 @@ import org.apache.lucene.index.ByteVectorValues;
import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.FloatVectorValues;
import org.apache.lucene.index.MergeState; import org.apache.lucene.index.MergeState;
import org.apache.lucene.internal.hppc.IntIntHashMap;
import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.util.BitSet; import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.Bits; import org.apache.lucene.util.Bits;
import org.apache.lucene.util.FixedBitSet; import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.InfoStream; import org.apache.lucene.util.InfoStream;
import org.apache.lucene.util.hppc.IntIntHashMap;
/** /**
* This selects the biggest Hnsw graph from the provided merge state and initializes a new * This selects the biggest Hnsw graph from the provided merge state and initializes a new

View File

@ -21,10 +21,10 @@ import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS;
import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.atomic.AtomicReference;
import org.apache.lucene.internal.hppc.IntArrayList;
import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.hppc.IntArrayList;
/** /**
* An {@link HnswGraph} where all nodes and connections are held in memory. This class is used to * An {@link HnswGraph} where all nodes and connections are held in memory. This class is used to

View File

@ -15,7 +15,7 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.lucene.util.hppc; package org.apache.lucene.internal.hppc;
import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;

View File

@ -15,7 +15,7 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.lucene.util.hppc; package org.apache.lucene.internal.hppc;
import com.carrotsearch.randomizedtesting.RandomizedTest; import com.carrotsearch.randomizedtesting.RandomizedTest;
import java.util.Arrays; import java.util.Arrays;

View File

@ -0,0 +1,460 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.internal.hppc;
import java.util.Arrays;
import java.util.Iterator;
import org.apache.lucene.tests.util.LuceneTestCase;
import org.junit.Before;
import org.junit.Test;
/**
 * Tests for {@link FloatArrayList}.
 *
 * <p>Mostly forked and trimmed from com.carrotsearch.hppc.FloatArrayListTest
 *
 * <p>github: https://github.com/carrotsearch/hppc release: 0.10.0
 */
public class TestFloatArrayList extends LuceneTestCase {
  // Ready-to-use element values; produced via cast() so the fork stays close
  // to the upstream HPPC template-generated test structure.
  private final float key0 = cast(0);
  private final float key1 = cast(1);
  private final float key2 = cast(2);
  private final float key3 = cast(3);
  private final float key4 = cast(4);
  private final float key5 = cast(5);
  private final float key6 = cast(6);
  private final float key7 = cast(7);

  /** Convert to target type from an integer used to test stuff. */
  private float cast(int v) {
    return v;
  }

  /** Per-test fresh initialized instance. */
  private FloatArrayList list;

  @Before
  public void initialize() {
    list = new FloatArrayList();
  }

  @Test
  public void testInitiallyEmpty() {
    assertEquals(0, list.size());
  }

  @Test
  public void testAdd() {
    list.add(key1, key2);
    assertListEquals(list.toArray(), 1, 2);
  }

  @Test
  public void testAddTwoArgs() {
    list.add(key1, key2);
    list.add(key3, key4);
    assertListEquals(list.toArray(), 1, 2, 3, 4);
  }

  @Test
  public void testAddArray() {
    // add(array, offset, length): copies a slice of the source array.
    list.add(asArray(0, 1, 2, 3), 1, 2);
    assertListEquals(list.toArray(), 1, 2);
  }

  @Test
  public void testAddVarArg() {
    list.add(asArray(0, 1, 2, 3));
    list.add(key4, key5, key6, key7);
    assertListEquals(list.toArray(), 0, 1, 2, 3, 4, 5, 6, 7);
  }

  @Test
  public void testAddAll() {
    FloatArrayList list2 = new FloatArrayList();
    list2.add(asArray(0, 1, 2));
    list.addAll(list2);
    list.addAll(list2);
    assertListEquals(list.toArray(), 0, 1, 2, 0, 1, 2);
  }

  @Test
  public void testInsert() {
    list.insert(0, key1);
    list.insert(0, key2);
    list.insert(2, key3);
    list.insert(1, key4);
    assertListEquals(list.toArray(), 2, 4, 1, 3);
  }

  @Test
  public void testSet() {
    list.add(asArray(0, 1, 2));
    // set() returns the previous element at the index.
    assertEquals(0, list.set(0, key3));
    assertEquals(1, list.set(1, key4));
    assertEquals(2, list.set(2, key5));
    assertListEquals(list.toArray(), 3, 4, 5);
  }

  @Test
  public void testRemoveAt() {
    list.add(asArray(0, 1, 2, 3, 4));
    list.removeAt(0);
    list.removeAt(2);
    list.removeAt(1);
    assertListEquals(list.toArray(), 1, 4);
  }

  @Test
  public void testRemoveLast() {
    list.add(asArray(0, 1, 2, 3, 4));
    assertEquals(4, list.removeLast());
    assertEquals(4, list.size());
    assertListEquals(list.toArray(), 0, 1, 2, 3);
    assertEquals(3, list.removeLast());
    assertEquals(3, list.size());
    assertListEquals(list.toArray(), 0, 1, 2);
    assertEquals(2, list.removeLast());
    assertEquals(1, list.removeLast());
    assertEquals(0, list.removeLast());
    assertTrue(list.isEmpty());
  }

  @Test
  public void testRemoveElement() {
    list.add(asArray(0, 1, 2, 3, 3, 4));
    // removeElement() removes only the first occurrence and reports success.
    assertTrue(list.removeElement(3));
    assertTrue(list.removeElement(2));
    assertFalse(list.removeElement(5));
    assertListEquals(list.toArray(), 0, 1, 3, 4);
  }

  @Test
  public void testRemoveRange() {
    list.add(asArray(0, 1, 2, 3, 4));
    list.removeRange(0, 2);
    assertListEquals(list.toArray(), 2, 3, 4);
    list.removeRange(2, 3);
    assertListEquals(list.toArray(), 2, 3);
    // Empty range: no-op.
    list.removeRange(1, 1);
    assertListEquals(list.toArray(), 2, 3);
    list.removeRange(0, 1);
    assertListEquals(list.toArray(), 3);
  }

  @Test
  public void testRemoveFirstLast() {
    list.add(asArray(0, 1, 2, 1, 0));
    // removeFirst()/removeLast(elem) return the removed index, or -1 if absent.
    assertEquals(-1, list.removeFirst(key5));
    assertEquals(-1, list.removeLast(key5));
    assertListEquals(list.toArray(), 0, 1, 2, 1, 0);
    assertEquals(1, list.removeFirst(key1));
    assertListEquals(list.toArray(), 0, 2, 1, 0);
    assertEquals(3, list.removeLast(key0));
    assertListEquals(list.toArray(), 0, 2, 1);
    assertEquals(0, list.removeLast(key0));
    assertListEquals(list.toArray(), 2, 1);
    assertEquals(-1, list.removeLast(key0));
  }

  @Test
  public void testRemoveAll() {
    list.add(asArray(0, 1, 0, 1, 0));
    // removeAll() returns the number of removed occurrences.
    assertEquals(0, list.removeAll(key2));
    assertEquals(3, list.removeAll(key0));
    assertListEquals(list.toArray(), 1, 1);
    assertEquals(2, list.removeAll(key1));
    assertTrue(list.isEmpty());
  }

  @Test
  public void testIndexOf() {
    list.add(asArray(0, 1, 2, 1, 0));
    assertEquals(0, list.indexOf(key0));
    assertEquals(-1, list.indexOf(key3));
    assertEquals(2, list.indexOf(key2));
  }

  @Test
  public void testLastIndexOf() {
    list.add(asArray(0, 1, 2, 1, 0));
    assertEquals(4, list.lastIndexOf(key0));
    assertEquals(-1, list.lastIndexOf(key3));
    assertEquals(2, list.lastIndexOf(key2));
  }

  @Test
  public void testEnsureCapacity() {
    // Deliberately shadows the field: we need a zero-capacity instance to
    // observe the buffer being reallocated.
    FloatArrayList list = new FloatArrayList(0);
    assertEquals(list.size(), list.buffer.length);
    float[] buffer1 = list.buffer;
    list.ensureCapacity(100);
    assertNotSame(buffer1, list.buffer);
  }

  @Test
  public void testResizeAndCleanBuffer() {
    // Growing via resize() must zero the newly exposed tail of the buffer,
    // and shrinking must zero the now-unused slots past the new size.
    list.ensureCapacity(20);
    Arrays.fill(list.buffer, key1);
    list.resize(10);
    assertEquals(10, list.size());
    for (int i = 0; i < list.size(); i++) {
      assertEquals(0, list.get(i));
    }
    Arrays.fill(list.buffer, 0);
    for (int i = 5; i < list.size(); i++) {
      list.set(i, key1);
    }
    list.resize(5);
    assertEquals(5, list.size());
    for (int i = list.size(); i < list.buffer.length; i++) {
      assertEquals(0, list.buffer[i]);
    }
  }

  @Test
  public void testTrimToSize() {
    list.add(asArray(1, 2));
    list.trimToSize();
    assertEquals(2, list.buffer.length);
  }

  @Test
  public void testRelease() {
    list.add(asArray(1, 2));
    list.release();
    assertEquals(0, list.size());
    // The list must remain usable after release().
    list.add(asArray(1, 2));
    assertEquals(2, list.size());
  }

  @Test
  public void testIterable() {
    list.add(asArray(0, 1, 2, 3));
    int count = 0;
    for (FloatCursor cursor : list) {
      count++;
      assertEquals(list.get(cursor.index), cursor.value);
      assertEquals(list.buffer[cursor.index], cursor.value);
    }
    assertEquals(count, list.size());
    count = 0;
    list.resize(0);
    for (@SuppressWarnings("unused") FloatCursor cursor : list) {
      count++;
    }
    assertEquals(0, count);
  }

  @Test
  public void testIterator() {
    list.add(asArray(0, 1, 2, 3));
    Iterator<FloatCursor> iterator = list.iterator();
    int count = 0;
    while (iterator.hasNext()) {
      // hasNext() must be idempotent: repeated calls may not advance.
      iterator.hasNext();
      iterator.hasNext();
      iterator.hasNext();
      iterator.next();
      count++;
    }
    assertEquals(count, list.size());
    list.resize(0);
    assertFalse(list.iterator().hasNext());
  }

  @Test
  public void testClear() {
    list.add(asArray(1, 2, 3));
    list.clear();
    assertTrue(list.isEmpty());
    assertEquals(-1, list.indexOf(cast(1)));
  }

  @Test
  public void testFrom() {
    list = FloatArrayList.from(key1, key2, key3);
    assertEquals(3, list.size());
    assertListEquals(list.toArray(), 1, 2, 3);
    // from() produces an exactly-sized backing buffer.
    assertEquals(list.size(), list.buffer.length);
  }

  @Test
  public void testCopyList() {
    list.add(asArray(1, 2, 3));
    FloatArrayList copy = new FloatArrayList(list);
    assertEquals(3, copy.size());
    assertListEquals(copy.toArray(), 1, 2, 3);
    assertEquals(copy.size(), copy.buffer.length);
  }

  @Test
  public void testHashCodeEquals() {
    FloatArrayList l0 = FloatArrayList.from();
    assertEquals(1, l0.hashCode());
    assertEquals(l0, FloatArrayList.from());
    FloatArrayList l1 = FloatArrayList.from(key1, key2, key3);
    FloatArrayList l2 = FloatArrayList.from(key1, key2);
    l2.add(key3);
    assertEquals(l1.hashCode(), l2.hashCode());
    assertEquals(l1, l2);
  }

  @Test
  public void testEqualElements() {
    FloatArrayList l1 = FloatArrayList.from(key1, key2, key3);
    FloatArrayList l2 = FloatArrayList.from(key1, key2);
    l2.add(key3);
    assertEquals(l1.hashCode(), l2.hashCode());
    assertTrue(l2.equalElements(l1));
  }

  @Test
  public void testToArray() {
    FloatArrayList l1 = FloatArrayList.from(key1, key2, key3);
    // Excess capacity must not leak into the returned array.
    l1.ensureCapacity(100);
    float[] result = l1.toArray();
    assertArrayEquals(new float[] {key1, key2, key3}, result);
  }

  @Test
  public void testClone() {
    list.add(key1, key2, key3);
    FloatArrayList cloned = list.clone();
    // Mutating the clone must not affect the original.
    cloned.removeAt(cloned.indexOf(key1));
    assertSortedListEquals(list.toArray(), key1, key2, key3);
    assertSortedListEquals(cloned.toArray(), key2, key3);
  }

  @Test
  public void testToString() {
    assertEquals(
        "[" + key1 + ", " + key2 + ", " + key3 + "]",
        FloatArrayList.from(key1, key2, key3).toString());
  }

  @Test
  public void testEqualsSameClass() {
    FloatArrayList l1 = FloatArrayList.from(key1, key2, key3);
    FloatArrayList l2 = FloatArrayList.from(key1, key2, key3);
    FloatArrayList l3 = FloatArrayList.from(key1, key3, key2);
    assertEquals(l1, l2);
    assertEquals(l1.hashCode(), l2.hashCode());
    assertNotEquals(l1, l3);
  }

  @Test
  public void testEqualsSubClass() {
    // equals() requires the exact same class, so a subclass never equals the base.
    class Sub extends FloatArrayList {}

    FloatArrayList l1 = FloatArrayList.from(key1, key2, key3);
    FloatArrayList l2 = new Sub();
    FloatArrayList l3 = new Sub();
    l2.addAll(l1);
    l3.addAll(l1);
    assertEquals(l2, l3);
    assertNotEquals(l1, l3);
  }

  @Test
  public void testSort() {
    list.add(key3, key1, key3, key2);
    FloatArrayList list2 = new FloatArrayList();
    // Extra capacity must not disturb sorting of the occupied range.
    list2.ensureCapacity(100);
    list2.addAll(list);
    assertSame(list2, list2.sort());
    assertEquals(FloatArrayList.from(key1, key2, key3, key3), list2);
  }

  @Test
  public void testReverse() {
    for (int i = 0; i < 10; i++) {
      float[] elements = new float[i];
      for (int j = 0; j < i; j++) {
        elements[j] = cast(j);
      }
      FloatArrayList list = new FloatArrayList();
      // Extra capacity must not disturb reversal of the occupied range.
      list.ensureCapacity(30);
      list.add(elements);
      assertSame(list, list.reverse());
      assertEquals(elements.length, list.size());
      int reverseIndex = elements.length - 1;
      for (FloatCursor cursor : list) {
        assertEquals(elements[reverseIndex--], cursor.value);
      }
    }
  }

  /** Check if the array's content is identical to a given sequence of elements. */
  private static void assertListEquals(float[] array, float... elements) {
    assertEquals(elements.length, array.length);
    assertArrayEquals(elements, array);
  }

  private static float[] asArray(float... elements) {
    return elements;
  }

  /** Check if the array's content is identical to a given sequence of elements. */
  private static void assertSortedListEquals(float[] array, float... elements) {
    assertEquals(elements.length, array.length);
    Arrays.sort(array);
    Arrays.sort(elements);
    assertArrayEquals(elements, array);
  }

  // Float overloads with zero tolerance: these tests assert exact values.
  private static void assertEquals(float f1, float f2) {
    assertEquals(f1, f2, 0f);
  }

  private static void assertArrayEquals(float[] f1, float[] f2) {
    assertArrayEquals(f1, f2, 0f);
  }
}

View File

@ -15,7 +15,7 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.lucene.util.hppc; package org.apache.lucene.internal.hppc;
import java.util.Arrays; import java.util.Arrays;
import java.util.Iterator; import java.util.Iterator;
@ -418,13 +418,12 @@ public class TestIntArrayList extends LuceneTestCase {
@Test @Test
public void testSort() { public void testSort() {
list.add(key1, key3, key2); list.add(key3, key1, key3, key2);
IntArrayList list2 = new IntArrayList(); IntArrayList list2 = new IntArrayList();
list2.ensureCapacity(100); list2.ensureCapacity(100);
list2.addAll(list); list2.addAll(list);
assertSame(list2, list2.sort()); assertSame(list2, list2.sort());
assertEquals(IntArrayList.from(list.stream().sorted().toArray()), list2); assertEquals(IntArrayList.from(key1, key2, key3, key3), list2);
} }
@Test @Test

View File

@ -0,0 +1,654 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.internal.hppc;
import com.carrotsearch.randomizedtesting.RandomizedTest;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Random;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.lucene.tests.util.LuceneTestCase;
import org.junit.Test;
/**
* Tests for {@link IntDoubleHashMap}.
*
* <p>Mostly forked and trimmed from com.carrotsearch.hppc.IntDoubleHashMapTest
*
* <p>github: https://github.com/carrotsearch/hppc release: 0.9.0
*/
public class TestIntDoubleHashMap extends LuceneTestCase {
/* Ready to use key values. */
private final int keyE = 0;
private final int key1 = cast(1);
private final int key2 = cast(2);
private final int key3 = cast(3);
private final int key4 = cast(4);
/** Convert to target type from an integer used to test stuff. */
private int cast(int v) {
return v;
}
/** Create a new array of a given type and copy the arguments to this array. */
private int[] newArray(int... elements) {
return elements;
}
private static int randomIntBetween(int min, int max) {
return min + random().nextInt(max + 1 - min);
}
/** Check if the array's content is identical to a given sequence of elements. */
private static void assertSortedListEquals(int[] array, int... elements) {
assertEquals(elements.length, array.length);
Arrays.sort(array);
Arrays.sort(elements);
assertArrayEquals(elements, array);
}
/** Check if the array's content is identical to a given sequence of elements. */
private static void assertSortedListEquals(double[] array, double... elements) {
assertEquals(elements.length, array.length);
Arrays.sort(array);
Arrays.sort(elements);
assertArrayEquals(elements, array);
}
private final int value0 = vcast(0);
private final int value1 = vcast(1);
private final int value2 = vcast(2);
private final int value3 = vcast(3);
private final int value4 = vcast(4);
/** Per-test fresh initialized instance. */
private IntDoubleHashMap map = newInstance();
private IntDoubleHashMap newInstance() {
return new IntDoubleHashMap();
}
/** Convert to target type from an integer used to test stuff. */
private int vcast(int value) {
return value;
}
/** Create a new array of a given type and copy the arguments to this array. */
/* */
private double[] newvArray(int... elements) {
double[] v = new double[elements.length];
for (int i = 0; i < elements.length; i++) {
v[i] = elements[i];
}
return v;
}
private void assertSameMap(final IntDoubleHashMap c1, final IntDoubleHashMap c2) {
assertEquals(c1.size(), c2.size());
for (IntDoubleHashMap.IntDoubleCursor entry : c1) {
assertTrue(c2.containsKey(entry.key));
assertEquals2(entry.value, c2.get(entry.key));
}
}
private static void assertEquals2(double v1, double v2) {
assertEquals(v1, v2, 0f);
}
private static void assertArrayEquals(double[] v1, double[] v2) {
assertArrayEquals(v1, v2, 0f);
}
/* */
@Test
public void testEnsureCapacity() {
final AtomicInteger expands = new AtomicInteger();
IntDoubleHashMap map =
new IntDoubleHashMap(0) {
@Override
protected void allocateBuffers(int arraySize) {
super.allocateBuffers(arraySize);
expands.incrementAndGet();
}
};
// Add some elements.
final int max = rarely() ? 0 : randomIntBetween(0, 250);
for (int i = 0; i < max; i++) {
map.put(cast(i), value0);
}
final int additions = randomIntBetween(max, max + 5000);
map.ensureCapacity(additions + map.size());
final int before = expands.get();
for (int i = 0; i < additions; i++) {
map.put(cast(i), value0);
}
assertEquals(before, expands.get());
}
@Test
public void testCursorIndexIsValid() {
map.put(keyE, value1);
map.put(key1, value2);
map.put(key2, value3);
for (IntDoubleHashMap.IntDoubleCursor c : map) {
assertTrue(map.indexExists(c.index));
assertEquals2(c.value, map.indexGet(c.index));
}
}
@Test
public void testIndexMethods() {
map.put(keyE, value1);
map.put(key1, value2);
assertTrue(map.indexOf(keyE) >= 0);
assertTrue(map.indexOf(key1) >= 0);
assertTrue(map.indexOf(key2) < 0);
assertTrue(map.indexExists(map.indexOf(keyE)));
assertTrue(map.indexExists(map.indexOf(key1)));
assertFalse(map.indexExists(map.indexOf(key2)));
assertEquals2(value1, map.indexGet(map.indexOf(keyE)));
assertEquals2(value2, map.indexGet(map.indexOf(key1)));
expectThrows(
AssertionError.class,
() -> {
map.indexGet(map.indexOf(key2));
});
assertEquals2(value1, map.indexReplace(map.indexOf(keyE), value3));
assertEquals2(value2, map.indexReplace(map.indexOf(key1), value4));
assertEquals2(value3, map.indexGet(map.indexOf(keyE)));
assertEquals2(value4, map.indexGet(map.indexOf(key1)));
map.indexInsert(map.indexOf(key2), key2, value1);
assertEquals2(value1, map.indexGet(map.indexOf(key2)));
assertEquals(3, map.size());
assertEquals2(value3, map.indexRemove(map.indexOf(keyE)));
assertEquals(2, map.size());
assertEquals2(value1, map.indexRemove(map.indexOf(key2)));
assertEquals(1, map.size());
assertTrue(map.indexOf(keyE) < 0);
assertTrue(map.indexOf(key1) >= 0);
assertTrue(map.indexOf(key2) < 0);
}
/* */
@Test
public void testCloningConstructor() {
map.put(key1, value1);
map.put(key2, value2);
map.put(key3, value3);
assertSameMap(map, new IntDoubleHashMap(map));
}
/* */
@Test
public void testFromArrays() {
map.put(key1, value1);
map.put(key2, value2);
map.put(key3, value3);
IntDoubleHashMap map2 =
IntDoubleHashMap.from(newArray(key1, key2, key3), newvArray(value1, value2, value3));
assertSameMap(map, map2);
}
@Test
public void testGetOrDefault() {
map.put(key2, value2);
assertTrue(map.containsKey(key2));
map.put(key1, value1);
assertEquals2(value1, map.getOrDefault(key1, value3));
assertEquals2(value3, map.getOrDefault(key3, value3));
map.remove(key1);
assertEquals2(value3, map.getOrDefault(key1, value3));
}
/* */
@Test
public void testPut() {
map.put(key1, value1);
assertTrue(map.containsKey(key1));
assertEquals2(value1, map.get(key1));
}
/* */
@Test
public void testPutOverExistingKey() {
map.put(key1, value1);
assertEquals2(value1, map.put(key1, value3));
assertEquals2(value3, map.get(key1));
}
/* */
@Test
public void testPutWithExpansions() {
final int COUNT = 10000;
final Random rnd = new Random(random().nextInt());
final HashSet<Object> values = new HashSet<Object>();
for (int i = 0; i < COUNT; i++) {
final int v = rnd.nextInt();
final boolean hadKey = values.contains(cast(v));
values.add(cast(v));
assertEquals(hadKey, map.containsKey(cast(v)));
map.put(cast(v), vcast(v));
assertEquals(values.size(), map.size());
}
assertEquals(values.size(), map.size());
}
/* */
@Test
public void testPutAll() {
map.put(key1, value1);
map.put(key2, value1);
IntDoubleHashMap map2 = newInstance();
map2.put(key2, value2);
map2.put(keyE, value1);
// One new key (keyE).
assertEquals(1, map.putAll(map2));
// Assert the value under key2 has been replaced.
assertEquals2(value2, map.get(key2));
// And key3 has been added.
assertEquals2(value1, map.get(keyE));
assertEquals(3, map.size());
}
/* */
@Test
public void testPutIfAbsent() {
assertTrue(map.putIfAbsent(key1, value1));
assertFalse(map.putIfAbsent(key1, value2));
assertEquals2(value1, map.get(key1));
}
@Test
public void testPutOrAdd() {
assertEquals2(value1, map.putOrAdd(key1, value1, value2));
assertEquals2(value3, map.putOrAdd(key1, value1, value2));
}
@Test
public void testAddTo() {
assertEquals2(value1, map.addTo(key1, value1));
assertEquals2(value3, map.addTo(key1, value2));
}
/* */
@Test
public void testRemove() {
map.put(key1, value1);
assertEquals2(value1, map.remove(key1));
assertEquals2(0, map.remove(key1));
assertEquals(0, map.size());
// These are internals, but perhaps worth asserting too.
assertEquals(0, map.assigned);
}
/* */
@Test
public void testEmptyKey() {
final int empty = 0;
map.put(empty, value1);
assertEquals(1, map.size());
assertFalse(map.isEmpty());
assertEquals2(value1, map.get(empty));
assertEquals2(value1, map.getOrDefault(empty, value2));
assertTrue(map.iterator().hasNext());
assertEquals(empty, map.iterator().next().key);
assertEquals2(value1, map.iterator().next().value);
map.remove(empty);
assertEquals2(0, map.get(empty));
assertEquals(0, map.size());
assertEquals2(0, map.put(empty, value1));
assertEquals2(value1, map.put(empty, value2));
map.clear();
assertFalse(map.indexExists(map.indexOf(empty)));
assertEquals2(0, map.put(empty, value1));
map.clear();
assertEquals2(0, map.remove(empty));
}
/* */
@Test
public void testMapKeySet() {
map.put(key1, value3);
map.put(key2, value2);
map.put(key3, value1);
assertSortedListEquals(map.keys().toArray(), key1, key2, key3);
}
/* */
@Test
public void testMapKeySetIterator() {
map.put(key1, value3);
map.put(key2, value2);
map.put(key3, value1);
int counted = 0;
for (IntCursor c : map.keys()) {
assertEquals(map.keys[c.index], c.value);
counted++;
}
assertEquals(counted, map.size());
}
  /* */
  @Test
  public void testClear() {
    map.put(key1, value1);
    map.put(key2, value1);
    map.clear();
    assertEquals(0, map.size());

    // These are internals, but perhaps worth asserting too.
    assertEquals(0, map.assigned);

    // Check values are cleared.
    assertEquals2(0, map.put(key1, value1));
    assertEquals2(0, map.remove(key2));
    map.clear();

    // Check if the map behaves properly upon subsequent use.
    testPutWithExpansions();
  }

  /* */
  @Test
  public void testRelease() {
    map.put(key1, value1);
    map.put(key2, value1);
    // release() discards the internal buffers entirely (stronger than clear()).
    map.release();
    assertEquals(0, map.size());

    // These are internals, but perhaps worth asserting too.
    assertEquals(0, map.assigned);

    // Check if the map behaves properly upon subsequent use.
    testPutWithExpansions();
  }
  /* */
  @Test
  public void testIterable() {
    map.put(key1, value1);
    map.put(key2, value2);
    map.put(key3, value3);
    // Removing an entry must make it invisible to iteration.
    map.remove(key2);

    int count = 0;
    for (IntDoubleHashMap.IntDoubleCursor cursor : map) {
      count++;
      assertTrue(map.containsKey(cursor.key));
      assertEquals2(cursor.value, map.get(cursor.key));
      // The cursor's index must reference the live slot in the backing arrays.
      assertEquals2(cursor.value, map.values[cursor.index]);
      assertEquals(cursor.key, map.keys[cursor.index]);
    }
    assertEquals(count, map.size());

    map.clear();
    assertFalse(map.iterator().hasNext());
  }
  /* */
  @Test
  public void testBug_HPPC73_FullCapacityGet() {
    // Regression test for HPPC issue 73: lookups, replacements and removals on a
    // map filled to exactly its capacity (load factor 1) must not reallocate.
    final AtomicInteger reallocations = new AtomicInteger();
    final int elements = 0x7F;
    map =
        new IntDoubleHashMap(elements, 1f) {
          @Override
          protected double verifyLoadFactor(double loadFactor) {
            // Skip load factor sanity range checking.
            return loadFactor;
          }

          @Override
          protected void allocateBuffers(int arraySize) {
            super.allocateBuffers(arraySize);
            // Count every buffer (re)allocation so the test can assert none happened.
            reallocations.incrementAndGet();
          }
        };

    int reallocationsBefore = reallocations.get();
    assertEquals(reallocationsBefore, 1);
    for (int i = 1; i <= elements; i++) {
      map.put(cast(i), value1);
    }

    // Non-existent key.
    int outOfSet = cast(elements + 1);
    map.remove(outOfSet);
    assertFalse(map.containsKey(outOfSet));
    assertEquals(reallocationsBefore, reallocations.get());

    // Should not expand because we're replacing an existing element.
    map.put(key1, value2);
    assertEquals(reallocationsBefore, reallocations.get());

    // Remove from a full map.
    map.remove(key1);
    assertEquals(reallocationsBefore, reallocations.get());
    map.put(key1, value2);

    // Check expand on "last slot of a full map" condition.
    map.put(outOfSet, value1);
    assertEquals(reallocationsBefore + 1, reallocations.get());
  }
  @Test
  public void testHashCodeEquals() {
    // An empty map hashes to 0 and equals any other empty instance.
    IntDoubleHashMap l0 = newInstance();
    assertEquals(0, l0.hashCode());
    assertEquals(l0, newInstance());

    // l1 and l2 hold the same key->value mappings inserted in different order.
    IntDoubleHashMap l1 =
        IntDoubleHashMap.from(newArray(key1, key2, key3), newvArray(value1, value2, value3));
    IntDoubleHashMap l2 =
        IntDoubleHashMap.from(newArray(key2, key1, key3), newvArray(value2, value1, value3));
    IntDoubleHashMap l3 = IntDoubleHashMap.from(newArray(key1, key2), newvArray(value2, value1));

    // Equality and hashCode must be insertion-order independent.
    assertEquals(l1.hashCode(), l2.hashCode());
    assertEquals(l1, l2);
    assertNotEquals(l1, l3);
    assertNotEquals(l2, l3);
  }

  @Test
  public void testBug_HPPC37() {
    // Regression test for HPPC issue 37: maps with disjoint keys must not be equal.
    IntDoubleHashMap l1 = IntDoubleHashMap.from(newArray(key1), newvArray(value1));
    IntDoubleHashMap l2 = IntDoubleHashMap.from(newArray(key2), newvArray(value1));
    assertNotEquals(l1, l2);
    assertNotEquals(l2, l1);
  }
  /** Runs random insertions/deletions/clearing and compares the results against {@link HashMap}. */
  @Test
  @SuppressWarnings({"rawtypes", "unchecked"})
  public void testAgainstHashMap() {
    final Random rnd = RandomizedTest.getRandom();
    final HashMap other = new HashMap();

    for (int size = 1000; size < 20000; size += 4000) {
      other.clear();
      map.clear();

      for (int round = 0; round < size * 20; round++) {
        int key = cast(rnd.nextInt(size));
        if (rnd.nextInt(50) == 0) {
          // Occasionally exercise the special "empty" (0) key.
          key = 0;
        }
        double value = vcast(rnd.nextInt());
        boolean hadOldValue = map.containsKey(key);
        if (rnd.nextBoolean()) {
          double previousValue;
          if (rnd.nextBoolean()) {
            // Insert or replace through the index* API instead of put().
            int index = map.indexOf(key);
            if (map.indexExists(index)) {
              previousValue = map.indexReplace(index, value);
            } else {
              map.indexInsert(index, key, value);
              previousValue = 0;
            }
          } else {
            previousValue = map.put(key, value);
          }
          // A previous value of 0 corresponds to HashMap's null only if the key was absent.
          assertEquals(
              other.put(key, value), ((previousValue) == 0) && !hadOldValue ? null : previousValue);

          assertEquals2(value, map.get(key));
          assertEquals2(value, map.indexGet(map.indexOf(key)));
          assertTrue(map.containsKey(key));
          assertTrue(map.indexExists(map.indexOf(key)));
        } else {
          assertEquals(other.containsKey(key), map.containsKey(key));
          // Randomly remove through either the index* API or plain remove().
          double previousValue =
              map.containsKey(key) && rnd.nextBoolean()
                  ? map.indexRemove(map.indexOf(key))
                  : map.remove(key);
          assertEquals(
              other.remove(key), ((previousValue) == 0) && !hadOldValue ? null : previousValue);
        }

        assertEquals(other.size(), map.size());
      }
    }
  }
  /*
   *
   */
  @Test
  public void testClone() {
    this.map.put(key1, value1);
    this.map.put(key2, value2);
    this.map.put(key3, value3);

    IntDoubleHashMap cloned = map.clone();
    cloned.remove(key1);

    // The clone must be fully independent of the original.
    assertSortedListEquals(map.keys().toArray(), key1, key2, key3);
    assertSortedListEquals(cloned.keys().toArray(), key2, key3);
  }

  /* */
  @Test
  public void testMapValues() {
    map.put(key1, value3);
    map.put(key2, value2);
    map.put(key3, value1);
    assertSortedListEquals(map.values().toArray(), value1, value2, value3);

    map.clear();
    map.put(key1, value1);
    map.put(key2, value2);
    map.put(key3, value2);
    // A value mapped from two keys must be reported once per key.
    assertSortedListEquals(map.values().toArray(), value1, value2, value2);
  }

  /* */
  @Test
  public void testMapValuesIterator() {
    map.put(key1, value3);
    map.put(key2, value2);
    map.put(key3, value1);
    int counted = 0;
    // Each cursor's index must point at the backing-array slot holding its value.
    for (DoubleCursor c : map.values()) {
      assertEquals2(map.values[c.index], c.value);
      counted++;
    }
    assertEquals(counted, map.size());
  }
  /* */
  @Test
  public void testEqualsSameClass() {
    IntDoubleHashMap l1 = newInstance();
    l1.put(key1, value0);
    l1.put(key2, value1);
    l1.put(key3, value2);

    // NOTE(review): the copy constructor already copies the source map, so the
    // following putAll calls look redundant; presumably kept to match the
    // upstream HPPC test — confirm before simplifying.
    IntDoubleHashMap l2 = new IntDoubleHashMap(l1);
    l2.putAll(l1);

    IntDoubleHashMap l3 = new IntDoubleHashMap(l2);
    l3.putAll(l2);
    l3.put(key4, value0);

    assertEquals(l2, l1);
    assertEquals(l2.hashCode(), l1.hashCode());
    assertNotEquals(l1, l3);
  }

  /* */
  @Test
  public void testEqualsSubClass() {
    class Sub extends IntDoubleHashMap {}

    IntDoubleHashMap l1 = newInstance();
    l1.put(key1, value0);
    l1.put(key2, value1);
    l1.put(key3, value2);

    IntDoubleHashMap l2 = new Sub();
    l2.putAll(l1);
    l2.put(key4, value3);

    IntDoubleHashMap l3 = new Sub();
    l3.putAll(l2);

    // Equality is defined by contents, not by the concrete runtime class.
    assertNotEquals(l1, l2);
    assertEquals(l3.hashCode(), l2.hashCode());
    assertEquals(l3, l2);
  }
}

View File

@ -0,0 +1,654 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.internal.hppc;
import com.carrotsearch.randomizedtesting.RandomizedTest;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Random;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.lucene.tests.util.LuceneTestCase;
import org.junit.Test;
/**
 * Tests for {@link IntFloatHashMap}.
 *
 * <p>Mostly forked and trimmed from com.carrotsearch.hppc.IntFloatHashMapTest
 *
 * <p>github: https://github.com/carrotsearch/hppc release: 0.9.0
 */
public class TestIntFloatHashMap extends LuceneTestCase {
  /* Ready to use key values. */
  private final int keyE = 0;
  private final int key1 = cast(1);
  private final int key2 = cast(2);
  private final int key3 = cast(3);
  private final int key4 = cast(4);

  /** Convert to target type from an integer used to test stuff. */
  private int cast(int v) {
    return v;
  }

  /** Create a new array of a given type and copy the arguments to this array. */
  private int[] newArray(int... elements) {
    return elements;
  }

  private static int randomIntBetween(int min, int max) {
    return min + random().nextInt(max + 1 - min);
  }

  /** Check if the array's content is identical to a given sequence of elements. */
  private static void assertSortedListEquals(int[] array, int... elements) {
    assertEquals(elements.length, array.length);
    Arrays.sort(array);
    Arrays.sort(elements);
    assertArrayEquals(elements, array);
  }

  /** Check if the array's content is identical to a given sequence of elements. */
  private static void assertSortedListEquals(float[] array, float... elements) {
    assertEquals(elements.length, array.length);
    Arrays.sort(array);
    Arrays.sort(elements);
    assertArrayEquals(elements, array);
  }

  // NOTE(review): declared as int and widened to float where used; the values
  // are small so the widening is lossless. Kept as-is to match the fork.
  private final int value0 = vcast(0);
  private final int value1 = vcast(1);
  private final int value2 = vcast(2);
  private final int value3 = vcast(3);
  private final int value4 = vcast(4);

  /** Per-test fresh initialized instance. */
  private IntFloatHashMap map = newInstance();

  private IntFloatHashMap newInstance() {
    return new IntFloatHashMap();
  }

  /** Convert to target type from an integer used to test stuff. */
  private int vcast(int value) {
    return value;
  }

  /** Create a new array of a given type and copy the arguments to this array. */
  /* */
  private float[] newvArray(int... elements) {
    float[] v = new float[elements.length];
    for (int i = 0; i < elements.length; i++) {
      v[i] = elements[i];
    }
    return v;
  }

  private void assertSameMap(final IntFloatHashMap c1, final IntFloatHashMap c2) {
    assertEquals(c1.size(), c2.size());
    for (IntFloatHashMap.IntFloatCursor entry : c1) {
      assertTrue(c2.containsKey(entry.key));
      assertEquals2(entry.value, c2.get(entry.key));
    }
  }

  // Float comparison with zero tolerance: the test only stores exactly-representable values.
  private static void assertEquals2(float v1, float v2) {
    assertEquals(v1, v2, 0f);
  }

  private static void assertArrayEquals(float[] v1, float[] v2) {
    assertArrayEquals(v1, v2, 0f);
  }

  /* */
  @Test
  public void testEnsureCapacity() {
    final AtomicInteger expands = new AtomicInteger();
    // Local map shadows the test field on purpose: it counts buffer allocations.
    IntFloatHashMap map =
        new IntFloatHashMap(0) {
          @Override
          protected void allocateBuffers(int arraySize) {
            super.allocateBuffers(arraySize);
            expands.incrementAndGet();
          }
        };

    // Add some elements.
    final int max = rarely() ? 0 : randomIntBetween(0, 250);
    for (int i = 0; i < max; i++) {
      map.put(cast(i), value0);
    }

    final int additions = randomIntBetween(max, max + 5000);
    map.ensureCapacity(additions + map.size());
    final int before = expands.get();
    for (int i = 0; i < additions; i++) {
      map.put(cast(i), value0);
    }
    // No reallocation may happen after ensureCapacity reserved enough room.
    assertEquals(before, expands.get());
  }

  @Test
  public void testCursorIndexIsValid() {
    map.put(keyE, value1);
    map.put(key1, value2);
    map.put(key2, value3);

    for (IntFloatHashMap.IntFloatCursor c : map) {
      assertTrue(map.indexExists(c.index));
      assertEquals2(c.value, map.indexGet(c.index));
    }
  }

  @Test
  public void testIndexMethods() {
    map.put(keyE, value1);
    map.put(key1, value2);

    // indexOf returns a non-negative slot for present keys, negative for absent.
    assertTrue(map.indexOf(keyE) >= 0);
    assertTrue(map.indexOf(key1) >= 0);
    assertTrue(map.indexOf(key2) < 0);

    assertTrue(map.indexExists(map.indexOf(keyE)));
    assertTrue(map.indexExists(map.indexOf(key1)));
    assertFalse(map.indexExists(map.indexOf(key2)));

    assertEquals2(value1, map.indexGet(map.indexOf(keyE)));
    assertEquals2(value2, map.indexGet(map.indexOf(key1)));

    // indexGet on a missing key trips an internal assertion (requires -ea).
    expectThrows(
        AssertionError.class,
        () -> {
          map.indexGet(map.indexOf(key2));
        });

    assertEquals2(value1, map.indexReplace(map.indexOf(keyE), value3));
    assertEquals2(value2, map.indexReplace(map.indexOf(key1), value4));
    assertEquals2(value3, map.indexGet(map.indexOf(keyE)));
    assertEquals2(value4, map.indexGet(map.indexOf(key1)));

    map.indexInsert(map.indexOf(key2), key2, value1);
    assertEquals2(value1, map.indexGet(map.indexOf(key2)));
    assertEquals(3, map.size());

    assertEquals2(value3, map.indexRemove(map.indexOf(keyE)));
    assertEquals(2, map.size());
    assertEquals2(value1, map.indexRemove(map.indexOf(key2)));
    assertEquals(1, map.size());
    assertTrue(map.indexOf(keyE) < 0);
    assertTrue(map.indexOf(key1) >= 0);
    assertTrue(map.indexOf(key2) < 0);
  }

  /* */
  @Test
  public void testCloningConstructor() {
    map.put(key1, value1);
    map.put(key2, value2);
    map.put(key3, value3);

    assertSameMap(map, new IntFloatHashMap(map));
  }

  /* */
  @Test
  public void testFromArrays() {
    map.put(key1, value1);
    map.put(key2, value2);
    map.put(key3, value3);

    IntFloatHashMap map2 =
        IntFloatHashMap.from(newArray(key1, key2, key3), newvArray(value1, value2, value3));

    assertSameMap(map, map2);
  }

  @Test
  public void testGetOrDefault() {
    map.put(key2, value2);
    assertTrue(map.containsKey(key2));

    map.put(key1, value1);
    assertEquals2(value1, map.getOrDefault(key1, value3));
    assertEquals2(value3, map.getOrDefault(key3, value3));
    map.remove(key1);
    assertEquals2(value3, map.getOrDefault(key1, value3));
  }

  /* */
  @Test
  public void testPut() {
    map.put(key1, value1);

    assertTrue(map.containsKey(key1));
    assertEquals2(value1, map.get(key1));
  }

  /* */
  @Test
  public void testPutOverExistingKey() {
    map.put(key1, value1);
    // put returns the previous value when the key was already present.
    assertEquals2(value1, map.put(key1, value3));
    assertEquals2(value3, map.get(key1));
  }

  /* */
  @Test
  public void testPutWithExpansions() {
    final int COUNT = 10000;
    final Random rnd = new Random(random().nextInt());
    final HashSet<Object> values = new HashSet<Object>();

    // Differential test against HashSet while the map grows through expansions.
    for (int i = 0; i < COUNT; i++) {
      final int v = rnd.nextInt();
      final boolean hadKey = values.contains(cast(v));
      values.add(cast(v));

      assertEquals(hadKey, map.containsKey(cast(v)));
      map.put(cast(v), vcast(v));
      assertEquals(values.size(), map.size());
    }
    assertEquals(values.size(), map.size());
  }

  /* */
  @Test
  public void testPutAll() {
    map.put(key1, value1);
    map.put(key2, value1);

    IntFloatHashMap map2 = newInstance();

    map2.put(key2, value2);
    map2.put(keyE, value1);

    // One new key (keyE).
    assertEquals(1, map.putAll(map2));

    // Assert the value under key2 has been replaced.
    assertEquals2(value2, map.get(key2));

    // And key3 has been added.
    assertEquals2(value1, map.get(keyE));
    assertEquals(3, map.size());
  }

  /* */
  @Test
  public void testPutIfAbsent() {
    // putIfAbsent returns true only when the key was actually inserted.
    assertTrue(map.putIfAbsent(key1, value1));
    assertFalse(map.putIfAbsent(key1, value2));
    assertEquals2(value1, map.get(key1));
  }

  @Test
  public void testPutOrAdd() {
    // Key absent: stores value1. Key present: adds value2 (value1 + value2 == value3).
    assertEquals2(value1, map.putOrAdd(key1, value1, value2));
    assertEquals2(value3, map.putOrAdd(key1, value1, value2));
  }

  @Test
  public void testAddTo() {
    // addTo behaves like put on a missing key, then accumulates.
    assertEquals2(value1, map.addTo(key1, value1));
    assertEquals2(value3, map.addTo(key1, value2));
  }

  /* */
  @Test
  public void testRemove() {
    map.put(key1, value1);
    // remove returns the removed value, then the default (0) once the key is gone.
    assertEquals2(value1, map.remove(key1));
    assertEquals2(0, map.remove(key1));
    assertEquals(0, map.size());

    // These are internals, but perhaps worth asserting too.
    assertEquals(0, map.assigned);
  }

  /* */
  @Test
  public void testEmptyKey() {
    // Key 0 is the hash map's "empty slot" sentinel and is handled specially.
    final int empty = 0;

    map.put(empty, value1);
    assertEquals(1, map.size());
    assertFalse(map.isEmpty());
    assertEquals2(value1, map.get(empty));
    assertEquals2(value1, map.getOrDefault(empty, value2));
    // Each iterator() call creates a fresh iterator over the single entry.
    assertTrue(map.iterator().hasNext());
    assertEquals(empty, map.iterator().next().key);
    assertEquals2(value1, map.iterator().next().value);

    map.remove(empty);
    assertEquals2(0, map.get(empty));
    assertEquals(0, map.size());

    assertEquals2(0, map.put(empty, value1));
    assertEquals2(value1, map.put(empty, value2));
    map.clear();
    assertFalse(map.indexExists(map.indexOf(empty)));
    assertEquals2(0, map.put(empty, value1));
    map.clear();
    assertEquals2(0, map.remove(empty));
  }

  /* */
  @Test
  public void testMapKeySet() {
    map.put(key1, value3);
    map.put(key2, value2);
    map.put(key3, value1);
    // The keys() view must expose exactly the inserted keys (order-insensitive).
    assertSortedListEquals(map.keys().toArray(), key1, key2, key3);
  }

  /* */
  @Test
  public void testMapKeySetIterator() {
    map.put(key1, value3);
    map.put(key2, value2);
    map.put(key3, value1);
    int counted = 0;
    // Each cursor's index must point at the backing-array slot holding its key.
    for (IntCursor c : map.keys()) {
      assertEquals(map.keys[c.index], c.value);
      counted++;
    }
    assertEquals(counted, map.size());
  }

  /* */
  @Test
  public void testClear() {
    map.put(key1, value1);
    map.put(key2, value1);
    map.clear();
    assertEquals(0, map.size());

    // These are internals, but perhaps worth asserting too.
    assertEquals(0, map.assigned);

    // Check values are cleared.
    assertEquals2(0, map.put(key1, value1));
    assertEquals2(0, map.remove(key2));
    map.clear();

    // Check if the map behaves properly upon subsequent use.
    testPutWithExpansions();
  }

  /* */
  @Test
  public void testRelease() {
    map.put(key1, value1);
    map.put(key2, value1);
    // release() discards the internal buffers entirely (stronger than clear()).
    map.release();
    assertEquals(0, map.size());

    // These are internals, but perhaps worth asserting too.
    assertEquals(0, map.assigned);

    // Check if the map behaves properly upon subsequent use.
    testPutWithExpansions();
  }

  /* */
  @Test
  public void testIterable() {
    map.put(key1, value1);
    map.put(key2, value2);
    map.put(key3, value3);
    // Removed entries must be invisible to iteration.
    map.remove(key2);

    int count = 0;
    for (IntFloatHashMap.IntFloatCursor cursor : map) {
      count++;
      assertTrue(map.containsKey(cursor.key));
      assertEquals2(cursor.value, map.get(cursor.key));
      assertEquals2(cursor.value, map.values[cursor.index]);
      assertEquals(cursor.key, map.keys[cursor.index]);
    }
    assertEquals(count, map.size());

    map.clear();
    assertFalse(map.iterator().hasNext());
  }

  /* */
  @Test
  public void testBug_HPPC73_FullCapacityGet() {
    // Regression test for HPPC issue 73: operations on a map filled to exactly
    // its capacity (load factor 1) must not reallocate.
    final AtomicInteger reallocations = new AtomicInteger();
    final int elements = 0x7F;
    map =
        new IntFloatHashMap(elements, 1f) {
          @Override
          protected double verifyLoadFactor(double loadFactor) {
            // Skip load factor sanity range checking.
            return loadFactor;
          }

          @Override
          protected void allocateBuffers(int arraySize) {
            super.allocateBuffers(arraySize);
            reallocations.incrementAndGet();
          }
        };

    int reallocationsBefore = reallocations.get();
    assertEquals(reallocationsBefore, 1);
    for (int i = 1; i <= elements; i++) {
      map.put(cast(i), value1);
    }

    // Non-existent key.
    int outOfSet = cast(elements + 1);
    map.remove(outOfSet);
    assertFalse(map.containsKey(outOfSet));
    assertEquals(reallocationsBefore, reallocations.get());

    // Should not expand because we're replacing an existing element.
    map.put(key1, value2);
    assertEquals(reallocationsBefore, reallocations.get());

    // Remove from a full map.
    map.remove(key1);
    assertEquals(reallocationsBefore, reallocations.get());
    map.put(key1, value2);

    // Check expand on "last slot of a full map" condition.
    map.put(outOfSet, value1);
    assertEquals(reallocationsBefore + 1, reallocations.get());
  }

  @Test
  public void testHashCodeEquals() {
    // An empty map hashes to 0 and equals any other empty instance.
    IntFloatHashMap l0 = newInstance();
    assertEquals(0, l0.hashCode());
    assertEquals(l0, newInstance());

    // l1 and l2 contain identical mappings inserted in different order.
    IntFloatHashMap l1 =
        IntFloatHashMap.from(newArray(key1, key2, key3), newvArray(value1, value2, value3));
    IntFloatHashMap l2 =
        IntFloatHashMap.from(newArray(key2, key1, key3), newvArray(value2, value1, value3));
    IntFloatHashMap l3 = IntFloatHashMap.from(newArray(key1, key2), newvArray(value2, value1));

    assertEquals(l1.hashCode(), l2.hashCode());
    assertEquals(l1, l2);
    assertNotEquals(l1, l3);
    assertNotEquals(l2, l3);
  }

  @Test
  public void testBug_HPPC37() {
    // Regression test for HPPC issue 37: maps with disjoint keys must not be equal.
    IntFloatHashMap l1 = IntFloatHashMap.from(newArray(key1), newvArray(value1));
    IntFloatHashMap l2 = IntFloatHashMap.from(newArray(key2), newvArray(value1));
    assertNotEquals(l1, l2);
    assertNotEquals(l2, l1);
  }

  /** Runs random insertions/deletions/clearing and compares the results against {@link HashMap}. */
  @Test
  @SuppressWarnings({"rawtypes", "unchecked"})
  public void testAgainstHashMap() {
    final Random rnd = RandomizedTest.getRandom();
    final HashMap other = new HashMap();

    for (int size = 1000; size < 20000; size += 4000) {
      other.clear();
      map.clear();

      for (int round = 0; round < size * 20; round++) {
        int key = cast(rnd.nextInt(size));
        if (rnd.nextInt(50) == 0) {
          // Occasionally exercise the special "empty" (0) key.
          key = 0;
        }
        float value = vcast(rnd.nextInt());
        boolean hadOldValue = map.containsKey(key);
        if (rnd.nextBoolean()) {
          float previousValue;
          if (rnd.nextBoolean()) {
            // Insert or replace through the index* API instead of put().
            int index = map.indexOf(key);
            if (map.indexExists(index)) {
              previousValue = map.indexReplace(index, value);
            } else {
              map.indexInsert(index, key, value);
              previousValue = 0;
            }
          } else {
            previousValue = map.put(key, value);
          }
          // A previous value of 0 corresponds to HashMap's null only if the key was absent.
          assertEquals(
              other.put(key, value), ((previousValue) == 0) && !hadOldValue ? null : previousValue);

          assertEquals2(value, map.get(key));
          assertEquals2(value, map.indexGet(map.indexOf(key)));
          assertTrue(map.containsKey(key));
          assertTrue(map.indexExists(map.indexOf(key)));
        } else {
          assertEquals(other.containsKey(key), map.containsKey(key));
          float previousValue =
              map.containsKey(key) && rnd.nextBoolean()
                  ? map.indexRemove(map.indexOf(key))
                  : map.remove(key);
          assertEquals(
              other.remove(key), ((previousValue) == 0) && !hadOldValue ? null : previousValue);
        }

        assertEquals(other.size(), map.size());
      }
    }
  }

  /*
   *
   */
  @Test
  public void testClone() {
    this.map.put(key1, value1);
    this.map.put(key2, value2);
    this.map.put(key3, value3);

    IntFloatHashMap cloned = map.clone();
    cloned.remove(key1);

    // The clone must be fully independent of the original.
    assertSortedListEquals(map.keys().toArray(), key1, key2, key3);
    assertSortedListEquals(cloned.keys().toArray(), key2, key3);
  }

  /* */
  @Test
  public void testMapValues() {
    map.put(key1, value3);
    map.put(key2, value2);
    map.put(key3, value1);
    assertSortedListEquals(map.values().toArray(), value1, value2, value3);

    map.clear();
    map.put(key1, value1);
    map.put(key2, value2);
    map.put(key3, value2);
    // A value mapped from two keys must be reported once per key.
    assertSortedListEquals(map.values().toArray(), value1, value2, value2);
  }

  /* */
  @Test
  public void testMapValuesIterator() {
    map.put(key1, value3);
    map.put(key2, value2);
    map.put(key3, value1);
    int counted = 0;
    for (FloatCursor c : map.values()) {
      assertEquals2(map.values[c.index], c.value);
      counted++;
    }
    assertEquals(counted, map.size());
  }

  /* */
  @Test
  public void testEqualsSameClass() {
    IntFloatHashMap l1 = newInstance();
    l1.put(key1, value0);
    l1.put(key2, value1);
    l1.put(key3, value2);

    // NOTE(review): the copy constructor already copies the source map, so the
    // putAll calls look redundant; presumably kept to match the upstream test.
    IntFloatHashMap l2 = new IntFloatHashMap(l1);
    l2.putAll(l1);

    IntFloatHashMap l3 = new IntFloatHashMap(l2);
    l3.putAll(l2);
    l3.put(key4, value0);

    assertEquals(l2, l1);
    assertEquals(l2.hashCode(), l1.hashCode());
    assertNotEquals(l1, l3);
  }

  /* */
  @Test
  public void testEqualsSubClass() {
    class Sub extends IntFloatHashMap {}

    IntFloatHashMap l1 = newInstance();
    l1.put(key1, value0);
    l1.put(key2, value1);
    l1.put(key3, value2);

    IntFloatHashMap l2 = new Sub();
    l2.putAll(l1);
    l2.put(key4, value3);

    IntFloatHashMap l3 = new Sub();
    l3.putAll(l2);

    // Equality is defined by contents, not by the concrete runtime class.
    assertNotEquals(l1, l2);
    assertEquals(l3.hashCode(), l2.hashCode());
    assertEquals(l3, l2);
  }
}

View File

@ -15,7 +15,7 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.lucene.util.hppc; package org.apache.lucene.internal.hppc;
import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;

View File

@ -15,7 +15,7 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.lucene.util.hppc; package org.apache.lucene.internal.hppc;
import com.carrotsearch.randomizedtesting.RandomizedTest; import com.carrotsearch.randomizedtesting.RandomizedTest;
import java.util.Arrays; import java.util.Arrays;
@ -24,7 +24,6 @@ import java.util.HashSet;
import java.util.Random; import java.util.Random;
import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicInteger;
import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.tests.util.LuceneTestCase;
import org.junit.After;
import org.junit.Test; import org.junit.Test;
/** /**
@ -78,23 +77,6 @@ public class TestIntIntHashMap extends LuceneTestCase {
return new IntIntHashMap(); return new IntIntHashMap();
} }
@After
public void checkEmptySlotsUninitialized() {
if (map != null) {
int occupied = 0;
for (int i = 0; i <= map.mask; i++) {
if (((map.keys[i]) == 0)) {
} else {
occupied++;
}
}
assertEquals(occupied, map.assigned);
if (!map.hasEmptyKey) {}
}
}
/** Convert to target type from an integer used to test stuff. */ /** Convert to target type from an integer used to test stuff. */
private int vcast(int value) { private int vcast(int value) {
return value; return value;
@ -326,10 +308,10 @@ public class TestIntIntHashMap extends LuceneTestCase {
map.put(empty, value1); map.put(empty, value1);
assertEquals(1, map.size()); assertEquals(1, map.size());
assertEquals(false, map.isEmpty()); assertFalse(map.isEmpty());
assertEquals(value1, map.get(empty)); assertEquals(value1, map.get(empty));
assertEquals(value1, map.getOrDefault(empty, value2)); assertEquals(value1, map.getOrDefault(empty, value2));
assertEquals(true, map.iterator().hasNext()); assertTrue(map.iterator().hasNext());
assertEquals(empty, map.iterator().next().key); assertEquals(empty, map.iterator().next().key);
assertEquals(value1, map.iterator().next().value); assertEquals(value1, map.iterator().next().value);
@ -492,8 +474,8 @@ public class TestIntIntHashMap extends LuceneTestCase {
assertEquals(l1.hashCode(), l2.hashCode()); assertEquals(l1.hashCode(), l2.hashCode());
assertEquals(l1, l2); assertEquals(l1, l2);
assertFalse(l1.equals(l3)); assertNotEquals(l1, l3);
assertFalse(l2.equals(l3)); assertNotEquals(l2, l3);
} }
@Test @Test
@ -502,8 +484,8 @@ public class TestIntIntHashMap extends LuceneTestCase {
IntIntHashMap l2 = IntIntHashMap.from(newArray(key2), newvArray(value1)); IntIntHashMap l2 = IntIntHashMap.from(newArray(key2), newvArray(value1));
assertFalse(l1.equals(l2)); assertNotEquals(l1, l2);
assertFalse(l2.equals(l1)); assertNotEquals(l2, l1);
} }
/** Runs random insertions/deletions/clearing and compares the results against {@link HashMap}. */ /** Runs random insertions/deletions/clearing and compares the results against {@link HashMap}. */

View File

@ -15,7 +15,7 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.lucene.util.hppc; package org.apache.lucene.internal.hppc;
import com.carrotsearch.randomizedtesting.RandomizedTest; import com.carrotsearch.randomizedtesting.RandomizedTest;
import java.util.Arrays; import java.util.Arrays;
@ -24,7 +24,6 @@ import java.util.HashSet;
import java.util.Random; import java.util.Random;
import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicInteger;
import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.tests.util.LuceneTestCase;
import org.junit.After;
import org.junit.Test; import org.junit.Test;
/** /**
@ -86,23 +85,6 @@ public class TestIntObjectHashMap extends LuceneTestCase {
return new IntObjectHashMap(); return new IntObjectHashMap();
} }
@After
public void checkEmptySlotsUninitialized() {
if (map != null) {
int occupied = 0;
for (int i = 0; i <= map.mask; i++) {
if (((map.keys[i]) == 0)) {
} else {
occupied++;
}
}
assertEquals(occupied, map.assigned);
if (!map.hasEmptyKey) {}
}
}
/** Convert to target type from an integer used to test stuff. */ /** Convert to target type from an integer used to test stuff. */
private int vcast(int value) { private int vcast(int value) {
return value; return value;
@ -324,7 +306,7 @@ public class TestIntObjectHashMap extends LuceneTestCase {
public void testRemove() { public void testRemove() {
map.put(key1, value1); map.put(key1, value1);
assertEquals(value1, map.remove(key1)); assertEquals(value1, map.remove(key1));
assertEquals(null, map.remove(key1)); assertNull(map.remove(key1));
assertEquals(0, map.size()); assertEquals(0, map.size());
// These are internals, but perhaps worth asserting too. // These are internals, but perhaps worth asserting too.
@ -338,15 +320,15 @@ public class TestIntObjectHashMap extends LuceneTestCase {
map.put(empty, value1); map.put(empty, value1);
assertEquals(1, map.size()); assertEquals(1, map.size());
assertEquals(false, map.isEmpty()); assertFalse(map.isEmpty());
assertEquals(value1, map.get(empty)); assertEquals(value1, map.get(empty));
assertEquals(value1, map.getOrDefault(empty, value2)); assertEquals(value1, map.getOrDefault(empty, value2));
assertEquals(true, map.iterator().hasNext()); assertTrue(map.iterator().hasNext());
assertEquals(empty, map.iterator().next().key); assertEquals(empty, map.iterator().next().key);
assertEquals(value1, map.iterator().next().value); assertEquals(value1, map.iterator().next().value);
map.remove(empty); map.remove(empty);
assertEquals(null, map.get(empty)); assertNull(map.get(empty));
assertEquals(0, map.size()); assertEquals(0, map.size());
map.put(empty, null); map.put(empty, null);
@ -359,13 +341,13 @@ public class TestIntObjectHashMap extends LuceneTestCase {
assertFalse(map.containsKey(empty)); assertFalse(map.containsKey(empty));
assertNull(map.get(empty)); assertNull(map.get(empty));
assertEquals(null, map.put(empty, value1)); assertNull(map.put(empty, value1));
assertEquals(value1, map.put(empty, value2)); assertEquals(value1, map.put(empty, value2));
map.clear(); map.clear();
assertFalse(map.indexExists(map.indexOf(empty))); assertFalse(map.indexExists(map.indexOf(empty)));
assertEquals(null, map.put(empty, value1)); assertNull(map.put(empty, value1));
map.clear(); map.clear();
assertEquals(null, map.remove(empty)); assertNull(map.remove(empty));
} }
/* */ /* */
@ -405,8 +387,8 @@ public class TestIntObjectHashMap extends LuceneTestCase {
assertEquals(0, map.assigned); assertEquals(0, map.assigned);
// Check values are cleared. // Check values are cleared.
assertEquals(null, map.put(key1, value1)); assertNull(map.put(key1, value1));
assertEquals(null, map.remove(key2)); assertNull(map.remove(key2));
map.clear(); map.clear();
// Check if the map behaves properly upon subsequent use. // Check if the map behaves properly upon subsequent use.
@ -514,8 +496,8 @@ public class TestIntObjectHashMap extends LuceneTestCase {
assertEquals(l1.hashCode(), l2.hashCode()); assertEquals(l1.hashCode(), l2.hashCode());
assertEquals(l1, l2); assertEquals(l1, l2);
assertFalse(l1.equals(l3)); assertNotEquals(l1, l3);
assertFalse(l2.equals(l3)); assertNotEquals(l2, l3);
} }
@Test @Test
@ -524,8 +506,8 @@ public class TestIntObjectHashMap extends LuceneTestCase {
IntObjectHashMap l2 = IntObjectHashMap.from(newArray(key2), newvArray(value1)); IntObjectHashMap l2 = IntObjectHashMap.from(newArray(key2), newvArray(value1));
assertFalse(l1.equals(l2)); assertNotEquals(l1, l2);
assertFalse(l2.equals(l1)); assertNotEquals(l2, l1);
} }
/** Runs random insertions/deletions/clearing and compares the results against {@link HashMap}. */ /** Runs random insertions/deletions/clearing and compares the results against {@link HashMap}. */

View File

@ -15,7 +15,7 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.lucene.util.hppc; package org.apache.lucene.internal.hppc;
import java.util.Arrays; import java.util.Arrays;
import java.util.Iterator; import java.util.Iterator;
@ -418,13 +418,12 @@ public class TestLongArrayList extends LuceneTestCase {
@Test @Test
public void testSort() { public void testSort() {
list.add(key1, key3, key2); list.add(key3, key1, key3, key2);
LongArrayList list2 = new LongArrayList(); LongArrayList list2 = new LongArrayList();
list2.ensureCapacity(100); list2.ensureCapacity(100);
list2.addAll(list); list2.addAll(list);
assertSame(list2, list2.sort()); assertSame(list2, list2.sort());
assertEquals(LongArrayList.from(list.stream().sorted().toArray()), list2); assertEquals(LongArrayList.from(key1, key2, key3, key3), list2);
} }
@Test @Test

View File

@ -0,0 +1,654 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.internal.hppc;
import com.carrotsearch.randomizedtesting.RandomizedTest;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Random;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.lucene.tests.util.LuceneTestCase;
import org.junit.Test;
/**
* Tests for {@link LongFloatHashMap}.
*
* <p>Mostly forked and trimmed from com.carrotsearch.hppc.LongFloatHashMapTest
*
* <p>github: https://github.com/carrotsearch/hppc release: 0.9.0
*/
public class TestLongFloatHashMap extends LuceneTestCase {

  /* Ready to use key values. */

  // keyE is 0, the sentinel "empty" key of the open-addressing map: it is stored
  // out-of-band (not in the keys[] array proper), so several tests below exercise it
  // explicitly.
  private final long keyE = 0;
  private final long key1 = cast(1);
  private final long key2 = cast(2);
  private final long key3 = cast(3);
  private final long key4 = cast(4);

  /** Convert to target type from an integer used to test stuff. */
  private long cast(int v) {
    return v;
  }

  /** Create a new array of a given type and copy the arguments to this array. */
  private long[] newArray(long... elements) {
    return elements;
  }

  // Uniformly random int in [min, max], both bounds inclusive.
  private static int randomIntBetween(int min, int max) {
    return min + random().nextInt(max + 1 - min);
  }

  /** Check if the array's content is identical to a given sequence of elements. */
  private static void assertSortedListEquals(long[] array, long... elements) {
    assertEquals(elements.length, array.length);
    Arrays.sort(array);
    Arrays.sort(elements);
    assertArrayEquals(elements, array);
  }

  /** Check if the array's content is identical to a given sequence of elements. */
  private static void assertSortedListEquals(float[] array, float... elements) {
    assertEquals(elements.length, array.length);
    Arrays.sort(array);
    Arrays.sort(elements);
    assertArrayEquals(elements, array);
  }

  // Ready-to-use values. Kept as ints (the template origin of this forked test);
  // converted to float at use sites via newvArray / implicit widening.
  private final int value0 = vcast(0);
  private final int value1 = vcast(1);
  private final int value2 = vcast(2);
  private final int value3 = vcast(3);
  private final int value4 = vcast(4);

  /** Per-test fresh initialized instance. */
  private LongFloatHashMap map = newInstance();

  private LongFloatHashMap newInstance() {
    return new LongFloatHashMap();
  }

  /** Convert to target type from an integer used to test stuff. */
  private int vcast(int value) {
    return value;
  }

  /** Create a new array of a given type and copy the arguments to this array. */
  /* */
  private float[] newvArray(int... elements) {
    float[] v = new float[elements.length];
    for (int i = 0; i < elements.length; i++) {
      v[i] = elements[i];
    }
    return v;
  }

  // Asserts that both maps contain exactly the same key/value pairs.
  private void assertSameMap(final LongFloatHashMap c1, final LongFloatHashMap c2) {
    assertEquals(c1.size(), c2.size());

    for (LongFloatHashMap.LongFloatCursor entry : c1) {
      assertTrue(c2.containsKey(entry.key));
      assertEquals2(entry.value, c2.get(entry.key));
    }
  }

  // Exact float equality (delta 0f): the map stores values verbatim, no tolerance needed.
  private static void assertEquals2(float v1, float v2) {
    assertEquals(v1, v2, 0f);
  }

  private static void assertArrayEquals(float[] v1, float[] v2) {
    assertArrayEquals(v1, v2, 0f);
  }

  /* */
  @Test
  public void testEnsureCapacity() {
    // Count buffer reallocations via the allocateBuffers() hook.
    final AtomicInteger expands = new AtomicInteger();
    LongFloatHashMap map =
        new LongFloatHashMap(0) {
          @Override
          protected void allocateBuffers(int arraySize) {
            super.allocateBuffers(arraySize);
            expands.incrementAndGet();
          }
        };

    // Add some elements.
    final int max = rarely() ? 0 : randomIntBetween(0, 250);
    for (int i = 0; i < max; i++) {
      map.put(cast(i), value0);
    }

    final int additions = randomIntBetween(max, max + 5000);
    map.ensureCapacity(additions + map.size());
    final int before = expands.get();
    for (int i = 0; i < additions; i++) {
      map.put(cast(i), value0);
    }
    // ensureCapacity() must have pre-sized the buffers: no reallocation during the puts.
    assertEquals(before, expands.get());
  }

  @Test
  public void testCursorIndexIsValid() {
    map.put(keyE, value1);
    map.put(key1, value2);
    map.put(key2, value3);

    // Every cursor's index must be resolvable back through the index* API.
    for (LongFloatHashMap.LongFloatCursor c : map) {
      assertTrue(map.indexExists(c.index));
      assertEquals2(c.value, map.indexGet(c.index));
    }
  }

  @Test
  public void testIndexMethods() {
    map.put(keyE, value1);
    map.put(key1, value2);

    // indexOf() returns a non-negative slot for present keys, negative otherwise.
    assertTrue(map.indexOf(keyE) >= 0);
    assertTrue(map.indexOf(key1) >= 0);
    assertTrue(map.indexOf(key2) < 0);

    assertTrue(map.indexExists(map.indexOf(keyE)));
    assertTrue(map.indexExists(map.indexOf(key1)));
    assertFalse(map.indexExists(map.indexOf(key2)));

    assertEquals2(value1, map.indexGet(map.indexOf(keyE)));
    assertEquals2(value2, map.indexGet(map.indexOf(key1)));

    // indexGet() on a non-existing index trips an assertion (tests run with -ea).
    expectThrows(
        AssertionError.class,
        () -> {
          map.indexGet(map.indexOf(key2));
        });

    assertEquals2(value1, map.indexReplace(map.indexOf(keyE), value3));
    assertEquals2(value2, map.indexReplace(map.indexOf(key1), value4));
    assertEquals2(value3, map.indexGet(map.indexOf(keyE)));
    assertEquals2(value4, map.indexGet(map.indexOf(key1)));

    // A negative indexOf() result encodes the insertion slot usable by indexInsert().
    map.indexInsert(map.indexOf(key2), key2, value1);
    assertEquals2(value1, map.indexGet(map.indexOf(key2)));
    assertEquals(3, map.size());

    assertEquals2(value3, map.indexRemove(map.indexOf(keyE)));
    assertEquals(2, map.size());
    assertEquals2(value1, map.indexRemove(map.indexOf(key2)));
    assertEquals(1, map.size());
    assertTrue(map.indexOf(keyE) < 0);
    assertTrue(map.indexOf(key1) >= 0);
    assertTrue(map.indexOf(key2) < 0);
  }

  /* */
  @Test
  public void testCloningConstructor() {
    map.put(key1, value1);
    map.put(key2, value2);
    map.put(key3, value3);

    assertSameMap(map, new LongFloatHashMap(map));
  }

  /* */
  @Test
  public void testFromArrays() {
    map.put(key1, value1);
    map.put(key2, value2);
    map.put(key3, value3);

    LongFloatHashMap map2 =
        LongFloatHashMap.from(newArray(key1, key2, key3), newvArray(value1, value2, value3));

    assertSameMap(map, map2);
  }

  @Test
  public void testGetOrDefault() {
    map.put(key2, value2);
    assertTrue(map.containsKey(key2));

    map.put(key1, value1);
    assertEquals2(value1, map.getOrDefault(key1, value3));
    assertEquals2(value3, map.getOrDefault(key3, value3));
    map.remove(key1);
    // After removal, the supplied default must be returned again.
    assertEquals2(value3, map.getOrDefault(key1, value3));
  }

  /* */
  @Test
  public void testPut() {
    map.put(key1, value1);

    assertTrue(map.containsKey(key1));
    assertEquals2(value1, map.get(key1));
  }

  /* */
  @Test
  public void testPutOverExistingKey() {
    map.put(key1, value1);
    // put() over an existing key returns the previous value.
    assertEquals2(value1, map.put(key1, value3));
    assertEquals2(value3, map.get(key1));
  }

  /* */
  @Test
  public void testPutWithExpansions() {
    // Differential test against HashSet membership across many random puts,
    // forcing several internal buffer expansions.
    final int COUNT = 10000;
    final Random rnd = new Random(random().nextLong());
    final HashSet<Object> values = new HashSet<Object>();

    for (int i = 0; i < COUNT; i++) {
      final int v = rnd.nextInt();
      final boolean hadKey = values.contains(cast(v));
      values.add(cast(v));

      assertEquals(hadKey, map.containsKey(cast(v)));
      map.put(cast(v), vcast(v));
      assertEquals(values.size(), map.size());
    }
    assertEquals(values.size(), map.size());
  }

  /* */
  @Test
  public void testPutAll() {
    map.put(key1, value1);
    map.put(key2, value1);

    LongFloatHashMap map2 = newInstance();

    map2.put(key2, value2);
    map2.put(keyE, value1);

    // One new key (keyE). putAll() returns the number of keys added.
    assertEquals(1, map.putAll(map2));

    // Assert the value under key2 has been replaced.
    assertEquals2(value2, map.get(key2));

    // And key3 has been added.
    assertEquals2(value1, map.get(keyE));
    assertEquals(3, map.size());
  }

  /* */
  @Test
  public void testPutIfAbsent() {
    assertTrue(map.putIfAbsent(key1, value1));
    assertFalse(map.putIfAbsent(key1, value2));
    assertEquals2(value1, map.get(key1));
  }

  @Test
  public void testPutOrAdd() {
    // First call inserts value1; second adds value2 to it (value1 + value2 == value3).
    assertEquals2(value1, map.putOrAdd(key1, value1, value2));
    assertEquals2(value3, map.putOrAdd(key1, value1, value2));
  }

  @Test
  public void testAddTo() {
    // addTo() on an absent key inserts; on a present key accumulates.
    assertEquals2(value1, map.addTo(key1, value1));
    assertEquals2(value3, map.addTo(key1, value2));
  }

  /* */
  @Test
  public void testRemove() {
    map.put(key1, value1);
    assertEquals2(value1, map.remove(key1));
    // Removing an absent key returns the default value (0).
    assertEquals2(0, map.remove(key1));
    assertEquals(0, map.size());

    // These are internals, but perhaps worth asserting too.
    assertEquals(0, map.assigned);
  }

  /* */
  @Test
  public void testEmptyKey() {
    // Full lifecycle of the out-of-band empty-key (0) slot: put, get, iterate,
    // remove, re-put, clear.
    final int empty = 0;

    map.put(empty, value1);
    assertEquals(1, map.size());
    assertFalse(map.isEmpty());
    assertEquals2(value1, map.get(empty));
    assertEquals2(value1, map.getOrDefault(empty, value2));
    assertTrue(map.iterator().hasNext());
    assertEquals(empty, map.iterator().next().key);
    assertEquals2(value1, map.iterator().next().value);

    map.remove(empty);
    assertEquals2(0, map.get(empty));
    assertEquals(0, map.size());

    assertEquals2(0, map.put(empty, value1));
    assertEquals2(value1, map.put(empty, value2));
    map.clear();
    assertFalse(map.indexExists(map.indexOf(empty)));
    assertEquals2(0, map.put(empty, value1));
    map.clear();
    assertEquals2(0, map.remove(empty));
  }

  /* */
  @Test
  public void testMapKeySet() {
    map.put(key1, value3);
    map.put(key2, value2);
    map.put(key3, value1);

    assertSortedListEquals(map.keys().toArray(), key1, key2, key3);
  }

  /* */
  @Test
  public void testMapKeySetIterator() {
    map.put(key1, value3);
    map.put(key2, value2);
    map.put(key3, value1);

    int counted = 0;
    for (LongCursor c : map.keys()) {
      // The cursor's index points into the internal keys[] buffer.
      assertEquals(map.keys[c.index], c.value);
      counted++;
    }
    assertEquals(counted, map.size());
  }

  /* */
  @Test
  public void testClear() {
    map.put(key1, value1);
    map.put(key2, value1);
    map.clear();
    assertEquals(0, map.size());

    // These are internals, but perhaps worth asserting too.
    assertEquals(0, map.assigned);

    // Check values are cleared.
    assertEquals2(0, map.put(key1, value1));
    assertEquals2(0, map.remove(key2));
    map.clear();

    // Check if the map behaves properly upon subsequent use.
    testPutWithExpansions();
  }

  /* */
  @Test
  public void testRelease() {
    map.put(key1, value1);
    map.put(key2, value1);
    // release() drops the internal buffers entirely (unlike clear()).
    map.release();
    assertEquals(0, map.size());

    // These are internals, but perhaps worth asserting too.
    assertEquals(0, map.assigned);

    // Check if the map behaves properly upon subsequent use.
    testPutWithExpansions();
  }

  /* */
  @Test
  public void testIterable() {
    map.put(key1, value1);
    map.put(key2, value2);
    map.put(key3, value3);
    map.remove(key2);

    int count = 0;
    for (LongFloatHashMap.LongFloatCursor cursor : map) {
      count++;
      assertTrue(map.containsKey(cursor.key));
      assertEquals2(cursor.value, map.get(cursor.key));

      assertEquals2(cursor.value, map.values[cursor.index]);
      assertEquals(cursor.key, map.keys[cursor.index]);
    }
    assertEquals(count, map.size());

    map.clear();
    assertFalse(map.iterator().hasNext());
  }

  /* */
  @Test
  public void testBug_HPPC73_FullCapacityGet() {
    // Regression test for HPPC issue #73: operations on a map filled exactly to
    // capacity (load factor 1) must not loop forever or expand spuriously.
    final AtomicInteger reallocations = new AtomicInteger();
    final int elements = 0x7F;
    map =
        new LongFloatHashMap(elements, 1f) {
          @Override
          protected double verifyLoadFactor(double loadFactor) {
            // Skip load factor sanity range checking.
            return loadFactor;
          }

          @Override
          protected void allocateBuffers(int arraySize) {
            super.allocateBuffers(arraySize);
            reallocations.incrementAndGet();
          }
        };

    int reallocationsBefore = reallocations.get();
    assertEquals(reallocationsBefore, 1);
    for (int i = 1; i <= elements; i++) {
      map.put(cast(i), value1);
    }

    // Non-existent key.
    long outOfSet = cast(elements + 1);
    map.remove(outOfSet);
    assertFalse(map.containsKey(outOfSet));
    assertEquals(reallocationsBefore, reallocations.get());

    // Should not expand because we're replacing an existing element.
    map.put(key1, value2);
    assertEquals(reallocationsBefore, reallocations.get());

    // Remove from a full map.
    map.remove(key1);
    assertEquals(reallocationsBefore, reallocations.get());
    map.put(key1, value2);

    // Check expand on "last slot of a full map" condition.
    map.put(outOfSet, value1);
    assertEquals(reallocationsBefore + 1, reallocations.get());
  }

  @Test
  public void testHashCodeEquals() {
    LongFloatHashMap l0 = newInstance();
    assertEquals(0, l0.hashCode());
    assertEquals(l0, newInstance());

    // Equal contents in different insertion order must compare equal and hash equal.
    LongFloatHashMap l1 =
        LongFloatHashMap.from(newArray(key1, key2, key3), newvArray(value1, value2, value3));

    LongFloatHashMap l2 =
        LongFloatHashMap.from(newArray(key2, key1, key3), newvArray(value2, value1, value3));

    LongFloatHashMap l3 = LongFloatHashMap.from(newArray(key1, key2), newvArray(value2, value1));

    assertEquals(l1.hashCode(), l2.hashCode());
    assertEquals(l1, l2);

    assertNotEquals(l1, l3);
    assertNotEquals(l2, l3);
  }

  @Test
  public void testBug_HPPC37() {
    // Regression test for HPPC issue #37: maps with the same values but different
    // keys must not compare equal (in either direction).
    LongFloatHashMap l1 = LongFloatHashMap.from(newArray(key1), newvArray(value1));

    LongFloatHashMap l2 = LongFloatHashMap.from(newArray(key2), newvArray(value1));

    assertNotEquals(l1, l2);
    assertNotEquals(l2, l1);
  }

  /** Runs random insertions/deletions/clearing and compares the results against {@link HashMap}. */
  @Test
  @SuppressWarnings({"rawtypes", "unchecked"})
  public void testAgainstHashMap() {
    final Random rnd = RandomizedTest.getRandom();
    final HashMap other = new HashMap();

    for (int size = 1000; size < 20000; size += 4000) {
      other.clear();
      map.clear();

      for (int round = 0; round < size * 20; round++) {
        long key = cast(rnd.nextInt(size));
        if (rnd.nextInt(50) == 0) {
          // Occasionally force the empty-key (0) code path.
          key = 0;
        }
        float value = vcast(rnd.nextInt());

        boolean hadOldValue = map.containsKey(key);
        if (rnd.nextBoolean()) {
          // Insert/replace, randomly via the put() API or via the index* API.
          float previousValue;
          if (rnd.nextBoolean()) {
            int index = map.indexOf(key);
            if (map.indexExists(index)) {
              previousValue = map.indexReplace(index, value);
            } else {
              map.indexInsert(index, key, value);
              previousValue = 0;
            }
          } else {
            previousValue = map.put(key, value);
          }
          // HashMap returns null for "absent"; our map returns 0, so map 0-without-prior-key
          // to null before comparing.
          assertEquals(
              other.put(key, value), ((previousValue) == 0) && !hadOldValue ? null : previousValue);

          assertEquals2(value, map.get(key));
          assertEquals2(value, map.indexGet(map.indexOf(key)));
          assertTrue(map.containsKey(key));
          assertTrue(map.indexExists(map.indexOf(key)));
        } else {
          // Removal, randomly via remove() or indexRemove().
          assertEquals(other.containsKey(key), map.containsKey(key));
          float previousValue =
              map.containsKey(key) && rnd.nextBoolean()
                  ? map.indexRemove(map.indexOf(key))
                  : map.remove(key);
          assertEquals(
              other.remove(key), ((previousValue) == 0) && !hadOldValue ? null : previousValue);
        }

        assertEquals(other.size(), map.size());
      }
    }
  }

  /*
   * Clone must produce an independent copy: mutating the clone leaves the source intact.
   */
  @Test
  public void testClone() {
    this.map.put(key1, value1);
    this.map.put(key2, value2);
    this.map.put(key3, value3);

    LongFloatHashMap cloned = map.clone();
    cloned.remove(key1);

    assertSortedListEquals(map.keys().toArray(), key1, key2, key3);
    assertSortedListEquals(cloned.keys().toArray(), key2, key3);
  }

  /* */
  @Test
  public void testMapValues() {
    map.put(key1, value3);
    map.put(key2, value2);
    map.put(key3, value1);
    assertSortedListEquals(map.values().toArray(), value1, value2, value3);

    map.clear();
    map.put(key1, value1);
    map.put(key2, value2);
    map.put(key3, value2);
    // Duplicate values must be reported once per key.
    assertSortedListEquals(map.values().toArray(), value1, value2, value2);
  }

  /* */
  @Test
  public void testMapValuesIterator() {
    map.put(key1, value3);
    map.put(key2, value2);
    map.put(key3, value1);

    int counted = 0;
    for (FloatCursor c : map.values()) {
      // The cursor's index points into the internal values[] buffer.
      assertEquals2(map.values[c.index], c.value);
      counted++;
    }
    assertEquals(counted, map.size());
  }

  /* */
  @Test
  public void testEqualsSameClass() {
    LongFloatHashMap l1 = newInstance();
    l1.put(key1, value0);
    l1.put(key2, value1);
    l1.put(key3, value2);

    LongFloatHashMap l2 = new LongFloatHashMap(l1);
    l2.putAll(l1);

    LongFloatHashMap l3 = new LongFloatHashMap(l2);
    l3.putAll(l2);
    l3.put(key4, value0);

    assertEquals(l2, l1);
    assertEquals(l2.hashCode(), l1.hashCode());
    assertNotEquals(l1, l3);
  }

  /* */
  @Test
  public void testEqualsSubClass() {
    // equals() is class-sensitive: a subclass with extra entries differs from the base
    // instance, but two subclass instances with equal contents are equal.
    class Sub extends LongFloatHashMap {}

    LongFloatHashMap l1 = newInstance();
    l1.put(key1, value0);
    l1.put(key2, value1);
    l1.put(key3, value2);

    LongFloatHashMap l2 = new Sub();
    l2.putAll(l1);
    l2.put(key4, value3);

    LongFloatHashMap l3 = new Sub();
    l3.putAll(l2);

    assertNotEquals(l1, l2);
    assertEquals(l3.hashCode(), l2.hashCode());
    assertEquals(l3, l2);
  }
}

View File

@ -15,7 +15,7 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.lucene.util.hppc; package org.apache.lucene.internal.hppc;
import static org.hamcrest.Matchers.*; import static org.hamcrest.Matchers.*;

View File

@ -15,7 +15,7 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.lucene.util.hppc; package org.apache.lucene.internal.hppc;
import com.carrotsearch.randomizedtesting.RandomizedTest; import com.carrotsearch.randomizedtesting.RandomizedTest;
import java.util.Arrays; import java.util.Arrays;
@ -24,7 +24,6 @@ import java.util.HashSet;
import java.util.Random; import java.util.Random;
import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicInteger;
import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.tests.util.LuceneTestCase;
import org.junit.After;
import org.junit.Test; import org.junit.Test;
/** /**
@ -86,23 +85,6 @@ public class TestLongIntHashMap extends LuceneTestCase {
return new LongIntHashMap(); return new LongIntHashMap();
} }
@After
public void checkEmptySlotsUninitialized() {
if (map != null) {
int occupied = 0;
for (int i = 0; i <= map.mask; i++) {
if (((map.keys[i]) == 0)) {
} else {
occupied++;
}
}
assertEquals(occupied, map.assigned);
if (!map.hasEmptyKey) {}
}
}
/** Convert to target type from an integer used to test stuff. */ /** Convert to target type from an integer used to test stuff. */
private int vcast(int value) { private int vcast(int value) {
return value; return value;
@ -335,10 +317,10 @@ public class TestLongIntHashMap extends LuceneTestCase {
map.put(empty, value1); map.put(empty, value1);
assertEquals(1, map.size()); assertEquals(1, map.size());
assertEquals(false, map.isEmpty()); assertFalse(map.isEmpty());
assertEquals(value1, map.get(empty)); assertEquals(value1, map.get(empty));
assertEquals(value1, map.getOrDefault(empty, value2)); assertEquals(value1, map.getOrDefault(empty, value2));
assertEquals(true, map.iterator().hasNext()); assertTrue(map.iterator().hasNext());
assertEquals(empty, map.iterator().next().key); assertEquals(empty, map.iterator().next().key);
assertEquals(value1, map.iterator().next().value); assertEquals(value1, map.iterator().next().value);
@ -501,8 +483,8 @@ public class TestLongIntHashMap extends LuceneTestCase {
assertEquals(l1.hashCode(), l2.hashCode()); assertEquals(l1.hashCode(), l2.hashCode());
assertEquals(l1, l2); assertEquals(l1, l2);
assertFalse(l1.equals(l3)); assertNotEquals(l1, l3);
assertFalse(l2.equals(l3)); assertNotEquals(l2, l3);
} }
@Test @Test
@ -511,8 +493,8 @@ public class TestLongIntHashMap extends LuceneTestCase {
LongIntHashMap l2 = LongIntHashMap.from(newArray(key2), newvArray(value1)); LongIntHashMap l2 = LongIntHashMap.from(newArray(key2), newvArray(value1));
assertFalse(l1.equals(l2)); assertNotEquals(l1, l2);
assertFalse(l2.equals(l1)); assertNotEquals(l2, l1);
} }
/** Runs random insertions/deletions/clearing and compares the results against {@link HashMap}. */ /** Runs random insertions/deletions/clearing and compares the results against {@link HashMap}. */

View File

@ -15,7 +15,7 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.lucene.util.hppc; package org.apache.lucene.internal.hppc;
import com.carrotsearch.randomizedtesting.RandomizedTest; import com.carrotsearch.randomizedtesting.RandomizedTest;
import java.util.Arrays; import java.util.Arrays;
@ -24,7 +24,6 @@ import java.util.HashSet;
import java.util.Random; import java.util.Random;
import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicInteger;
import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.tests.util.LuceneTestCase;
import org.junit.After;
import org.junit.Test; import org.junit.Test;
/** /**
@ -86,23 +85,6 @@ public class TestLongObjectHashMap extends LuceneTestCase {
return new LongObjectHashMap(); return new LongObjectHashMap();
} }
@After
public void checkEmptySlotsUninitialized() {
if (map != null) {
int occupied = 0;
for (int i = 0; i <= map.mask; i++) {
if (((map.keys[i]) == 0)) {
} else {
occupied++;
}
}
assertEquals(occupied, map.assigned);
if (!map.hasEmptyKey) {}
}
}
/** Convert to target type from an integer used to test stuff. */ /** Convert to target type from an integer used to test stuff. */
private int vcast(int value) { private int vcast(int value) {
return value; return value;
@ -325,7 +307,7 @@ public class TestLongObjectHashMap extends LuceneTestCase {
public void testRemove() { public void testRemove() {
map.put(key1, value1); map.put(key1, value1);
assertEquals(value1, map.remove(key1)); assertEquals(value1, map.remove(key1));
assertEquals(null, map.remove(key1)); assertNull(map.remove(key1));
assertEquals(0, map.size()); assertEquals(0, map.size());
// These are internals, but perhaps worth asserting too. // These are internals, but perhaps worth asserting too.
@ -339,15 +321,15 @@ public class TestLongObjectHashMap extends LuceneTestCase {
map.put(empty, value1); map.put(empty, value1);
assertEquals(1, map.size()); assertEquals(1, map.size());
assertEquals(false, map.isEmpty()); assertFalse(map.isEmpty());
assertEquals(value1, map.get(empty)); assertEquals(value1, map.get(empty));
assertEquals(value1, map.getOrDefault(empty, value2)); assertEquals(value1, map.getOrDefault(empty, value2));
assertEquals(true, map.iterator().hasNext()); assertTrue(map.iterator().hasNext());
assertEquals(empty, map.iterator().next().key); assertEquals(empty, map.iterator().next().key);
assertEquals(value1, map.iterator().next().value); assertEquals(value1, map.iterator().next().value);
map.remove(empty); map.remove(empty);
assertEquals(null, map.get(empty)); assertNull(map.get(empty));
assertEquals(0, map.size()); assertEquals(0, map.size());
map.put(empty, null); map.put(empty, null);
@ -360,13 +342,13 @@ public class TestLongObjectHashMap extends LuceneTestCase {
assertFalse(map.containsKey(empty)); assertFalse(map.containsKey(empty));
assertNull(map.get(empty)); assertNull(map.get(empty));
assertEquals(null, map.put(empty, value1)); assertNull(map.put(empty, value1));
assertEquals(value1, map.put(empty, value2)); assertEquals(value1, map.put(empty, value2));
map.clear(); map.clear();
assertFalse(map.indexExists(map.indexOf(empty))); assertFalse(map.indexExists(map.indexOf(empty)));
assertEquals(null, map.put(empty, value1)); assertNull(map.put(empty, value1));
map.clear(); map.clear();
assertEquals(null, map.remove(empty)); assertNull(map.remove(empty));
} }
/* */ /* */
@ -406,8 +388,8 @@ public class TestLongObjectHashMap extends LuceneTestCase {
assertEquals(0, map.assigned); assertEquals(0, map.assigned);
// Check values are cleared. // Check values are cleared.
assertEquals(null, map.put(key1, value1)); assertNull(map.put(key1, value1));
assertEquals(null, map.remove(key2)); assertNull(map.remove(key2));
map.clear(); map.clear();
// Check if the map behaves properly upon subsequent use. // Check if the map behaves properly upon subsequent use.
@ -515,8 +497,8 @@ public class TestLongObjectHashMap extends LuceneTestCase {
assertEquals(l1.hashCode(), l2.hashCode()); assertEquals(l1.hashCode(), l2.hashCode());
assertEquals(l1, l2); assertEquals(l1, l2);
assertFalse(l1.equals(l3)); assertNotEquals(l1, l3);
assertFalse(l2.equals(l3)); assertNotEquals(l2, l3);
} }
@Test @Test
@ -525,8 +507,8 @@ public class TestLongObjectHashMap extends LuceneTestCase {
LongObjectHashMap l2 = LongObjectHashMap.from(newArray(key2), newvArray(value1)); LongObjectHashMap l2 = LongObjectHashMap.from(newArray(key2), newvArray(value1));
assertFalse(l1.equals(l2)); assertNotEquals(l1, l2);
assertFalse(l2.equals(l1)); assertNotEquals(l2, l1);
} }
/** Runs random insertions/deletions/clearing and compares the results against {@link HashMap}. */ /** Runs random insertions/deletions/clearing and compares the results against {@link HashMap}. */

View File

@ -116,7 +116,7 @@ public class TestModularLayer extends AbstractLuceneDistributionTest {
public void testExpectedDistributionModuleNames() { public void testExpectedDistributionModuleNames() {
Assertions.assertThat( Assertions.assertThat(
allLuceneModules.stream().map(module -> module.descriptor().name()).sorted()) allLuceneModules.stream().map(module -> module.descriptor().name()).sorted())
.containsExactly( .containsOnly(
"org.apache.lucene.analysis.common", "org.apache.lucene.analysis.common",
"org.apache.lucene.analysis.icu", "org.apache.lucene.analysis.icu",
"org.apache.lucene.analysis.kuromoji", "org.apache.lucene.analysis.kuromoji",
@ -353,6 +353,9 @@ public class TestModularLayer extends AbstractLuceneDistributionTest {
moduleExports.removeIf( moduleExports.removeIf(
export -> { export -> {
boolean isInternal = export.source().startsWith("org.apache.lucene.internal"); boolean isInternal = export.source().startsWith("org.apache.lucene.internal");
if (isInternal && export.source().equals("org.apache.lucene.internal.hppc")) {
return true;
}
if (isInternal) { if (isInternal) {
Assertions.assertThat(export.targets()) Assertions.assertThat(export.targets())
.containsExactlyInAnyOrder("org.apache.lucene.test_framework"); .containsExactlyInAnyOrder("org.apache.lucene.test_framework");

View File

@ -22,7 +22,6 @@ description = 'Faceted indexing and search capabilities'
dependencies { dependencies {
moduleApi project(':lucene:core') moduleApi project(':lucene:core')
moduleImplementation 'com.carrotsearch:hppc'
moduleTestImplementation project(':lucene:test-framework') moduleTestImplementation project(':lucene:test-framework')
moduleTestImplementation project(':lucene:queries') moduleTestImplementation project(':lucene:queries')

View File

@ -16,9 +16,7 @@
*/ */
/** Faceted indexing and search capabilities */ /** Faceted indexing and search capabilities */
@SuppressWarnings({"requires-automatic"})
module org.apache.lucene.facet { module org.apache.lucene.facet {
requires com.carrotsearch.hppc;
requires org.apache.lucene.core; requires org.apache.lucene.core;
exports org.apache.lucene.facet; exports org.apache.lucene.facet;

View File

@ -23,6 +23,8 @@ import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Objects; import java.util.Objects;
import org.apache.lucene.index.Term; import org.apache.lucene.index.Term;
import org.apache.lucene.internal.hppc.IntCursor;
import org.apache.lucene.internal.hppc.IntHashSet;
import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.BoostQuery;
@ -31,8 +33,6 @@ import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.QueryVisitor;
import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.hppc.IntCursor;
import org.apache.lucene.util.hppc.IntHashSet;
/** /**
* A {@link Query} for drill-down over facet categories. You should call {@link #add(String, * A {@link Query} for drill-down over facet categories. You should call {@link #add(String,

View File

@ -17,8 +17,6 @@
package org.apache.lucene.facet; package org.apache.lucene.facet;
import com.carrotsearch.hppc.LongIntHashMap;
import com.carrotsearch.hppc.cursors.LongIntCursor;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
@ -30,6 +28,7 @@ import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.internal.hppc.LongIntHashMap;
import org.apache.lucene.search.ConjunctionUtils; import org.apache.lucene.search.ConjunctionUtils;
import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.LongValues; import org.apache.lucene.search.LongValues;
@ -391,7 +390,7 @@ public class LongValueFacetCounts extends Facets {
} }
} }
if (hashCounts.size() != 0) { if (hashCounts.size() != 0) {
for (LongIntCursor c : hashCounts) { for (LongIntHashMap.LongIntCursor c : hashCounts) {
int count = c.value; int count = c.value;
if (count != 0) { if (count != 0) {
labelValues.add(new LabelAndValue(Long.toString(c.key), c.value)); labelValues.add(new LabelAndValue(Long.toString(c.key), c.value));
@ -443,7 +442,7 @@ public class LongValueFacetCounts extends Facets {
if (hashCounts.size() != 0) { if (hashCounts.size() != 0) {
childCount += hashCounts.size(); childCount += hashCounts.size();
for (LongIntCursor c : hashCounts) { for (LongIntHashMap.LongIntCursor c : hashCounts) {
int count = c.value; int count = c.value;
if (count != 0) { if (count != 0) {
if (e == null) { if (e == null) {
@ -493,7 +492,7 @@ public class LongValueFacetCounts extends Facets {
long[] hashValues = new long[this.hashCounts.size()]; long[] hashValues = new long[this.hashCounts.size()];
int upto = 0; int upto = 0;
for (LongIntCursor c : this.hashCounts) { for (LongIntHashMap.LongIntCursor c : this.hashCounts) {
if (c.value != 0) { if (c.value != 0) {
hashCounts[upto] = c.value; hashCounts[upto] = c.value;
hashValues[upto] = c.key; hashValues[upto] = c.key;
@ -592,7 +591,7 @@ public class LongValueFacetCounts extends Facets {
} }
if (hashCounts.size() != 0) { if (hashCounts.size() != 0) {
for (LongIntCursor c : hashCounts) { for (LongIntHashMap.LongIntCursor c : hashCounts) {
if (c.value != 0) { if (c.value != 0) {
b.append(" "); b.append(" ");
b.append(c.key); b.append(c.key);

Some files were not shown because too many files have changed in this diff Show More