HDDS-2143. Rename classes under package org.apache.hadoop.utils
Closes #1465
parent 285ed0a849
commit 6d4b20c047
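The change is purely a package move: every class keeps its name and API, and only the prefix changes from `org.apache.hadoop.utils` to `org.apache.hadoop.hdds.utils`. A minimal sketch of what a downstream caller has to update (illustrative only; `MetadataConsumer` is a hypothetical class, not part of this commit):

```java
// Before HDDS-2143 a consumer imported the utility classes like this:
//   import org.apache.hadoop.utils.db.DBStore;
//   import org.apache.hadoop.utils.db.Table;
// After HDDS-2143 only the package prefix changes:
import org.apache.hadoop.hdds.utils.db.DBStore;
import org.apache.hadoop.hdds.utils.db.Table;

public class MetadataConsumer {
  // Field types are unchanged; only the imports above needed editing.
  private final DBStore store;
  private final Table<String, String> table;

  public MetadataConsumer(DBStore store, Table<String, String> table) {
    this.store = store;
    this.table = table;
  }
}
```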
@@ -16,7 +16,7 @@
  */
 package org.apache.hadoop.hdds;
 
-import org.apache.hadoop.utils.db.DBProfile;
+import org.apache.hadoop.hdds.utils.db.DBProfile;
 
 /**
  * This class contains constants for configuration keys and default values
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hdds.cli;
 
-import org.apache.hadoop.utils.HddsVersionInfo;
+import org.apache.hadoop.hdds.utils.HddsVersionInfo;
 
 import picocli.CommandLine.IVersionProvider;
 
@@ -15,7 +15,7 @@
  * the License.
  */
 
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.Lists;
@@ -15,7 +15,7 @@
  * the License.
  */
 
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import java.util.concurrent.Callable;
@@ -15,7 +15,7 @@
  * the License.
  */
 
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import java.util.PriorityQueue;
@@ -15,7 +15,7 @@
  * the License.
  */
 
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 /**
  * Result of a {@link BackgroundTask}.
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import com.google.common.collect.Lists;
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import java.io.IOException;
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -16,10 +16,10 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import org.apache.commons.lang3.tuple.ImmutablePair;
-import org.apache.hadoop.utils.MetadataKeyFilters.MetadataKeyFilter;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters.MetadataKeyFilter;
 import org.fusesource.leveldbjni.JniDBFactory;
 import org.iq80.leveldb.DB;
 import org.iq80.leveldb.DBIterator;
@@ -16,19 +16,16 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import org.iq80.leveldb.DBIterator;
 import java.util.Map;
 import java.util.NoSuchElementException;
 
-import org.apache.hadoop.utils.MetadataStore.KeyValue;
-
-
 /**
  * LevelDB store iterator.
  */
-public class LevelDBStoreIterator implements MetaStoreIterator<KeyValue> {
+public class LevelDBStoreIterator implements MetaStoreIterator< MetadataStore.KeyValue > {
 
 
   private DBIterator levelDBIterator;
@@ -44,10 +41,10 @@ public class LevelDBStoreIterator implements MetaStoreIterator<KeyValue> {
   }
 
   @Override
-  public KeyValue next() {
+  public MetadataStore.KeyValue next() {
     if(levelDBIterator.hasNext()) {
       Map.Entry<byte[], byte[]> entry = levelDBIterator.next();
-      return KeyValue.create(entry.getKey(), entry.getValue());
+      return MetadataStore.KeyValue.create(entry.getKey(), entry.getValue());
     }
     throw new NoSuchElementException("LevelDB Store has no more elements");
   }
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import java.util.Iterator;
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
@@ -16,11 +16,11 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import org.apache.commons.lang3.tuple.ImmutablePair;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.utils.MetadataKeyFilters.MetadataKeyFilter;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters.MetadataKeyFilter;
 
 import java.io.Closeable;
 import java.io.IOException;
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import java.io.File;
 import java.io.IOException;
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import org.apache.hadoop.io.retry.RetryPolicy;
 import org.apache.hadoop.util.ThreadUtil;
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import com.google.common.base.Preconditions;
 import org.apache.commons.io.FileUtils;
@@ -17,18 +17,16 @@
  */
 
 
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import org.rocksdb.RocksIterator;
 
 import java.util.NoSuchElementException;
 
-import org.apache.hadoop.utils.MetadataStore.KeyValue;
-
 /**
  * RocksDB store iterator.
  */
-public class RocksDBStoreIterator implements MetaStoreIterator<KeyValue> {
+public class RocksDBStoreIterator implements MetaStoreIterator< MetadataStore.KeyValue > {
 
   private RocksIterator rocksDBIterator;
 
@@ -43,9 +41,9 @@ public class RocksDBStoreIterator implements MetaStoreIterator<KeyValue> {
   }
 
   @Override
-  public KeyValue next() {
+  public MetadataStore.KeyValue next() {
     if (rocksDBIterator.isValid()) {
-      KeyValue value = KeyValue.create(rocksDBIterator.key(), rocksDBIterator
+      MetadataStore.KeyValue value = MetadataStore.KeyValue.create(rocksDBIterator.key(), rocksDBIterator
           .value());
       rocksDBIterator.next();
       return value;
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import org.apache.hadoop.metrics2.MetricsCollector;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import org.apache.ratis.util.function.CheckedRunnable;
 import org.slf4j.Logger;
@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import org.apache.hadoop.hdds.HddsUtils;
 
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -16,7 +16,7 @@
  * limitations under the License.
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 /**
  * Class represents a batch operation, collects multiple db operation.
@@ -16,9 +16,9 @@
  * limitations under the License.
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
-import org.apache.hadoop.utils.db.Table.KeyValue;
+import org.apache.hadoop.hdds.utils.db.Table.KeyValue;
 
 /**
  * Key value for raw Table implementations.
@@ -16,7 +16,7 @@
  * limitations under the License.
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import java.io.IOException;
@@ -16,7 +16,7 @@
  * limitations under the License.
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import java.io.IOException;
 import java.util.HashMap;
@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import java.io.IOException;
 import java.nio.file.Path;
@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import com.google.common.base.Preconditions;
 import org.eclipse.jetty.util.StringUtil;
@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import org.apache.hadoop.conf.StorageUnit;
 import org.rocksdb.BlockBasedTableConfig;
@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import java.io.File;
 import java.io.IOException;
@@ -25,7 +25,7 @@ import java.util.ArrayList;
 import java.util.Map;
 
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.utils.db.cache.TableCacheImpl;
+import org.apache.hadoop.hdds.utils.db.cache.TableCacheImpl;
 
 /**
  * The DBStore interface provides the ability to create Tables, which store
@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import com.google.common.base.Preconditions;
 import org.apache.hadoop.conf.Configuration;
@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import java.util.ArrayList;
 import java.util.List;
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import java.io.IOException;
@@ -16,7 +16,7 @@
  * limitations under the License.
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import com.google.common.primitives.Longs;
@@ -16,7 +16,7 @@
  * limitations under the License.
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import java.io.IOException;
@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import java.nio.file.Path;
 import java.nio.file.Paths;
@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import static org.apache.hadoop.ozone.OzoneConsts.OM_DB_CHECKPOINTS_DIR_NAME;
 
@@ -33,12 +33,12 @@ import java.util.Map;
 import java.util.Set;
 
 import org.apache.hadoop.hdds.HddsUtils;
+import org.apache.hadoop.hdds.utils.RocksDBStoreMBean;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.metrics2.util.MBeans;
-import org.apache.hadoop.utils.RocksDBStoreMBean;
 
 import com.google.common.base.Preconditions;
-import org.apache.hadoop.utils.db.cache.TableCacheImpl;
+import org.apache.hadoop.hdds.utils.db.cache.TableCacheImpl;
 import org.apache.ratis.thirdparty.com.google.common.annotations.VisibleForTesting;
 import org.rocksdb.ColumnFamilyDescriptor;
 import org.rocksdb.ColumnFamilyHandle;
@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import java.io.IOException;
 import java.util.NoSuchElementException;
@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import java.io.IOException;
 import java.nio.file.Path;
@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import java.io.IOException;
@@ -16,7 +16,7 @@
  * limitations under the License.
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import java.io.IOException;
 import org.apache.hadoop.hdfs.DFSUtil;
@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import java.io.IOException;
 import java.util.Iterator;
@@ -25,8 +25,8 @@ import java.util.Map;
 
 import org.apache.commons.lang3.NotImplementedException;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.utils.db.cache.CacheKey;
-import org.apache.hadoop.utils.db.cache.CacheValue;
+import org.apache.hadoop.hdds.utils.db.cache.CacheKey;
+import org.apache.hadoop.hdds.utils.db.cache.CacheValue;
 /**
  * Interface for key-value store that stores ozone metadata. Ozone metadata is
  * stored as key value pairs, both key and value are arbitrary byte arrays. Each
@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import org.apache.commons.lang3.builder.EqualsBuilder;
 import org.apache.commons.lang3.builder.HashCodeBuilder;
@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import java.io.Closeable;
 import java.io.IOException;
@@ -16,7 +16,7 @@
  * limitations under the License.
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import java.io.IOException;
 import java.util.Iterator;
@@ -24,15 +24,15 @@ import java.util.Map;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Optional;
-import org.apache.hadoop.utils.db.cache.CacheKey;
-import org.apache.hadoop.utils.db.cache.CacheResult;
-import org.apache.hadoop.utils.db.cache.CacheValue;
-import org.apache.hadoop.utils.db.cache.TableCacheImpl;
-import org.apache.hadoop.utils.db.cache.TableCache;
-import org.apache.hadoop.utils.db.cache.TableCacheImpl.CacheCleanupPolicy;
+import org.apache.hadoop.hdds.utils.db.cache.CacheKey;
+import org.apache.hadoop.hdds.utils.db.cache.CacheResult;
+import org.apache.hadoop.hdds.utils.db.cache.CacheValue;
+import org.apache.hadoop.hdds.utils.db.cache.TableCacheImpl;
+import org.apache.hadoop.hdds.utils.db.cache.TableCache;
+import org.apache.hadoop.hdds.utils.db.cache.TableCacheImpl.CacheCleanupPolicy;
 
-import static org.apache.hadoop.utils.db.cache.CacheResult.CacheStatus.EXISTS;
-import static org.apache.hadoop.utils.db.cache.CacheResult.CacheStatus.NOT_EXIST;
+import static org.apache.hadoop.hdds.utils.db.cache.CacheResult.CacheStatus.EXISTS;
+import static org.apache.hadoop.hdds.utils.db.cache.CacheResult.CacheStatus.NOT_EXIST;
 /**
  * Strongly typed table implementation.
  * <p>
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.utils.db.cache;
+package org.apache.hadoop.hdds.utils.db.cache;
 
 import java.util.Objects;
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.utils.db.cache;
+package org.apache.hadoop.hdds.utils.db.cache;
 
 import java.util.Objects;
@@ -16,7 +16,7 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.utils.db.cache;
+package org.apache.hadoop.hdds.utils.db.cache;
 
 import com.google.common.base.Optional;
@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db.cache;
+package org.apache.hadoop.hdds.utils.db.cache;
 
 import java.util.Objects;
@@ -17,12 +17,11 @@
  *
  */
 
-package org.apache.hadoop.utils.db.cache;
+package org.apache.hadoop.hdds.utils.db.cache;
 
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Evolving;
-import org.apache.hadoop.utils.db.cache.CacheResult.CacheStatus;
-import org.apache.hadoop.utils.db.cache.TableCacheImpl.CacheCleanupPolicy;
 
 import java.util.Iterator;
 import java.util.Map;
@@ -78,15 +77,15 @@ public interface TableCache<CACHEKEY extends CacheKey,
    * Check key exist in cache or not.
    *
    * If it exists return CacheResult with value and status as
-   * {@link CacheStatus#EXISTS}
+   * {@link CacheResult.CacheStatus#EXISTS}
    *
    * If it does not exist:
    * If cache clean up policy is
    * {@link TableCacheImpl.CacheCleanupPolicy#NEVER} it means table cache is
    * full cache. It return's {@link CacheResult} with null
-   * and status as {@link CacheStatus#NOT_EXIST}.
+   * and status as {@link CacheResult.CacheStatus#NOT_EXIST}.
    *
-   * If cache clean up policy is {@link CacheCleanupPolicy#MANUAL} it means
+   * If cache clean up policy is {@link TableCacheImpl.CacheCleanupPolicy#MANUAL} it means
    * table cache is partial cache. It return's {@link CacheResult} with
    * null and status as MAY_EXIST.
    *
@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db.cache;
+package org.apache.hadoop.hdds.utils.db.cache;
 
 import java.util.Iterator;
 import java.util.Map;
@@ -15,4 +15,4 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.utils.db.cache;
+package org.apache.hadoop.hdds.utils.db.cache;
@@ -19,4 +19,4 @@
 /**
  * Database interfaces for Ozone.
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
@@ -15,4 +15,4 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import java.util.ArrayList;
 import java.util.List;
@@ -14,7 +14,7 @@
  * License for the specific language governing permissions and limitations under
  * the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import com.google.common.collect.Lists;
 import org.apache.commons.io.FileUtils;
@@ -25,9 +25,8 @@ import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.DFSUtilClient;
 import org.apache.hadoop.ozone.OzoneConfigKeys;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.hadoop.utils.MetadataKeyFilters.KeyPrefixFilter;
-import org.apache.hadoop.utils.MetadataKeyFilters.MetadataKeyFilter;
-import org.apache.hadoop.utils.MetadataStore.KeyValue;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters.KeyPrefixFilter;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters.MetadataKeyFilter;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Rule;
@@ -122,7 +121,7 @@ public class TestMetadataStore {
 
     //As database is empty, check whether iterator is working as expected or
     // not.
-    MetaStoreIterator<KeyValue> metaStoreIterator = dbStore.iterator();
+    MetaStoreIterator< MetadataStore.KeyValue > metaStoreIterator = dbStore.iterator();
     assertFalse(metaStoreIterator.hasNext());
     try {
       metaStoreIterator.next();
@@ -140,7 +139,7 @@ public class TestMetadataStore {
 
     int i = 0;
     while (metaStoreIterator.hasNext()) {
-      KeyValue val = metaStoreIterator.next();
+      MetadataStore.KeyValue val = metaStoreIterator.next();
       assertEquals("a" + i, getString(val.getKey()));
       assertEquals("a-value" + i, getString(val.getValue()));
       i++;
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertThrows;
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;
@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.hdfs.DFSUtil;
@@ -40,7 +40,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
-import static org.apache.hadoop.utils.db.DBConfigFromFile.getOptionsFileNameFromDB;
+import static org.apache.hadoop.hdds.utils.db.DBConfigFromFile.getOptionsFileNameFromDB;
 
 /**
  * DBConf tests.
@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.hadoop.conf.Configuration;
@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import javax.management.MBeanServer;
@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 
 import java.io.IOException;
 import java.util.Arrays;
@@ -29,11 +29,11 @@ import java.util.Set;
 import com.google.common.base.Optional;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.hadoop.utils.db.Table.KeyValue;
+import org.apache.hadoop.hdds.utils.db.Table.KeyValue;
 
 import org.apache.commons.lang3.RandomStringUtils;
-import org.apache.hadoop.utils.db.cache.CacheKey;
-import org.apache.hadoop.utils.db.cache.CacheValue;
+import org.apache.hadoop.hdds.utils.db.cache.CacheKey;
+import org.apache.hadoop.hdds.utils.db.cache.CacheValue;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -17,7 +17,7 @@
  *
  */
 
-package org.apache.hadoop.utils.db.cache;
+package org.apache.hadoop.hdds.utils.db.cache;
 
 import java.util.Arrays;
 import java.util.Collection;
@@ -19,4 +19,4 @@
 /**
  * Tests for the DB Cache Utilities.
  */
-package org.apache.hadoop.utils.db.cache;
+package org.apache.hadoop.hdds.utils.db.cache;
@@ -19,4 +19,4 @@
 /**
  * Tests for the DB Utilities.
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
@@ -19,4 +19,4 @@
 /**
  * DB test Utils.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
@@ -47,7 +47,7 @@ import org.apache.hadoop.ozone.protocol.commands.DeleteBlockCommandStatus;
 import org.apache.hadoop.ozone.protocol.commands.DeleteBlocksCommand;
 import org.apache.hadoop.ozone.protocol.commands.SCMCommand;
 import org.apache.hadoop.util.Time;
-import org.apache.hadoop.utils.BatchOperation;
+import org.apache.hadoop.hdds.utils.BatchOperation;
 import org.apache.hadoop.ozone.container.common.utils.ReferenceCountedDB;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -23,8 +23,8 @@ import org.apache.commons.collections.MapIterator;
 import org.apache.commons.collections.map.LRUMap;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ozone.OzoneConfigKeys;
-import org.apache.hadoop.utils.MetadataStore;
-import org.apache.hadoop.utils.MetadataStoreBuilder;
+import org.apache.hadoop.hdds.utils.MetadataStore;
+import org.apache.hadoop.hdds.utils.MetadataStoreBuilder;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -21,7 +21,7 @@ package org.apache.hadoop.ozone.container.common.utils;
 import com.google.common.base.Preconditions;
 
 import org.apache.commons.lang.exception.ExceptionUtils;
-import org.apache.hadoop.utils.MetadataStore;
+import org.apache.hadoop.hdds.utils.MetadataStore;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -28,11 +28,11 @@ import org.apache.hadoop.ozone.container.common.impl.ContainerDataYaml;
 import org.apache.hadoop.ozone.container.common.interfaces.BlockIterator;
 import org.apache.hadoop.ozone.container.keyvalue.helpers.BlockUtils;
 import org.apache.hadoop.ozone.container.keyvalue.helpers.KeyValueContainerLocationUtil;
-import org.apache.hadoop.utils.MetaStoreIterator;
-import org.apache.hadoop.utils.MetadataKeyFilters;
-import org.apache.hadoop.utils.MetadataKeyFilters.KeyPrefixFilter;
+import org.apache.hadoop.hdds.utils.MetaStoreIterator;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters.KeyPrefixFilter;
 import org.apache.hadoop.ozone.container.common.utils.ReferenceCountedDB;
-import org.apache.hadoop.utils.MetadataStore.KeyValue;
+import org.apache.hadoop.hdds.utils.MetadataStore.KeyValue;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -36,9 +36,9 @@ import org.apache.hadoop.ozone.OzoneConsts;
 import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils;
 import org.apache.hadoop.ozone.container.common.helpers.BlockData;
 import org.apache.hadoop.ozone.container.keyvalue.KeyValueContainerData;
-import org.apache.hadoop.utils.MetadataKeyFilters;
-import org.apache.hadoop.utils.MetadataStore;
-import org.apache.hadoop.utils.MetadataStoreBuilder;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters;
+import org.apache.hadoop.hdds.utils.MetadataStore;
+import org.apache.hadoop.hdds.utils.MetadataStoreBuilder;
 
 import com.google.common.base.Preconditions;
 import org.apache.commons.io.FileUtils;
@@ -33,8 +33,8 @@ import org.apache.hadoop.ozone.container.keyvalue.helpers.BlockUtils;
 import org.apache.hadoop.ozone.container.common.interfaces.Container;
 import org.apache.hadoop.ozone.container.keyvalue.interfaces.BlockManager;
 import org.apache.hadoop.ozone.container.common.utils.ContainerCache;
-import org.apache.hadoop.utils.BatchOperation;
-import org.apache.hadoop.utils.MetadataKeyFilters;
+import org.apache.hadoop.hdds.utils.BatchOperation;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters;
 import org.apache.hadoop.ozone.container.common.utils.ReferenceCountedDB;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -40,12 +40,12 @@ import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
 import org.apache.hadoop.ozone.OzoneConsts;
 import org.apache.hadoop.util.Time;
-import org.apache.hadoop.utils.BackgroundService;
-import org.apache.hadoop.utils.BackgroundTask;
-import org.apache.hadoop.utils.BackgroundTaskQueue;
-import org.apache.hadoop.utils.BackgroundTaskResult;
-import org.apache.hadoop.utils.BatchOperation;
-import org.apache.hadoop.utils.MetadataKeyFilters.KeyPrefixFilter;
+import org.apache.hadoop.hdds.utils.BackgroundService;
+import org.apache.hadoop.hdds.utils.BackgroundTask;
+import org.apache.hadoop.hdds.utils.BackgroundTaskQueue;
+import org.apache.hadoop.hdds.utils.BackgroundTaskResult;
+import org.apache.hadoop.hdds.utils.BatchOperation;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters.KeyPrefixFilter;
 import org.apache.hadoop.ozone.container.common.utils.ReferenceCountedDB;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -40,7 +40,7 @@ import org.apache.hadoop.ozone.container.keyvalue.KeyValueContainerData;
 import org.apache.hadoop.ozone.container.common.impl.ContainerDataYaml;
 import org.apache.hadoop.ozone.container.keyvalue.helpers.BlockUtils;
 import org.apache.hadoop.ozone.container.keyvalue.helpers.KeyValueContainerUtil;
-import org.apache.hadoop.utils.MetadataKeyFilters;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters;
 import org.apache.hadoop.ozone.container.common.utils.ReferenceCountedDB;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -23,8 +23,8 @@ import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.ozone.OzoneConfigKeys;
 import org.apache.hadoop.ozone.container.common.utils.ContainerCache;
 import org.apache.hadoop.ozone.container.common.utils.ReferenceCountedDB;
-import org.apache.hadoop.utils.MetadataStore;
-import org.apache.hadoop.utils.MetadataStoreBuilder;
+import org.apache.hadoop.hdds.utils.MetadataStore;
+import org.apache.hadoop.hdds.utils.MetadataStoreBuilder;
 import org.junit.Assert;
 import org.junit.Rule;
 import org.junit.Test;
@@ -33,7 +33,7 @@ import org.apache.hadoop.ozone.container.common.volume.RoundRobinVolumeChoosingP
 import org.apache.hadoop.ozone.container.common.volume.VolumeSet;
 import org.apache.hadoop.ozone.container.keyvalue.helpers.BlockUtils;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.hadoop.utils.MetadataKeyFilters;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters;
 import org.apache.hadoop.ozone.container.common.utils.ReferenceCountedDB;
 import org.junit.After;
 import org.junit.Before;
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hdds.server;
 
-import static org.apache.hadoop.utils.RocksDBStoreMBean.ROCKSDB_CONTEXT_PREFIX;
+import static org.apache.hadoop.hdds.utils.RocksDBStoreMBean.ROCKSDB_CONTEXT_PREFIX;
 
 import java.io.IOException;
 import java.io.Writer;
@@ -17,7 +17,7 @@
 
 package org.apache.hadoop.hdds.server;
 
-import org.apache.hadoop.utils.VersionInfo;
+import org.apache.hadoop.hdds.utils.VersionInfo;
 
 /**
  * Helper base class to report the standard version and runtime information.
@@ -45,7 +45,7 @@ import org.apache.hadoop.hdds.scm.pipeline.PipelineNotFoundException;
 import org.apache.hadoop.hdds.scm.server.StorageContainerManager;
 import org.apache.hadoop.metrics2.util.MBeans;
 import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.utils.UniqueId;
+import org.apache.hadoop.hdds.utils.UniqueId;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -47,9 +47,9 @@ import org.apache.hadoop.hdds.scm.container.ContainerReplica;
 import org.apache.hadoop.hdds.scm.metadata.SCMMetadataStore;
 import org.apache.hadoop.hdds.server.events.EventHandler;
 import org.apache.hadoop.hdds.server.events.EventPublisher;
-import org.apache.hadoop.utils.db.BatchOperation;
-import org.apache.hadoop.utils.db.Table;
-import org.apache.hadoop.utils.db.TableIterator;
+import org.apache.hadoop.hdds.utils.db.BatchOperation;
+import org.apache.hadoop.hdds.utils.db.Table;
+import org.apache.hadoop.hdds.utils.db.TableIterator;
 import org.eclipse.jetty.util.ConcurrentHashSet;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -30,10 +30,10 @@ import org.apache.hadoop.hdds.server.events.EventPublisher;
 import org.apache.hadoop.ozone.protocol.commands.CommandForDatanode;
 import org.apache.hadoop.ozone.protocol.commands.DeleteBlocksCommand;
 import org.apache.hadoop.util.Time;
-import org.apache.hadoop.utils.BackgroundService;
-import org.apache.hadoop.utils.BackgroundTask;
-import org.apache.hadoop.utils.BackgroundTaskQueue;
-import org.apache.hadoop.utils.BackgroundTaskResult.EmptyTaskResult;
+import org.apache.hadoop.hdds.utils.BackgroundService;
+import org.apache.hadoop.hdds.utils.BackgroundTask;
+import org.apache.hadoop.hdds.utils.BackgroundTaskQueue;
+import org.apache.hadoop.hdds.utils.BackgroundTaskResult.EmptyTaskResult;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -33,9 +33,9 @@ import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType;
 import org.apache.hadoop.hdds.server.ServerUtils;
 import org.apache.hadoop.hdds.server.events.EventPublisher;
 import org.apache.hadoop.ozone.OzoneConsts;
-import org.apache.hadoop.utils.BatchOperation;
-import org.apache.hadoop.utils.MetadataStore;
-import org.apache.hadoop.utils.MetadataStoreBuilder;
+import org.apache.hadoop.hdds.utils.BatchOperation;
+import org.apache.hadoop.hdds.utils.MetadataStore;
+import org.apache.hadoop.hdds.utils.MetadataStoreBuilder;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -28,7 +28,7 @@ import org.apache.hadoop.metrics2.util.MBeans;
 
 
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.hadoop.utils.Scheduler;
+import org.apache.hadoop.hdds.utils.Scheduler;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -21,7 +21,7 @@ package org.apache.hadoop.hdds.scm.metadata;
 
 import java.io.IOException;
 import java.math.BigInteger;
-import org.apache.hadoop.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.Codec;
 
 /**
  * Encode and decode BigInteger.
@@ -24,7 +24,7 @@ import com.google.protobuf.InvalidProtocolBufferException;
 import java.io.IOException;
 import org.apache.hadoop.hdds.protocol.proto
     .StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction;
-import org.apache.hadoop.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.Codec;
 
 /**
  * Codec for Persisting the DeletedBlocks.
@@ -21,7 +21,7 @@ package org.apache.hadoop.hdds.scm.metadata;
 
 import com.google.common.primitives.Longs;
 import java.io.IOException;
-import org.apache.hadoop.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.Codec;
 
 /**
  * Codec for Persisting the DeletedBlocks.
@@ -23,11 +23,11 @@ import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import java.io.IOException;
 import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.hdds.security.x509.certificate.authority.CertificateStore;
-import org.apache.hadoop.utils.db.DBStore;
-import org.apache.hadoop.utils.db.Table;
+import org.apache.hadoop.hdds.utils.db.DBStore;
+import org.apache.hadoop.hdds.utils.db.Table;
 import org.apache.hadoop.hdds.protocol.proto
     .StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction;
-import org.apache.hadoop.utils.db.TableIterator;
+import org.apache.hadoop.hdds.utils.db.TableIterator;
 
 /**
  * Generic interface for data stores for SCM.
@@ -27,12 +27,12 @@ import java.io.IOException;
 import org.apache.hadoop.hdds.security.x509.certificate.authority
     .CertificateStore;
 import org.apache.hadoop.hdds.server.ServerUtils;
-import org.apache.hadoop.utils.db.DBStore;
-import org.apache.hadoop.utils.db.DBStoreBuilder;
-import org.apache.hadoop.utils.db.Table;
+import org.apache.hadoop.hdds.utils.db.DBStore;
+import org.apache.hadoop.hdds.utils.db.DBStoreBuilder;
+import org.apache.hadoop.hdds.utils.db.Table;
 import org.apache.hadoop.hdds.protocol.proto
     .StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction;
-import org.apache.hadoop.utils.db.TableIterator;
+import org.apache.hadoop.hdds.utils.db.TableIterator;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -126,7 +126,7 @@ public class SCMMetadataStoreRDBImpl implements SCMMetadataStore {
   }
 
   @Override
-  public org.apache.hadoop.utils.db.DBStore getStore() {
+  public DBStore getStore() {
     return this.store;
   }
 
@@ -25,7 +25,7 @@ import java.security.cert.CertificateException;
 import java.security.cert.X509Certificate;
 import org.apache.hadoop.hdds.security.exception.SCMSecurityException;
 import org.apache.hadoop.hdds.security.x509.certificate.utils.CertificateCodec;
-import org.apache.hadoop.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.Codec;
 
 /**
  * Encodes and Decodes X509Certificate Class.
@@ -21,7 +21,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
 import org.apache.hadoop.hdds.scm.ScmConfigKeys;
 import org.apache.hadoop.ozone.OzoneConfigKeys;
-import org.apache.hadoop.utils.Scheduler;
+import org.apache.hadoop.hdds.utils.Scheduler;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -33,10 +33,10 @@ import org.apache.hadoop.hdds.server.ServerUtils;
 import org.apache.hadoop.hdds.server.events.EventPublisher;
 import org.apache.hadoop.metrics2.util.MBeans;
 import org.apache.hadoop.ozone.OzoneConsts;
-import org.apache.hadoop.utils.MetadataKeyFilters;
-import org.apache.hadoop.utils.MetadataStore;
-import org.apache.hadoop.utils.MetadataStoreBuilder;
-import org.apache.hadoop.utils.Scheduler;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters;
+import org.apache.hadoop.hdds.utils.MetadataStore;
+import org.apache.hadoop.hdds.utils.MetadataStoreBuilder;
+import org.apache.hadoop.hdds.utils.Scheduler;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -27,7 +27,7 @@ import java.util.concurrent.locks.ReentrantLock;
 import org.apache.hadoop.hdds.scm.metadata.SCMMetadataStore;
 import org.apache.hadoop.hdds.security.exception.SCMSecurityException;
 import org.apache.hadoop.hdds.security.x509.certificate.authority.CertificateStore;
-import org.apache.hadoop.utils.db.BatchOperation;
+import org.apache.hadoop.hdds.utils.db.BatchOperation;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -99,7 +99,7 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.authentication.client.AuthenticationException;
 import org.apache.hadoop.util.JvmPauseMonitor;
-import org.apache.hadoop.utils.HddsVersionInfo;
+import org.apache.hadoop.hdds.utils.HddsVersionInfo;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -43,9 +43,9 @@ import org.apache.hadoop.hdds.protocol.proto
     .StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKProto
     .DeleteBlockTransactionResult;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.hadoop.utils.MetadataKeyFilters;
-import org.apache.hadoop.utils.db.Table;
-import org.apache.hadoop.utils.db.TableIterator;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters;
+import org.apache.hadoop.hdds.utils.db.Table;
+import org.apache.hadoop.hdds.utils.db.TableIterator;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -30,8 +30,8 @@ import org.apache.hadoop.ozone.om.helpers.S3SecretValue;
 import org.apache.hadoop.ozone.om.lock.OzoneManagerLock;
 import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.VolumeList;
 import org.apache.hadoop.ozone.security.OzoneTokenIdentifier;
-import org.apache.hadoop.utils.db.DBStore;
-import org.apache.hadoop.utils.db.Table;
+import org.apache.hadoop.hdds.utils.db.DBStore;
+import org.apache.hadoop.hdds.utils.db.Table;
 
 import com.google.common.annotations.VisibleForTesting;
 
@@ -20,7 +20,7 @@ package org.apache.hadoop.ozone.om.codec;
 import java.io.IOException;
 import org.apache.hadoop.ozone.om.helpers.OmBucketInfo;
 import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.BucketInfo;
-import org.apache.hadoop.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.Codec;
 
 import com.google.common.base.Preconditions;
 import com.google.protobuf.InvalidProtocolBufferException;
@@ -20,7 +20,7 @@ package org.apache.hadoop.ozone.om.codec;
 import java.io.IOException;
 import org.apache.hadoop.ozone.om.helpers.OmKeyInfo;
 import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.KeyInfo;
-import org.apache.hadoop.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.Codec;
 
 import com.google.common.base.Preconditions;
 import com.google.protobuf.InvalidProtocolBufferException;
@@ -22,7 +22,7 @@ import com.google.protobuf.InvalidProtocolBufferException;
 import java.io.IOException;
 import org.apache.hadoop.ozone.om.helpers.OmMultipartKeyInfo;
 import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos;
-import org.apache.hadoop.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.Codec;
 
 
 /**
Some files were not shown because too many files have changed in this diff.