HDDS-2143. Rename classes under package org.apache.hadoop.utils

Closes #1465
Bharat Viswanadham 2019-09-18 16:51:39 +02:00 committed by Márton Elek
parent 285ed0a849
commit 6d4b20c047
227 changed files with 392 additions and 399 deletions
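Note: HDDS-2143 is a mechanical package move. Every class that lived under org.apache.hadoop.utils now lives under org.apache.hadoop.hdds.utils (likewise for the db and db.cache subpackages); class names and APIs are unchanged. As an illustrative, hypothetical sketch (this class is not part of the diff), the only change a downstream caller needs is its import:

// Hypothetical downstream class, shown only to illustrate the rename.
// Before HDDS-2143 the import would have been:
//   import org.apache.hadoop.utils.db.DBStore;
import org.apache.hadoop.hdds.utils.db.DBStore;

public class RenameExample {
  private final DBStore store; // same interface; only its package changed

  public RenameExample(DBStore store) {
    this.store = store;
  }
}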

View File

@@ -16,7 +16,7 @@
  */
 package org.apache.hadoop.hdds;
-import org.apache.hadoop.utils.db.DBProfile;
+import org.apache.hadoop.hdds.utils.db.DBProfile;
 /**
  * This class contains constants for configuration keys and default values

View File

@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hdds.cli;
-import org.apache.hadoop.utils.HddsVersionInfo;
+import org.apache.hadoop.hdds.utils.HddsVersionInfo;
 import picocli.CommandLine.IVersionProvider;

View File

@@ -15,7 +15,7 @@
  * the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.Lists;

View File

@@ -15,7 +15,7 @@
  * the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 import java.util.concurrent.Callable;

View File

@@ -15,7 +15,7 @@
  * the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 import java.util.PriorityQueue;

View File

@@ -15,7 +15,7 @@
  * the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 /**
  * Result of a {@link BackgroundTask}.

View File

@@ -16,7 +16,7 @@
  * limitations under the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 import com.google.common.collect.Lists;

View File

@@ -16,7 +16,7 @@
  * limitations under the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 import java.io.IOException;

View File

@@ -16,7 +16,7 @@
  * limitations under the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;

View File

@@ -16,10 +16,10 @@
  * limitations under the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 import org.apache.commons.lang3.tuple.ImmutablePair;
-import org.apache.hadoop.utils.MetadataKeyFilters.MetadataKeyFilter;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters.MetadataKeyFilter;
 import org.fusesource.leveldbjni.JniDBFactory;
 import org.iq80.leveldb.DB;
 import org.iq80.leveldb.DBIterator;

View File

@@ -16,19 +16,16 @@
  * limitations under the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 import org.iq80.leveldb.DBIterator;
 import java.util.Map;
 import java.util.NoSuchElementException;
-import org.apache.hadoop.utils.MetadataStore.KeyValue;
 /**
  * LevelDB store iterator.
  */
-public class LevelDBStoreIterator implements MetaStoreIterator<KeyValue> {
+public class LevelDBStoreIterator implements MetaStoreIterator<MetadataStore.KeyValue> {
   private DBIterator levelDBIterator;
@@ -44,10 +41,10 @@ public class LevelDBStoreIterator implements MetaStoreIterator<KeyValue> {
   }
   @Override
-  public KeyValue next() {
+  public MetadataStore.KeyValue next() {
     if(levelDBIterator.hasNext()) {
       Map.Entry<byte[], byte[]> entry = levelDBIterator.next();
-      return KeyValue.create(entry.getKey(), entry.getValue());
+      return MetadataStore.KeyValue.create(entry.getKey(), entry.getValue());
     }
     throw new NoSuchElementException("LevelDB Store has no more elements");
   }

View File

@@ -16,7 +16,7 @@
  * limitations under the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 import java.util.Iterator;

View File

@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;

View File

@@ -16,11 +16,11 @@
  * limitations under the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 import org.apache.commons.lang3.tuple.ImmutablePair;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.utils.MetadataKeyFilters.MetadataKeyFilter;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters.MetadataKeyFilter;
 import java.io.Closeable;
 import java.io.IOException;

View File

@@ -16,7 +16,7 @@
  * limitations under the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 import java.io.File;
 import java.io.IOException;

View File

@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 import org.apache.hadoop.io.retry.RetryPolicy;
 import org.apache.hadoop.util.ThreadUtil;

View File

@@ -16,7 +16,7 @@
  * limitations under the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 import com.google.common.base.Preconditions;
 import org.apache.commons.io.FileUtils;

View File

@@ -17,18 +17,16 @@
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 import org.rocksdb.RocksIterator;
 import java.util.NoSuchElementException;
-import org.apache.hadoop.utils.MetadataStore.KeyValue;
 /**
  * RocksDB store iterator.
  */
-public class RocksDBStoreIterator implements MetaStoreIterator<KeyValue> {
+public class RocksDBStoreIterator implements MetaStoreIterator<MetadataStore.KeyValue> {
   private RocksIterator rocksDBIterator;
@@ -43,9 +41,9 @@ public class RocksDBStoreIterator implements MetaStoreIterator<KeyValue> {
   }
   @Override
-  public KeyValue next() {
+  public MetadataStore.KeyValue next() {
     if (rocksDBIterator.isValid()) {
-      KeyValue value = KeyValue.create(rocksDBIterator.key(), rocksDBIterator
+      MetadataStore.KeyValue value = MetadataStore.KeyValue.create(rocksDBIterator.key(), rocksDBIterator
           .value());
       rocksDBIterator.next();
       return value;

View File

@@ -16,7 +16,7 @@
  * limitations under the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 import org.apache.hadoop.metrics2.MetricsCollector;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;

View File

@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 import org.apache.ratis.util.function.CheckedRunnable;
 import org.slf4j.Logger;

View File

@@ -17,7 +17,7 @@
  *
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 import org.apache.hadoop.hdds.HddsUtils;

View File

@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;

View File

@@ -16,7 +16,7 @@
  * limitations under the License.
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 /**
  * Class represents a batch operation, collects multiple db operation.

View File

@@ -16,9 +16,9 @@
  * limitations under the License.
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
-import org.apache.hadoop.utils.db.Table.KeyValue;
+import org.apache.hadoop.hdds.utils.db.Table.KeyValue;
 /**
  * Key value for raw Table implementations.

View File

@@ -16,7 +16,7 @@
  * limitations under the License.
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 import java.io.IOException;

View File

@@ -16,7 +16,7 @@
  * limitations under the License.
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 import java.io.IOException;
 import java.util.HashMap;

View File

@@ -17,7 +17,7 @@
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 import java.io.IOException;
 import java.nio.file.Path;

View File

@@ -17,7 +17,7 @@
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 import com.google.common.base.Preconditions;
 import org.eclipse.jetty.util.StringUtil;

View File

@@ -17,7 +17,7 @@
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 import org.apache.hadoop.conf.StorageUnit;
 import org.rocksdb.BlockBasedTableConfig;

View File

@@ -17,7 +17,7 @@
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 import java.io.File;
 import java.io.IOException;
@@ -25,7 +25,7 @@ import java.util.ArrayList;
 import java.util.Map;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.utils.db.cache.TableCacheImpl;
+import org.apache.hadoop.hdds.utils.db.cache.TableCacheImpl;
 /**
  * The DBStore interface provides the ability to create Tables, which store

View File

@@ -17,7 +17,7 @@
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 import com.google.common.base.Preconditions;
 import org.apache.hadoop.conf.Configuration;

View File

@@ -17,7 +17,7 @@
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 import java.util.ArrayList;
 import java.util.List;

View File

@@ -16,7 +16,7 @@
  * limitations under the License.
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 import java.io.IOException;

View File

@@ -16,7 +16,7 @@
  * limitations under the License.
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 import com.google.common.primitives.Longs;

View File

@@ -16,7 +16,7 @@
  * limitations under the License.
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 import java.io.IOException;

View File

@@ -17,7 +17,7 @@
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 import java.nio.file.Path;
 import java.nio.file.Paths;

View File

@@ -17,7 +17,7 @@
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 import static org.apache.hadoop.ozone.OzoneConsts.OM_DB_CHECKPOINTS_DIR_NAME;
@@ -33,12 +33,12 @@ import java.util.Map;
 import java.util.Set;
 import org.apache.hadoop.hdds.HddsUtils;
+import org.apache.hadoop.hdds.utils.RocksDBStoreMBean;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.metrics2.util.MBeans;
-import org.apache.hadoop.utils.RocksDBStoreMBean;
 import com.google.common.base.Preconditions;
-import org.apache.hadoop.utils.db.cache.TableCacheImpl;
+import org.apache.hadoop.hdds.utils.db.cache.TableCacheImpl;
 import org.apache.ratis.thirdparty.com.google.common.annotations.VisibleForTesting;
 import org.rocksdb.ColumnFamilyDescriptor;
 import org.rocksdb.ColumnFamilyHandle;

View File

@@ -17,7 +17,7 @@
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 import java.io.IOException;
 import java.util.NoSuchElementException;

View File

@@ -17,7 +17,7 @@
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;

View File

@@ -17,7 +17,7 @@
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 import java.io.IOException;
 import java.nio.file.Path;

View File

@@ -17,7 +17,7 @@
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 import java.io.IOException;

View File

@@ -16,7 +16,7 @@
  * limitations under the License.
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 import java.io.IOException;
 import org.apache.hadoop.hdfs.DFSUtil;

View File

@@ -17,7 +17,7 @@
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 import java.io.IOException;
 import java.util.Iterator;
@@ -25,8 +25,8 @@ import java.util.Map;
 import org.apache.commons.lang3.NotImplementedException;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.utils.db.cache.CacheKey;
-import org.apache.hadoop.utils.db.cache.CacheValue;
+import org.apache.hadoop.hdds.utils.db.cache.CacheKey;
+import org.apache.hadoop.hdds.utils.db.cache.CacheValue;
 /**
  * Interface for key-value store that stores ozone metadata. Ozone metadata is
  * stored as key value pairs, both key and value are arbitrary byte arrays. Each

View File

@@ -17,7 +17,7 @@
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 import org.apache.commons.lang3.builder.EqualsBuilder;
 import org.apache.commons.lang3.builder.HashCodeBuilder;

View File

@@ -17,7 +17,7 @@
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 import java.io.Closeable;
 import java.io.IOException;

View File

@@ -16,7 +16,7 @@
  * limitations under the License.
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 import java.io.IOException;
 import java.util.Iterator;
@@ -24,15 +24,15 @@ import java.util.Map;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Optional;
-import org.apache.hadoop.utils.db.cache.CacheKey;
-import org.apache.hadoop.utils.db.cache.CacheResult;
-import org.apache.hadoop.utils.db.cache.CacheValue;
-import org.apache.hadoop.utils.db.cache.TableCacheImpl;
-import org.apache.hadoop.utils.db.cache.TableCache;
-import org.apache.hadoop.utils.db.cache.TableCacheImpl.CacheCleanupPolicy;
+import org.apache.hadoop.hdds.utils.db.cache.CacheKey;
+import org.apache.hadoop.hdds.utils.db.cache.CacheResult;
+import org.apache.hadoop.hdds.utils.db.cache.CacheValue;
+import org.apache.hadoop.hdds.utils.db.cache.TableCacheImpl;
+import org.apache.hadoop.hdds.utils.db.cache.TableCache;
+import org.apache.hadoop.hdds.utils.db.cache.TableCacheImpl.CacheCleanupPolicy;
-import static org.apache.hadoop.utils.db.cache.CacheResult.CacheStatus.EXISTS;
-import static org.apache.hadoop.utils.db.cache.CacheResult.CacheStatus.NOT_EXIST;
+import static org.apache.hadoop.hdds.utils.db.cache.CacheResult.CacheStatus.EXISTS;
+import static org.apache.hadoop.hdds.utils.db.cache.CacheResult.CacheStatus.NOT_EXIST;
 /**
  * Strongly typed table implementation.
  * <p>

View File

@@ -16,7 +16,7 @@
  * limitations under the License.
  */
-package org.apache.hadoop.utils.db.cache;
+package org.apache.hadoop.hdds.utils.db.cache;
 import java.util.Objects;

View File

@@ -16,7 +16,7 @@
  * limitations under the License.
  */
-package org.apache.hadoop.utils.db.cache;
+package org.apache.hadoop.hdds.utils.db.cache;
 import java.util.Objects;

View File

@@ -16,7 +16,7 @@
  * limitations under the License.
  */
-package org.apache.hadoop.utils.db.cache;
+package org.apache.hadoop.hdds.utils.db.cache;
 import com.google.common.base.Optional;

View File

@@ -17,7 +17,7 @@
  *
  */
-package org.apache.hadoop.utils.db.cache;
+package org.apache.hadoop.hdds.utils.db.cache;
 import java.util.Objects;

View File

@@ -17,12 +17,11 @@
  *
  */
-package org.apache.hadoop.utils.db.cache;
+package org.apache.hadoop.hdds.utils.db.cache;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Evolving;
-import org.apache.hadoop.utils.db.cache.CacheResult.CacheStatus;
-import org.apache.hadoop.utils.db.cache.TableCacheImpl.CacheCleanupPolicy;
 import java.util.Iterator;
 import java.util.Map;
@@ -78,15 +77,15 @@ public interface TableCache<CACHEKEY extends CacheKey,
    * Check key exist in cache or not.
    *
    * If it exists return CacheResult with value and status as
-   * {@link CacheStatus#EXISTS}
+   * {@link CacheResult.CacheStatus#EXISTS}
    *
    * If it does not exist:
    * If cache clean up policy is
    * {@link TableCacheImpl.CacheCleanupPolicy#NEVER} it means table cache is
    * full cache. It return's {@link CacheResult} with null
-   * and status as {@link CacheStatus#NOT_EXIST}.
+   * and status as {@link CacheResult.CacheStatus#NOT_EXIST}.
    *
-   * If cache clean up policy is {@link CacheCleanupPolicy#MANUAL} it means
+   * If cache clean up policy is {@link TableCacheImpl.CacheCleanupPolicy#MANUAL} it means
    * table cache is partial cache. It return's {@link CacheResult} with
    * null and status as MAY_EXIST.
    *
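Aside on the lookup contract documented in the hunk above: a caller can branch on the three statuses. This sketch is illustrative only; the EXISTS / NOT_EXIST / MAY_EXIST semantics are quoted from the javadoc, while the lookup method and accessor names are assumptions, not verified against the Ozone sources.

// Hypothetical caller of TableCache, acting on the documented statuses.
// EXISTS: value definitely cached; NOT_EXIST: definitely absent (full cache);
// MAY_EXIST: partial cache, so fall back to the underlying table.
CacheResult<CacheValue<String>> result = cache.lookup(new CacheKey<>(key));
switch (result.getCacheStatus()) {
case EXISTS:
  return result.getValue();            // served from cache
case NOT_EXIST:
  return null;                         // full cache: no DB read needed
default:                               // MAY_EXIST
  return readFromUnderlyingTable(key); // hypothetical fallback helper
}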

View File

@@ -17,7 +17,7 @@
  *
  */
-package org.apache.hadoop.utils.db.cache;
+package org.apache.hadoop.hdds.utils.db.cache;
 import java.util.Iterator;
 import java.util.Map;

View File

@@ -15,4 +15,4 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.utils.db.cache;
+package org.apache.hadoop.hdds.utils.db.cache;

View File

@@ -19,4 +19,4 @@
 /**
  * Database interfaces for Ozone.
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;

View File

@@ -15,4 +15,4 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;

View File

@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 import java.util.ArrayList;
 import java.util.List;

View File

@@ -14,7 +14,7 @@
  * License for the specific language governing permissions and limitations under
  * the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 import com.google.common.collect.Lists;
 import org.apache.commons.io.FileUtils;
@@ -25,9 +25,8 @@ import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.DFSUtilClient;
 import org.apache.hadoop.ozone.OzoneConfigKeys;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.hadoop.utils.MetadataKeyFilters.KeyPrefixFilter;
-import org.apache.hadoop.utils.MetadataKeyFilters.MetadataKeyFilter;
-import org.apache.hadoop.utils.MetadataStore.KeyValue;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters.KeyPrefixFilter;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters.MetadataKeyFilter;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Rule;
@@ -122,7 +121,7 @@ public class TestMetadataStore {
     //As database is empty, check whether iterator is working as expected or
     // not.
-    MetaStoreIterator<KeyValue> metaStoreIterator = dbStore.iterator();
+    MetaStoreIterator<MetadataStore.KeyValue> metaStoreIterator = dbStore.iterator();
     assertFalse(metaStoreIterator.hasNext());
     try {
       metaStoreIterator.next();
@@ -140,7 +139,7 @@
     int i = 0;
     while (metaStoreIterator.hasNext()) {
-      KeyValue val = metaStoreIterator.next();
+      MetadataStore.KeyValue val = metaStoreIterator.next();
       assertEquals("a" + i, getString(val.getKey()));
       assertEquals("a-value" + i, getString(val.getValue()));
       i++;

View File

@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 import static org.junit.Assert.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertThrows;

View File

@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdds.conf.OzoneConfiguration;

View File

@@ -17,7 +17,7 @@
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.hdfs.DFSUtil;
@@ -40,7 +40,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
-import static org.apache.hadoop.utils.db.DBConfigFromFile.getOptionsFileNameFromDB;
+import static org.apache.hadoop.hdds.utils.db.DBConfigFromFile.getOptionsFileNameFromDB;
 /**
  * DBConf tests.

View File

@@ -17,7 +17,7 @@
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 import org.apache.commons.lang3.RandomStringUtils;
 import org.apache.hadoop.conf.Configuration;

View File

@@ -17,7 +17,7 @@
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 import javax.management.MBeanServer;

View File

@@ -17,7 +17,7 @@
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;

View File

@@ -17,7 +17,7 @@
  *
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;
 import java.io.IOException;
 import java.util.Arrays;
@@ -29,11 +29,11 @@ import java.util.Set;
 import com.google.common.base.Optional;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.hadoop.utils.db.Table.KeyValue;
+import org.apache.hadoop.hdds.utils.db.Table.KeyValue;
 import org.apache.commons.lang3.RandomStringUtils;
-import org.apache.hadoop.utils.db.cache.CacheKey;
-import org.apache.hadoop.utils.db.cache.CacheValue;
+import org.apache.hadoop.hdds.utils.db.cache.CacheKey;
+import org.apache.hadoop.hdds.utils.db.cache.CacheValue;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;

View File

@@ -17,7 +17,7 @@
  *
  */
-package org.apache.hadoop.utils.db.cache;
+package org.apache.hadoop.hdds.utils.db.cache;
 import java.util.Arrays;
 import java.util.Collection;

View File

@@ -19,4 +19,4 @@
 /**
  * Tests for the DB Cache Utilities.
  */
-package org.apache.hadoop.utils.db.cache;
+package org.apache.hadoop.hdds.utils.db.cache;

View File

@@ -19,4 +19,4 @@
 /**
  * Tests for the DB Utilities.
  */
-package org.apache.hadoop.utils.db;
+package org.apache.hadoop.hdds.utils.db;

View File

@@ -19,4 +19,4 @@
 /**
  * DB test Utils.
  */
-package org.apache.hadoop.utils;
+package org.apache.hadoop.hdds.utils;

View File

@@ -47,7 +47,7 @@ import org.apache.hadoop.ozone.protocol.commands.DeleteBlockCommandStatus;
 import org.apache.hadoop.ozone.protocol.commands.DeleteBlocksCommand;
 import org.apache.hadoop.ozone.protocol.commands.SCMCommand;
 import org.apache.hadoop.util.Time;
-import org.apache.hadoop.utils.BatchOperation;
+import org.apache.hadoop.hdds.utils.BatchOperation;
 import org.apache.hadoop.ozone.container.common.utils.ReferenceCountedDB;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

View File

@@ -23,8 +23,8 @@ import org.apache.commons.collections.MapIterator;
 import org.apache.commons.collections.map.LRUMap;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ozone.OzoneConfigKeys;
-import org.apache.hadoop.utils.MetadataStore;
-import org.apache.hadoop.utils.MetadataStoreBuilder;
+import org.apache.hadoop.hdds.utils.MetadataStore;
+import org.apache.hadoop.hdds.utils.MetadataStoreBuilder;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

View File

@@ -21,7 +21,7 @@ package org.apache.hadoop.ozone.container.common.utils;
 import com.google.common.base.Preconditions;
 import org.apache.commons.lang.exception.ExceptionUtils;
-import org.apache.hadoop.utils.MetadataStore;
+import org.apache.hadoop.hdds.utils.MetadataStore;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

View File

@@ -28,11 +28,11 @@ import org.apache.hadoop.ozone.container.common.impl.ContainerDataYaml;
 import org.apache.hadoop.ozone.container.common.interfaces.BlockIterator;
 import org.apache.hadoop.ozone.container.keyvalue.helpers.BlockUtils;
 import org.apache.hadoop.ozone.container.keyvalue.helpers.KeyValueContainerLocationUtil;
-import org.apache.hadoop.utils.MetaStoreIterator;
-import org.apache.hadoop.utils.MetadataKeyFilters;
-import org.apache.hadoop.utils.MetadataKeyFilters.KeyPrefixFilter;
+import org.apache.hadoop.hdds.utils.MetaStoreIterator;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters.KeyPrefixFilter;
 import org.apache.hadoop.ozone.container.common.utils.ReferenceCountedDB;
-import org.apache.hadoop.utils.MetadataStore.KeyValue;
+import org.apache.hadoop.hdds.utils.MetadataStore.KeyValue;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

View File

@@ -36,9 +36,9 @@ import org.apache.hadoop.ozone.OzoneConsts;
 import org.apache.hadoop.ozone.container.common.helpers.ContainerUtils;
 import org.apache.hadoop.ozone.container.common.helpers.BlockData;
 import org.apache.hadoop.ozone.container.keyvalue.KeyValueContainerData;
-import org.apache.hadoop.utils.MetadataKeyFilters;
-import org.apache.hadoop.utils.MetadataStore;
-import org.apache.hadoop.utils.MetadataStoreBuilder;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters;
+import org.apache.hadoop.hdds.utils.MetadataStore;
+import org.apache.hadoop.hdds.utils.MetadataStoreBuilder;
 import com.google.common.base.Preconditions;
 import org.apache.commons.io.FileUtils;

View File

@@ -33,8 +33,8 @@ import org.apache.hadoop.ozone.container.keyvalue.helpers.BlockUtils;
 import org.apache.hadoop.ozone.container.common.interfaces.Container;
 import org.apache.hadoop.ozone.container.keyvalue.interfaces.BlockManager;
 import org.apache.hadoop.ozone.container.common.utils.ContainerCache;
-import org.apache.hadoop.utils.BatchOperation;
-import org.apache.hadoop.utils.MetadataKeyFilters;
+import org.apache.hadoop.hdds.utils.BatchOperation;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters;
 import org.apache.hadoop.ozone.container.common.utils.ReferenceCountedDB;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

View File

@@ -40,12 +40,12 @@ import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
 import org.apache.hadoop.ozone.OzoneConsts;
 import org.apache.hadoop.util.Time;
-import org.apache.hadoop.utils.BackgroundService;
-import org.apache.hadoop.utils.BackgroundTask;
-import org.apache.hadoop.utils.BackgroundTaskQueue;
-import org.apache.hadoop.utils.BackgroundTaskResult;
-import org.apache.hadoop.utils.BatchOperation;
-import org.apache.hadoop.utils.MetadataKeyFilters.KeyPrefixFilter;
+import org.apache.hadoop.hdds.utils.BackgroundService;
+import org.apache.hadoop.hdds.utils.BackgroundTask;
+import org.apache.hadoop.hdds.utils.BackgroundTaskQueue;
+import org.apache.hadoop.hdds.utils.BackgroundTaskResult;
+import org.apache.hadoop.hdds.utils.BatchOperation;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters.KeyPrefixFilter;
 import org.apache.hadoop.ozone.container.common.utils.ReferenceCountedDB;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

View File

@@ -40,7 +40,7 @@ import org.apache.hadoop.ozone.container.keyvalue.KeyValueContainerData;
 import org.apache.hadoop.ozone.container.common.impl.ContainerDataYaml;
 import org.apache.hadoop.ozone.container.keyvalue.helpers.BlockUtils;
 import org.apache.hadoop.ozone.container.keyvalue.helpers.KeyValueContainerUtil;
-import org.apache.hadoop.utils.MetadataKeyFilters;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters;
 import org.apache.hadoop.ozone.container.common.utils.ReferenceCountedDB;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

View File

@@ -23,8 +23,8 @@ import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import org.apache.hadoop.ozone.OzoneConfigKeys;
 import org.apache.hadoop.ozone.container.common.utils.ContainerCache;
 import org.apache.hadoop.ozone.container.common.utils.ReferenceCountedDB;
-import org.apache.hadoop.utils.MetadataStore;
-import org.apache.hadoop.utils.MetadataStoreBuilder;
+import org.apache.hadoop.hdds.utils.MetadataStore;
+import org.apache.hadoop.hdds.utils.MetadataStoreBuilder;
 import org.junit.Assert;
 import org.junit.Rule;
 import org.junit.Test;

View File

@@ -33,7 +33,7 @@ import org.apache.hadoop.ozone.container.common.volume.RoundRobinVolumeChoosingP
 import org.apache.hadoop.ozone.container.common.volume.VolumeSet;
 import org.apache.hadoop.ozone.container.keyvalue.helpers.BlockUtils;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.hadoop.utils.MetadataKeyFilters;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters;
 import org.apache.hadoop.ozone.container.common.utils.ReferenceCountedDB;
 import org.junit.After;
 import org.junit.Before;

View File

@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hdds.server;
-import static org.apache.hadoop.utils.RocksDBStoreMBean.ROCKSDB_CONTEXT_PREFIX;
+import static org.apache.hadoop.hdds.utils.RocksDBStoreMBean.ROCKSDB_CONTEXT_PREFIX;
 import java.io.IOException;
 import java.io.Writer;

View File

@@ -17,7 +17,7 @@
 package org.apache.hadoop.hdds.server;
-import org.apache.hadoop.utils.VersionInfo;
+import org.apache.hadoop.hdds.utils.VersionInfo;
 /**
  * Helper base class to report the standard version and runtime information.

View File

@@ -45,7 +45,7 @@ import org.apache.hadoop.hdds.scm.pipeline.PipelineNotFoundException;
 import org.apache.hadoop.hdds.scm.server.StorageContainerManager;
 import org.apache.hadoop.metrics2.util.MBeans;
 import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.utils.UniqueId;
+import org.apache.hadoop.hdds.utils.UniqueId;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

View File

@@ -47,9 +47,9 @@ import org.apache.hadoop.hdds.scm.container.ContainerReplica;
 import org.apache.hadoop.hdds.scm.metadata.SCMMetadataStore;
 import org.apache.hadoop.hdds.server.events.EventHandler;
 import org.apache.hadoop.hdds.server.events.EventPublisher;
-import org.apache.hadoop.utils.db.BatchOperation;
-import org.apache.hadoop.utils.db.Table;
-import org.apache.hadoop.utils.db.TableIterator;
+import org.apache.hadoop.hdds.utils.db.BatchOperation;
+import org.apache.hadoop.hdds.utils.db.Table;
+import org.apache.hadoop.hdds.utils.db.TableIterator;
 import org.eclipse.jetty.util.ConcurrentHashSet;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

View File

@@ -30,10 +30,10 @@ import org.apache.hadoop.hdds.server.events.EventPublisher;
 import org.apache.hadoop.ozone.protocol.commands.CommandForDatanode;
 import org.apache.hadoop.ozone.protocol.commands.DeleteBlocksCommand;
 import org.apache.hadoop.util.Time;
-import org.apache.hadoop.utils.BackgroundService;
-import org.apache.hadoop.utils.BackgroundTask;
-import org.apache.hadoop.utils.BackgroundTaskQueue;
-import org.apache.hadoop.utils.BackgroundTaskResult.EmptyTaskResult;
+import org.apache.hadoop.hdds.utils.BackgroundService;
+import org.apache.hadoop.hdds.utils.BackgroundTask;
+import org.apache.hadoop.hdds.utils.BackgroundTaskQueue;
+import org.apache.hadoop.hdds.utils.BackgroundTaskResult.EmptyTaskResult;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

View File

@@ -33,9 +33,9 @@ import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationType;
 import org.apache.hadoop.hdds.server.ServerUtils;
 import org.apache.hadoop.hdds.server.events.EventPublisher;
 import org.apache.hadoop.ozone.OzoneConsts;
-import org.apache.hadoop.utils.BatchOperation;
-import org.apache.hadoop.utils.MetadataStore;
-import org.apache.hadoop.utils.MetadataStoreBuilder;
+import org.apache.hadoop.hdds.utils.BatchOperation;
+import org.apache.hadoop.hdds.utils.MetadataStore;
+import org.apache.hadoop.hdds.utils.MetadataStoreBuilder;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

View File

@@ -28,7 +28,7 @@ import org.apache.hadoop.metrics2.util.MBeans;
 import com.google.common.annotations.VisibleForTesting;
-import org.apache.hadoop.utils.Scheduler;
+import org.apache.hadoop.hdds.utils.Scheduler;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

View File

@@ -21,7 +21,7 @@ package org.apache.hadoop.hdds.scm.metadata;
 import java.io.IOException;
 import java.math.BigInteger;
-import org.apache.hadoop.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.Codec;
 /**
  * Encode and decode BigInteger.

View File

@@ -24,7 +24,7 @@ import com.google.protobuf.InvalidProtocolBufferException;
 import java.io.IOException;
 import org.apache.hadoop.hdds.protocol.proto
     .StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction;
-import org.apache.hadoop.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.Codec;
 /**
  * Codec for Persisting the DeletedBlocks.

View File

@@ -21,7 +21,7 @@ package org.apache.hadoop.hdds.scm.metadata;
 import com.google.common.primitives.Longs;
 import java.io.IOException;
-import org.apache.hadoop.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.Codec;
 /**
  * Codec for Persisting the DeletedBlocks.

View File

@@ -23,11 +23,11 @@ import org.apache.hadoop.hdds.conf.OzoneConfiguration;
 import java.io.IOException;
 import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.hdds.security.x509.certificate.authority.CertificateStore;
-import org.apache.hadoop.utils.db.DBStore;
-import org.apache.hadoop.utils.db.Table;
+import org.apache.hadoop.hdds.utils.db.DBStore;
+import org.apache.hadoop.hdds.utils.db.Table;
 import org.apache.hadoop.hdds.protocol.proto
     .StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction;
-import org.apache.hadoop.utils.db.TableIterator;
+import org.apache.hadoop.hdds.utils.db.TableIterator;
 /**
  * Generic interface for data stores for SCM.

View File

@@ -27,12 +27,12 @@ import java.io.IOException;
 import org.apache.hadoop.hdds.security.x509.certificate.authority
     .CertificateStore;
 import org.apache.hadoop.hdds.server.ServerUtils;
-import org.apache.hadoop.utils.db.DBStore;
-import org.apache.hadoop.utils.db.DBStoreBuilder;
-import org.apache.hadoop.utils.db.Table;
+import org.apache.hadoop.hdds.utils.db.DBStore;
+import org.apache.hadoop.hdds.utils.db.DBStoreBuilder;
+import org.apache.hadoop.hdds.utils.db.Table;
 import org.apache.hadoop.hdds.protocol.proto
     .StorageContainerDatanodeProtocolProtos.DeletedBlocksTransaction;
-import org.apache.hadoop.utils.db.TableIterator;
+import org.apache.hadoop.hdds.utils.db.TableIterator;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -126,7 +126,7 @@ public class SCMMetadataStoreRDBImpl implements SCMMetadataStore {
   }
   @Override
-  public org.apache.hadoop.utils.db.DBStore getStore() {
+  public DBStore getStore() {
     return this.store;
   }

View File

@@ -25,7 +25,7 @@ import java.security.cert.CertificateException;
 import java.security.cert.X509Certificate;
 import org.apache.hadoop.hdds.security.exception.SCMSecurityException;
 import org.apache.hadoop.hdds.security.x509.certificate.utils.CertificateCodec;
-import org.apache.hadoop.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.Codec;
 /**
  * Encodes and Decodes X509Certificate Class.

View File

@@ -21,7 +21,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
 import org.apache.hadoop.hdds.scm.ScmConfigKeys;
 import org.apache.hadoop.ozone.OzoneConfigKeys;
-import org.apache.hadoop.utils.Scheduler;
+import org.apache.hadoop.hdds.utils.Scheduler;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

View File

@@ -33,10 +33,10 @@ import org.apache.hadoop.hdds.server.ServerUtils;
 import org.apache.hadoop.hdds.server.events.EventPublisher;
 import org.apache.hadoop.metrics2.util.MBeans;
 import org.apache.hadoop.ozone.OzoneConsts;
-import org.apache.hadoop.utils.MetadataKeyFilters;
-import org.apache.hadoop.utils.MetadataStore;
-import org.apache.hadoop.utils.MetadataStoreBuilder;
-import org.apache.hadoop.utils.Scheduler;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters;
+import org.apache.hadoop.hdds.utils.MetadataStore;
+import org.apache.hadoop.hdds.utils.MetadataStoreBuilder;
+import org.apache.hadoop.hdds.utils.Scheduler;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

View File

@@ -27,7 +27,7 @@ import java.util.concurrent.locks.ReentrantLock;
 import org.apache.hadoop.hdds.scm.metadata.SCMMetadataStore;
 import org.apache.hadoop.hdds.security.exception.SCMSecurityException;
 import org.apache.hadoop.hdds.security.x509.certificate.authority.CertificateStore;
-import org.apache.hadoop.utils.db.BatchOperation;
+import org.apache.hadoop.hdds.utils.db.BatchOperation;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

View File

@@ -99,7 +99,7 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.authentication.client.AuthenticationException;
 import org.apache.hadoop.util.JvmPauseMonitor;
-import org.apache.hadoop.utils.HddsVersionInfo;
+import org.apache.hadoop.hdds.utils.HddsVersionInfo;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

View File

@@ -43,9 +43,9 @@ import org.apache.hadoop.hdds.protocol.proto
     .StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKProto
     .DeleteBlockTransactionResult;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.hadoop.utils.MetadataKeyFilters;
-import org.apache.hadoop.utils.db.Table;
-import org.apache.hadoop.utils.db.TableIterator;
+import org.apache.hadoop.hdds.utils.MetadataKeyFilters;
+import org.apache.hadoop.hdds.utils.db.Table;
+import org.apache.hadoop.hdds.utils.db.TableIterator;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;

View File

@@ -30,8 +30,8 @@ import org.apache.hadoop.ozone.om.helpers.S3SecretValue;
 import org.apache.hadoop.ozone.om.lock.OzoneManagerLock;
 import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.VolumeList;
 import org.apache.hadoop.ozone.security.OzoneTokenIdentifier;
-import org.apache.hadoop.utils.db.DBStore;
-import org.apache.hadoop.utils.db.Table;
+import org.apache.hadoop.hdds.utils.db.DBStore;
+import org.apache.hadoop.hdds.utils.db.Table;
 import com.google.common.annotations.VisibleForTesting;

View File

@@ -20,7 +20,7 @@ package org.apache.hadoop.ozone.om.codec;
 import java.io.IOException;
 import org.apache.hadoop.ozone.om.helpers.OmBucketInfo;
 import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.BucketInfo;
-import org.apache.hadoop.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.Codec;
 import com.google.common.base.Preconditions;
 import com.google.protobuf.InvalidProtocolBufferException;

View File

@@ -20,7 +20,7 @@ package org.apache.hadoop.ozone.om.codec;
 import java.io.IOException;
 import org.apache.hadoop.ozone.om.helpers.OmKeyInfo;
 import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.KeyInfo;
-import org.apache.hadoop.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.Codec;
 import com.google.common.base.Preconditions;
 import com.google.protobuf.InvalidProtocolBufferException;

View File

@@ -22,7 +22,7 @@ import com.google.protobuf.InvalidProtocolBufferException;
 import java.io.IOException;
 import org.apache.hadoop.ozone.om.helpers.OmMultipartKeyInfo;
 import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos;
-import org.apache.hadoop.utils.db.Codec;
+import org.apache.hadoop.hdds.utils.db.Codec;
 /**

Some files were not shown because too many files have changed in this diff.