Fixed merge error. Adapted to the new signature of BlockUtils.getDB.

sdeka 2019-05-23 11:36:40 +05:30
parent 9da62f33be
commit 72bef0f6cb
1 changed file with 3 additions and 3 deletions


@@ -33,13 +33,13 @@
 import org.apache.hadoop.ozone.container.common.impl.ContainerSet;
 import org.apache.hadoop.ozone.container.common.statemachine.DatanodeStateMachine;
 import org.apache.hadoop.ozone.container.common.statemachine.StateContext;
+import org.apache.hadoop.ozone.container.common.utils.ContainerCache;
 import org.apache.hadoop.ozone.container.common.volume.HddsVolume;
 import org.apache.hadoop.ozone.container.common.volume.RoundRobinVolumeChoosingPolicy;
 import org.apache.hadoop.ozone.container.common.volume.VolumeSet;
 import org.apache.hadoop.ozone.container.keyvalue.KeyValueContainer;
 import org.apache.hadoop.ozone.container.keyvalue.KeyValueContainerData;
 import org.apache.hadoop.ozone.container.keyvalue.helpers.BlockUtils;
-import org.apache.hadoop.utils.MetadataStore;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
@@ -155,7 +155,7 @@ private long addBlocks(KeyValueContainer container,
     long freeBytes = container.getContainerData().getMaxSize();
     long containerId = container.getContainerData().getContainerID();
-    MetadataStore metadataStore = BlockUtils.getDB(container
+    ContainerCache.ReferenceCountedDB db = BlockUtils.getDB(container
         .getContainerData(), conf);
     for (int bi = 0; bi < blocks; bi++) {
@@ -173,7 +173,7 @@ private long addBlocks(KeyValueContainer container,
         chunkList.add(info.getProtoBufMessage());
       }
       blockData.setChunks(chunkList);
-      metadataStore.put(Longs.toByteArray(blockID.getLocalID()),
+      db.getStore().put(Longs.toByteArray(blockID.getLocalID()),
           blockData.getProtoBufMessage().toByteArray());
     }
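The change amounts to callers of BlockUtils.getDB now receiving a reference-counted handle (ContainerCache.ReferenceCountedDB) instead of a raw MetadataStore, and reaching the store through getStore(). Below is a minimal sketch of how such a caller might look; only getDB, getStore, put and the Longs.toByteArray key layout are taken from the diff above. The class name, method name, parameter types and the assumption that ReferenceCountedDB is Closeable (so try-with-resources releases the reference taken by getDB) are illustrative, not confirmed by this patch.

// Minimal sketch of a caller adapted to the new BlockUtils.getDB signature.
// Assumes ReferenceCountedDB is Closeable and that close() releases the
// reference taken by getDB; everything not shown in the diff above is
// illustrative.
import com.google.common.primitives.Longs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ozone.container.common.utils.ContainerCache;
import org.apache.hadoop.ozone.container.keyvalue.KeyValueContainerData;
import org.apache.hadoop.ozone.container.keyvalue.helpers.BlockUtils;

public final class BlockPutSketch {

  private BlockPutSketch() { }

  /** Writes one serialized block entry into the container's metadata DB. */
  static void putBlock(KeyValueContainerData containerData, Configuration conf,
      long localBlockId, byte[] blockDataProto) throws Exception {
    // getDB now hands out a reference-counted DB handle instead of the
    // raw MetadataStore it used to return.
    try (ContainerCache.ReferenceCountedDB db =
             BlockUtils.getDB(containerData, conf)) {
      // The underlying store is reached through getStore(); the key is the
      // local block ID serialized as bytes, as in the test above.
      db.getStore().put(Longs.toByteArray(localBlockId), blockDataProto);
    } // closing the handle releases the reference (assumption)
  }
}

The apparent intent of the reference-counted handle is to let the datanode share one open store per container and evict it from the cache only when no caller still holds a reference; whether close() is the intended release path, or an explicit decrement method is expected instead, is not visible from this diff.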