HBASE-2869 Regularize how we log sequenceids -- sometimes it's myseqid, other times it's sequence id, etc. -- Found another case where we had seqid instead of sequenceid
git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@978799 13f79535-47bb-0310-9956-ffa450edef68
parent 8400698734
commit b64c40daff
@@ -25,9 +25,7 @@ import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
 import java.util.NavigableSet;
-import java.util.Set;
 import java.util.SortedSet;
-import java.util.TreeSet;
 import java.util.concurrent.CopyOnWriteArraySet;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 
@@ -42,9 +40,7 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValue.KeyComparator;
 import org.apache.hadoop.hbase.RemoteExceptionHandler;
-import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.io.hfile.Compression;
@@ -52,7 +48,6 @@ import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;
-import org.apache.hadoop.hbase.util.EnvironmentEdge;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.util.StringUtils;
 
@@ -670,7 +665,7 @@ public class Store implements HeapSize {
     LOG.info("Started compaction of " + filesToCompact.size() + " file(s) in " +
       this.storeNameStr + " of " + this.region.getRegionInfo().getRegionNameAsString() +
       (references? ", hasReferences=true,": " ") + " into " +
-      region.getTmpDir() + ", seqid=" + maxId);
+      region.getTmpDir() + ", sequenceid=" + maxId);
     StoreFile.Writer writer = compact(filesToCompact, majorcompaction, maxId);
     // Move the compaction into place.
     StoreFile sf = completeCompaction(filesToCompact, writer);
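The only functional effect of the hunk above is the text of the compaction log line: the key changes from "seqid=" to "sequenceid=", matching the other log sites already regularized under HBASE-2869. As a minimal, hypothetical sketch of why the uniform key helps (the class and method below are illustrative only and not HBase code), a single pattern can then recover the id from any such line, where before it would have had to match both spellings:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

// Illustrative only -- not part of HBase. Once every log site emits
// "sequenceid=<n>", one pattern matches them all.
public class SequenceIdGrep {
  private static final Pattern SEQID = Pattern.compile("sequenceid=(\\d+)");

  /** Returns the sequence id embedded in a log line, or -1 if none is present. */
  static long extractSequenceId(String logLine) {
    Matcher m = SEQID.matcher(logLine);
    return m.find() ? Long.parseLong(m.group(1)) : -1L;
  }

  public static void main(String[] args) {
    // Hypothetical log line in the shape produced by the LOG.info above.
    String line = "Started compaction of 3 file(s) in cf of table,,1280000000000"
        + " into /hbase/table/.tmp, sequenceid=1234";
    System.out.println(extractSequenceId(line));  // prints 1234
  }
}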