HBASE-20594 Provide utility to get table desc delta
Amending-Author: Mike Drob <mdrob@apache.org>
Signed-off-by: Sean Busbey <busbey@apache.org>
Signed-off-by: Apekshit Sharma <appy@apache.org>
This commit is contained in: parent 06611256ee, commit 92460c58aa
TableDescriptorUtils.java (new file)
@@ -0,0 +1,99 @@
/**
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.client;

import java.util.Collections;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;

import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;

import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;

@InterfaceAudience.Public
public final class TableDescriptorUtils {
  public final static class TableDescriptorDelta {
    private final Set<byte[]> columnsAdded;
    private final Set<byte[]> columnsDeleted;
    private final Set<byte[]> columnsModified;

    private TableDescriptorDelta(TableDescriptor oldTD, TableDescriptor newTD) {
      Preconditions.checkNotNull(oldTD);
      Preconditions.checkNotNull(newTD);

      Map<byte[], ColumnFamilyDescriptor> oldCFs = new TreeMap<>(Bytes.BYTES_COMPARATOR);
      Set<byte[]> newCFs = new TreeSet<>(Bytes.BYTES_COMPARATOR);

      // CFD -> (name, CFD)
      for (ColumnFamilyDescriptor cfd : oldTD.getColumnFamilies()) {
        oldCFs.put(cfd.getName(), cfd);
      }

      Set<byte[]> added = new TreeSet<>(Bytes.BYTES_COMPARATOR);
      Set<byte[]> modified = new TreeSet<>(Bytes.BYTES_COMPARATOR);

      for (ColumnFamilyDescriptor cfd : newTD.getColumnFamilies()) {
        byte[] cfName = cfd.getName();
        newCFs.add(cfName);

        if (!oldCFs.containsKey(cfName)) {
          // If column family is in newTD but not oldTD, then it was added
          added.add(cfName);
        } else if (!cfd.equals(oldCFs.get(cfName))) {
          // If column family is in both, but not equal, then it was modified
          modified.add(cfName);
        }
      }

      // If column family is in oldTD, but not in newTD, then it got deleted.
      Set<byte[]> deleted = oldCFs.keySet();
      deleted.removeAll(newCFs);

      columnsAdded = Collections.unmodifiableSet(added);
      columnsDeleted = Collections.unmodifiableSet(deleted);
      columnsModified = Collections.unmodifiableSet(modified);
    }

    public Set<byte[]> getColumnsAdded() {
      return columnsAdded;
    }

    public Set<byte[]> getColumnsDeleted() {
      return columnsDeleted;
    }

    public Set<byte[]> getColumnsModified() {
      return columnsModified;
    }
  }

  private TableDescriptorUtils() { }

  /**
   * Compares two {@link TableDescriptor} and indicate which columns were added, deleted,
   * or modified from oldTD to newTD
   * @return a TableDescriptorDelta that contains the added/deleted/modified column names
   */
  public static TableDescriptorDelta computeDelta(TableDescriptor oldTD, TableDescriptor newTD) {
    return new TableDescriptorDelta(oldTD, newTD);
  }
}
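For readers trying out the new utility, here is a minimal usage sketch. It is not part of this commit; the table name "example" and the family names "a", "b", "c" are invented for illustration, and the snippet assumes the same imports as the file above plus org.apache.hadoop.hbase.TableName and TableDescriptorUtils.TableDescriptorDelta.

// Hypothetical usage of TableDescriptorUtils.computeDelta -- not part of this patch.
TableDescriptor oldTD = TableDescriptorBuilder.newBuilder(TableName.valueOf("example"))
    .setColumnFamily(ColumnFamilyDescriptorBuilder.of("a"))
    .setColumnFamily(ColumnFamilyDescriptorBuilder.of("b"))
    .build();
TableDescriptor newTD = TableDescriptorBuilder.newBuilder(oldTD)
    .removeColumnFamily(Bytes.toBytes("b"))                   // "b" should show up as deleted
    .setColumnFamily(ColumnFamilyDescriptorBuilder.of("c"))   // "c" should show up as added
    .build();

TableDescriptorDelta delta = TableDescriptorUtils.computeDelta(oldTD, newTD);
for (byte[] cf : delta.getColumnsAdded()) {
  System.out.println("added: " + Bytes.toString(cf));         // prints "added: c"
}
for (byte[] cf : delta.getColumnsDeleted()) {
  System.out.println("deleted: " + Bytes.toString(cf));       // prints "deleted: b"
}

The returned sets hold the raw column family names as byte[], backed by collections ordered with Bytes.BYTES_COMPARATOR, so they can be iterated or compared by content.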
TestTableDescriptorUtils.java (new file)
@@ -0,0 +1,86 @@
/**
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.client;

import static org.junit.Assert.assertEquals;

import java.util.Arrays;

import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.TableDescriptorUtils.TableDescriptorDelta;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

@Category(SmallTests.class)
public class TestTableDescriptorUtils {
  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestTableDescriptorUtils.class);

  @Test
  public void testDelta() {
    ColumnFamilyDescriptor cf1 = ColumnFamilyDescriptorBuilder.of("cf1");
    ColumnFamilyDescriptor cf2 = ColumnFamilyDescriptorBuilder.of("cf2");
    ColumnFamilyDescriptor cf3 = ColumnFamilyDescriptorBuilder.of("cf3");
    ColumnFamilyDescriptor cf4 = ColumnFamilyDescriptorBuilder.of("cf4");
    TableDescriptor td = TableDescriptorBuilder
        .newBuilder(TableName.valueOf("test"))
        .setColumnFamilies(Arrays.asList(cf1, cf2, cf3, cf4))
        .build();

    TableDescriptorDelta selfCompare = TableDescriptorUtils.computeDelta(td, td);
    assertEquals(0, selfCompare.getColumnsAdded().size());
    assertEquals(0, selfCompare.getColumnsDeleted().size());
    assertEquals(0, selfCompare.getColumnsModified().size());

    ColumnFamilyDescriptor modCf2 = ColumnFamilyDescriptorBuilder
        .newBuilder(cf2).setMaxVersions(5).build();
    ColumnFamilyDescriptor modCf3 = ColumnFamilyDescriptorBuilder
        .newBuilder(cf3).setMaxVersions(5).build();
    ColumnFamilyDescriptor cf5 = ColumnFamilyDescriptorBuilder.of("cf5");
    ColumnFamilyDescriptor cf6 = ColumnFamilyDescriptorBuilder.of("cf6");
    ColumnFamilyDescriptor cf7 = ColumnFamilyDescriptorBuilder.of("cf7");
    TableDescriptor newTd = TableDescriptorBuilder
        .newBuilder(td)
        .removeColumnFamily(Bytes.toBytes("cf1"))
        .modifyColumnFamily(modCf2)
        .modifyColumnFamily(modCf3)
        .setColumnFamily(cf5)
        .setColumnFamily(cf6)
        .setColumnFamily(cf7)
        .build();

    TableDescriptorDelta delta = TableDescriptorUtils.computeDelta(td, newTd);

    assertEquals(3, delta.getColumnsAdded().size());
    assertEquals(1, delta.getColumnsDeleted().size());
    assertEquals(2, delta.getColumnsModified().size());

    TableDescriptorDelta inverseDelta = TableDescriptorUtils.computeDelta(newTd, td);

    // Equality here relies on implementation detail of the returned Set being a TreeSet
    assertEquals(delta.getColumnsDeleted(), inverseDelta.getColumnsAdded());
    assertEquals(delta.getColumnsAdded(), inverseDelta.getColumnsDeleted());
    assertEquals(delta.getColumnsModified(), inverseDelta.getColumnsModified());
  }
}
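A side note on the "implementation detail" comment in the test above, with a small illustrative sketch (assumes java.util.HashSet, java.util.Set, java.util.TreeSet, and org.apache.hadoop.hbase.util.Bytes; not part of the patch): byte[] does not override equals/hashCode, so a set of byte arrays only matches by content when the set itself consults a comparator such as Bytes.BYTES_COMPARATOR, which the TreeSet-backed delta sets do.

// Illustrative only -- why the delta sets need Bytes.BYTES_COMPARATOR to compare by content.
Set<byte[]> plain = new HashSet<>();
plain.add(Bytes.toBytes("cf1"));
boolean foundInHashSet = plain.contains(Bytes.toBytes("cf1"));    // false: byte[] compares by identity

Set<byte[]> ordered = new TreeSet<>(Bytes.BYTES_COMPARATOR);
ordered.add(Bytes.toBytes("cf1"));
boolean foundInTreeSet = ordered.contains(Bytes.toBytes("cf1"));  // true: comparator compares the bytes

Because containsAll on the comparator-backed sets matches element content rather than array identity, the assertEquals calls between delta and inverseDelta sets in the test pass.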