HBASE-23661 Reduced number of Checkstyle violations in hbase-rest

Signed-off-by: Viraj Jasani <vjasani@apache.org>

parent 70c69ba765
commit f6a2238889

hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/ResultGenerator.java
@@ -16,17 +16,17 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
 
 package org.apache.hadoop.hbase.rest;
 
 import java.io.IOException;
 import java.util.Iterator;
 
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.rest.model.ScannerModel;
 
+import org.apache.yetus.audience.InterfaceAudience;
 
 @InterfaceAudience.Private
 public abstract class ResultGenerator implements Iterator<Cell> {
hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/RowResultGenerator.java
@@ -16,7 +16,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.rest;
 
 import java.io.IOException;
@@ -26,13 +25,16 @@ import java.util.NoSuchElementException;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
-import org.apache.hadoop.hbase.security.AccessDeniedException;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.filter.Filter;
+import org.apache.hadoop.hbase.security.AccessDeniedException;
+
 import org.apache.hadoop.util.StringUtils;
+
 import org.apache.yetus.audience.InterfaceAudience;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -46,11 +48,10 @@ public class RowResultGenerator extends ResultGenerator {
   public RowResultGenerator(final String tableName, final RowSpec rowspec,
       final Filter filter, final boolean cacheBlocks)
       throws IllegalArgumentException, IOException {
-    Table table = RESTServlet.getInstance().getTable(tableName);
-    try {
+    try (Table table = RESTServlet.getInstance().getTable(tableName)) {
       Get get = new Get(rowspec.getRow());
       if (rowspec.hasColumns()) {
-        for (byte[] col: rowspec.getColumns()) {
+        for (byte[] col : rowspec.getColumns()) {
           byte[][] split = CellUtil.parseColumn(col);
           if (split.length == 1) {
             get.addFamily(split[0]);
@@ -83,8 +84,6 @@ public class RowResultGenerator extends ResultGenerator {
       if (e instanceof AccessDeniedException) {
         throw e;
       }
-    } finally {
-      table.close();
     }
   }
 
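The RowResultGenerator hunks above swap a manual try/finally for try-with-resources, so the Table is closed on both normal and exceptional exit. A minimal standalone sketch of the same pattern (the Resource class is hypothetical, standing in for HBase's AutoCloseable Table):

    // Resource stands in for any AutoCloseable, e.g. HBase's Table.
    class Resource implements AutoCloseable {
      void use() { System.out.println("using"); }
      @Override public void close() { System.out.println("closed"); }
    }

    public class TryWithResourcesDemo {
      public static void main(String[] args) {
        // Before: Resource r = open(); try { r.use(); } finally { r.close(); }
        // After: the compiler emits the close() call automatically.
        try (Resource r = new Resource()) {
          r.use();
        }
      }
    }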
hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Response.java
@@ -16,18 +16,19 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.rest.client;
 
 import java.io.IOException;
 import java.io.InputStream;
 
-import org.apache.yetus.audience.InterfaceAudience;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.http.Header;
 import org.apache.http.HttpResponse;
 
+import org.apache.yetus.audience.InterfaceAudience;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 /**
  * The HTTP result code, response headers, and body of an HTTP response.
  */
@@ -71,12 +72,12 @@ public class Response {
   }
 
   /**
-   * Constructor
+   * Constructor. Note: this is not thread-safe
+   *
    * @param code the HTTP response code
    * @param headers headers the HTTP response headers
    * @param resp the response
    * @param in Inputstream if the response had one.
-   * Note: this is not thread-safe
    */
   public Response(int code, Header[] headers, HttpResponse resp, InputStream in) {
     this.code = code;
@@ -110,7 +111,7 @@ public class Response {
   }
 
   public String getHeader(String key) {
-    for (Header header: headers) {
+    for (Header header : headers) {
       if (header.getName().equalsIgnoreCase(key)) {
         return header.getValue();
       }
hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/GZIPRequestStream.java
@@ -16,7 +16,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.rest.filter;
 
 import java.io.IOException;
@@ -29,8 +28,7 @@ import javax.servlet.http.HttpServletRequest;
 import org.apache.yetus.audience.InterfaceAudience;
 
 @InterfaceAudience.Private
-public class GZIPRequestStream extends ServletInputStream
-{
+public class GZIPRequestStream extends ServletInputStream {
   private GZIPInputStream in;
 
   public GZIPRequestStream(HttpServletRequest request) throws IOException {
hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/GZIPResponseStream.java
@@ -16,7 +16,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.rest.filter;
 
 import java.io.IOException;
@@ -29,8 +28,7 @@ import javax.servlet.http.HttpServletResponse;
 import org.apache.yetus.audience.InterfaceAudience;
 
 @InterfaceAudience.Private
-public class GZIPResponseStream extends ServletOutputStream
-{
+public class GZIPResponseStream extends ServletOutputStream {
   private HttpServletResponse response;
   private GZIPOutputStream out;
 
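Both stream wrappers touched above delegate to the JDK's java.util.zip classes. A self-contained round trip showing the underlying behavior (a hypothetical demo class, not part of the patch; readAllBytes needs Java 9+):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import java.util.zip.GZIPInputStream;
    import java.util.zip.GZIPOutputStream;

    public class GzipRoundTrip {
      public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        // Compress, as GZIPResponseStream does for response bodies.
        try (GZIPOutputStream gz = new GZIPOutputStream(buf)) {
          gz.write("hello".getBytes(StandardCharsets.UTF_8));
        }
        // Decompress, as GZIPRequestStream does for request bodies.
        try (GZIPInputStream in =
            new GZIPInputStream(new ByteArrayInputStream(buf.toByteArray()))) {
          System.out.println(new String(in.readAllBytes(), StandardCharsets.UTF_8));
        }
      }
    }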
hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/GzipFilter.java
@@ -16,7 +16,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.rest.filter;
 
 import java.io.IOException;
@@ -35,15 +34,16 @@ import javax.servlet.ServletResponse;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 
+import org.apache.yetus.audience.InterfaceAudience;
+
 @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
 public class GzipFilter implements Filter {
   private Set<String> mimeTypes = new HashSet<>();
 
   @Override
-  public void init(FilterConfig filterConfig) throws ServletException {
+  public void init(FilterConfig filterConfig) {
     String s = filterConfig.getInitParameter("mimeTypes");
     if (s != null) {
       StringTokenizer tok = new StringTokenizer(s, ",", false);
@@ -66,11 +66,11 @@ public class GzipFilter {
     String acceptEncoding = request.getHeader("accept-encoding");
     String contentType = request.getHeader("content-type");
     if ((contentEncoding != null) &&
-        (contentEncoding.toLowerCase(Locale.ROOT).indexOf("gzip") > -1)) {
+        (contentEncoding.toLowerCase(Locale.ROOT).contains("gzip"))) {
       request = new GZIPRequestWrapper(request);
     }
     if (((acceptEncoding != null) &&
-        (acceptEncoding.toLowerCase(Locale.ROOT).indexOf("gzip") > -1)) ||
+        (acceptEncoding.toLowerCase(Locale.ROOT).contains("gzip"))) ||
         ((contentType != null) && mimeTypes.contains(contentType))) {
       response = new GZIPResponseWrapper(response);
     }
@@ -82,5 +82,4 @@ public class GzipFilter {
       }
     }
   }
-
 }
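The indexOf("gzip") > -1 tests above become String.contains("gzip"); the two are equivalent, contains simply states the intent. A quick check (hypothetical demo class):

    import java.util.Locale;

    public class ContainsDemo {
      public static void main(String[] args) {
        String acceptEncoding = "GZIP, deflate";
        String lower = acceptEncoding.toLowerCase(Locale.ROOT);
        System.out.println(lower.indexOf("gzip") > -1); // true
        System.out.println(lower.contains("gzip"));     // true
      }
    }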
hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/filter/RestCsrfPreventionFilter.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hbase.rest.filter;
 
 import java.io.IOException;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
@@ -34,10 +35,12 @@ import javax.servlet.ServletResponse;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
+import org.apache.hadoop.conf.Configuration;
+
 import org.apache.yetus.audience.InterfaceAudience;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.hadoop.conf.Configuration;
 
 /**
  * This filter provides protection against cross site request forgery (CSRF)
@@ -48,7 +51,6 @@ import org.apache.hadoop.conf.Configuration;
  */
 @InterfaceAudience.Public
 public class RestCsrfPreventionFilter implements Filter {
-
   private static final Logger LOG =
       LoggerFactory.getLogger(RestCsrfPreventionFilter.class);
 
@@ -66,7 +68,7 @@ public class RestCsrfPreventionFilter implements Filter {
   private Set<Pattern> browserUserAgents;
 
   @Override
-  public void init(FilterConfig filterConfig) throws ServletException {
+  public void init(FilterConfig filterConfig) {
     String customHeader = filterConfig.getInitParameter(CUSTOM_HEADER_PARAM);
     if (customHeader != null) {
       headerName = customHeader;
@@ -100,9 +102,7 @@
   void parseMethodsToIgnore(String mti) {
     String[] methods = mti.split(",");
     methodsToIgnore = new HashSet<>();
-    for (int i = 0; i < methods.length; i++) {
-      methodsToIgnore.add(methods[i]);
-    }
+    Collections.addAll(methodsToIgnore, methods);
   }
 
   /**
@@ -143,7 +143,6 @@
    * container configuration mechanisms to insert the filter.
    */
   public interface HttpInteraction {
-
     /**
      * Returns the value of a header.
     *
@@ -223,8 +222,7 @@
    * @return mapping of configuration properties to be used for filter
    * initialization
    */
-  public static Map<String, String> getFilterParams(Configuration conf,
-      String confPrefix) {
+  public static Map<String, String> getFilterParams(Configuration conf, String confPrefix) {
     Map<String, String> filterConfigMap = new HashMap<>();
     for (Map.Entry<String, String> entry : conf) {
       String name = entry.getKey();
@@ -240,9 +238,7 @@
   /**
    * {@link HttpInteraction} implementation for use in the servlet filter.
    */
-  private static final class ServletFilterHttpInteraction
-      implements HttpInteraction {
-
+  private static final class ServletFilterHttpInteraction implements HttpInteraction {
     private final FilterChain chain;
     private final HttpServletRequest httpRequest;
     private final HttpServletResponse httpResponse;
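parseMethodsToIgnore above replaces an indexed copy loop with Collections.addAll, which is why the first hunk of this file adds the java.util.Collections import. Both forms leave the set with one entry per array element (hypothetical demo class):

    import java.util.Collections;
    import java.util.HashSet;
    import java.util.Set;

    public class AddAllDemo {
      public static void main(String[] args) {
        String[] methods = "GET,OPTIONS,HEAD,TRACE".split(",");
        Set<String> methodsToIgnore = new HashSet<>();
        Collections.addAll(methodsToIgnore, methods);
        System.out.println(methodsToIgnore.size()); // 4
      }
    }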
hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/CellSetModel.java
@@ -16,7 +16,6 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.rest.model;
 
 import java.io.IOException;
@@ -29,13 +28,14 @@ import javax.xml.bind.annotation.XmlAccessorType;
 import javax.xml.bind.annotation.XmlElement;
 import javax.xml.bind.annotation.XmlRootElement;
 
-import org.apache.hadoop.hbase.util.ByteStringer;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
 import org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell;
 import org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet;
+import org.apache.hadoop.hbase.util.ByteStringer;
+
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Representation of a grouping of cells. May contain cells from more than
@@ -74,7 +74,6 @@ import org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet;
 @XmlAccessorType(XmlAccessType.FIELD)
 @InterfaceAudience.Private
 public class CellSetModel implements Serializable, ProtobufMessageHandler {
-
   private static final long serialVersionUID = 1L;
 
   @XmlElement(name="Row")
@@ -113,10 +112,10 @@ public class CellSetModel implements Serializable, ProtobufMessageHandler {
   @Override
   public byte[] createProtobufOutput() {
     CellSet.Builder builder = CellSet.newBuilder();
-    for (RowModel row: getRows()) {
+    for (RowModel row : getRows()) {
       CellSet.Row.Builder rowBuilder = CellSet.Row.newBuilder();
       rowBuilder.setKey(ByteStringer.wrap(row.getKey()));
-      for (CellModel cell: row.getCells()) {
+      for (CellModel cell : row.getCells()) {
         Cell.Builder cellBuilder = Cell.newBuilder();
         cellBuilder.setColumn(ByteStringer.wrap(cell.getColumn()));
         cellBuilder.setData(ByteStringer.wrap(cell.getValue()));
@@ -135,9 +134,9 @@ public class CellSetModel implements Serializable, ProtobufMessageHandler {
       throws IOException {
     CellSet.Builder builder = CellSet.newBuilder();
     ProtobufUtil.mergeFrom(builder, message);
-    for (CellSet.Row row: builder.getRowsList()) {
+    for (CellSet.Row row : builder.getRowsList()) {
       RowModel rowModel = new RowModel(row.getKey().toByteArray());
-      for (Cell cell: row.getValuesList()) {
+      for (Cell cell : row.getValuesList()) {
         long timestamp = HConstants.LATEST_TIMESTAMP;
         if (cell.hasTimestamp()) {
           timestamp = cell.getTimestamp();
hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/StorageClusterStatusModel.java
@@ -16,9 +16,10 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.rest.model;
 
+import com.fasterxml.jackson.annotation.JsonProperty;
+
 import java.io.IOException;
 import java.io.Serializable;
 import java.util.ArrayList;
@@ -29,14 +30,13 @@ import javax.xml.bind.annotation.XmlElement;
 import javax.xml.bind.annotation.XmlElementWrapper;
 import javax.xml.bind.annotation.XmlRootElement;
 
-import org.apache.hadoop.hbase.util.ByteStringer;
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
 import org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus;
+import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.hbase.util.Bytes;
 
-import com.fasterxml.jackson.annotation.JsonProperty;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Representation of the status of a storage cluster:
@@ -96,8 +96,7 @@ import com.fasterxml.jackson.annotation.JsonProperty;
  */
 @XmlRootElement(name="ClusterStatus")
 @InterfaceAudience.Private
-public class StorageClusterStatusModel
-    implements Serializable, ProtobufMessageHandler {
+public class StorageClusterStatusModel implements Serializable, ProtobufMessageHandler {
   private static final long serialVersionUID = 1L;
 
   /**
@@ -499,8 +498,7 @@ public class StorageClusterStatusModel
     }
 
     /**
-     * @param requests the number of requests per second processed by the
-     * region server
+     * @param requests the number of requests per second processed by the region server
      */
     public void setRequests(long requests) {
       this.requests = requests;
@@ -589,8 +587,8 @@ public class StorageClusterStatusModel
   }
 
   /**
-   * @return the total number of requests per second handled by the cluster in
-   * the last reporting interval
+   * @return the total number of requests per second handled by the cluster in the last reporting
+   *   interval
    */
   @XmlAttribute
   public long getRequests() {
@@ -627,8 +625,7 @@ public class StorageClusterStatusModel
   }
 
   /**
-   * @param requests the total number of requests per second handled by the
-   * cluster
+   * @param requests the total number of requests per second handled by the cluster
    */
   public void setRequests(long requests) {
     this.requests = requests;
@@ -641,10 +638,6 @@ public class StorageClusterStatusModel
     this.averageLoad = averageLoad;
   }
 
-  /*
-   * (non-Javadoc)
-   * @see java.lang.Object#toString()
-   */
   @Override
   public String toString() {
     StringBuilder sb = new StringBuilder();
@@ -654,7 +647,7 @@ public class StorageClusterStatusModel
     if (!liveNodes.isEmpty()) {
       sb.append(liveNodes.size());
       sb.append(" live servers\n");
-      for (Node node: liveNodes) {
+      for (Node node : liveNodes) {
        sb.append(" ");
        sb.append(node.name);
        sb.append(' ');
@@ -668,7 +661,7 @@ public class StorageClusterStatusModel
         sb.append("\n maxHeapSizeMB=");
         sb.append(node.maxHeapSizeMB);
         sb.append("\n\n");
-        for (Node.Region region: node.regions) {
+        for (Node.Region region : node.regions) {
           sb.append(" ");
           sb.append(Bytes.toString(region.name));
           sb.append("\n stores=");
@@ -704,7 +697,7 @@ public class StorageClusterStatusModel
       sb.append('\n');
       sb.append(deadNodes.size());
       sb.append(" dead servers\n");
-      for (String node: deadNodes) {
+      for (String node : deadNodes) {
         sb.append(" ");
         sb.append(node);
         sb.append('\n');
@@ -719,7 +712,7 @@ public class StorageClusterStatusModel
     builder.setRegions(regions);
     builder.setRequests(requests);
     builder.setAverageLoad(averageLoad);
-    for (Node node: liveNodes) {
+    for (Node node : liveNodes) {
       StorageClusterStatus.Node.Builder nodeBuilder =
         StorageClusterStatus.Node.newBuilder();
       nodeBuilder.setName(node.name);
@@ -727,7 +720,7 @@ public class StorageClusterStatusModel
       nodeBuilder.setRequests(node.requests);
       nodeBuilder.setHeapSizeMB(node.heapSizeMB);
       nodeBuilder.setMaxHeapSizeMB(node.maxHeapSizeMB);
-      for (Node.Region region: node.regions) {
+      for (Node.Region region : node.regions) {
         StorageClusterStatus.Region.Builder regionBuilder =
           StorageClusterStatus.Region.newBuilder();
         regionBuilder.setName(ByteStringer.wrap(region.name));
@@ -747,15 +740,14 @@ public class StorageClusterStatusModel
       }
       builder.addLiveNodes(nodeBuilder);
     }
-    for (String node: deadNodes) {
+    for (String node : deadNodes) {
       builder.addDeadNodes(node);
     }
     return builder.build().toByteArray();
   }
 
   @Override
-  public ProtobufMessageHandler getObjectFromMessage(byte[] message)
-    throws IOException {
+  public ProtobufMessageHandler getObjectFromMessage(byte[] message) throws IOException {
     StorageClusterStatus.Builder builder = StorageClusterStatus.newBuilder();
     ProtobufUtil.mergeFrom(builder, message);
     if (builder.hasRegions()) {
@@ -767,14 +759,14 @@ public class StorageClusterStatusModel
     if (builder.hasAverageLoad()) {
       averageLoad = builder.getAverageLoad();
     }
-    for (StorageClusterStatus.Node node: builder.getLiveNodesList()) {
+    for (StorageClusterStatus.Node node : builder.getLiveNodesList()) {
       long startCode = node.hasStartCode() ? node.getStartCode() : -1;
       StorageClusterStatusModel.Node nodeModel =
         addLiveNode(node.getName(), startCode, node.getHeapSizeMB(),
           node.getMaxHeapSizeMB());
       long requests = node.hasRequests() ? node.getRequests() : 0;
       nodeModel.setRequests(requests);
-      for (StorageClusterStatus.Region region: node.getRegionsList()) {
+      for (StorageClusterStatus.Region region : node.getRegionsList()) {
         nodeModel.addRegion(
           region.getName().toByteArray(),
           region.getStores(),
@@ -791,7 +783,7 @@ public class StorageClusterStatusModel
           region.getCurrentCompactedKVs());
       }
     }
-    for (String node: builder.getDeadNodesList()) {
+    for (String node : builder.getDeadNodesList()) {
       addDeadNode(node);
     }
     return this;
hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/model/TableSchemaModel.java
@@ -16,14 +16,17 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.rest.model;
 
+import com.fasterxml.jackson.annotation.JsonAnyGetter;
+import com.fasterxml.jackson.annotation.JsonAnySetter;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+
 import java.io.IOException;
 import java.io.Serializable;
 import java.util.ArrayList;
-import java.util.LinkedHashMap;
 import java.util.Iterator;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 
@@ -33,20 +36,16 @@ import javax.xml.bind.annotation.XmlElement;
 import javax.xml.bind.annotation.XmlRootElement;
 import javax.xml.namespace.QName;
 
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
 import org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema;
 import org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema;
 import org.apache.hadoop.hbase.util.Bytes;
-
-import com.fasterxml.jackson.annotation.JsonAnyGetter;
-import com.fasterxml.jackson.annotation.JsonAnySetter;
-import com.fasterxml.jackson.annotation.JsonIgnore;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * A representation of HBase table descriptors.
@@ -89,12 +88,11 @@ public class TableSchemaModel implements Serializable, ProtobufMessageHandler {
    */
   public TableSchemaModel(HTableDescriptor htd) {
     setName(htd.getTableName().getNameAsString());
-    for (Map.Entry<Bytes, Bytes> e:
-        htd.getValues().entrySet()) {
+    for (Map.Entry<Bytes, Bytes> e : htd.getValues().entrySet()) {
       addAttribute(Bytes.toString(e.getKey().get()),
         Bytes.toString(e.getValue().get()));
     }
-    for (HColumnDescriptor hcd: htd.getFamilies()) {
+    for (HColumnDescriptor hcd : htd.getFamilies()) {
       ColumnSchemaModel columnModel = new ColumnSchemaModel();
       columnModel.setName(hcd.getNameAsString());
       for (Map.Entry<Bytes, Bytes> e:
@@ -192,7 +190,7 @@ public class TableSchemaModel implements Serializable, ProtobufMessageHandler {
     sb.append("{ NAME=> '");
     sb.append(name);
     sb.append('\'');
-    for (Map.Entry<QName,Object> e: attrs.entrySet()) {
+    for (Map.Entry<QName,Object> e : attrs.entrySet()) {
       sb.append(", ");
       sb.append(e.getKey().getLocalPart());
       sb.append(" => '");
@@ -223,7 +221,7 @@ public class TableSchemaModel implements Serializable, ProtobufMessageHandler {
    */
   public boolean __getIsMeta() {
     Object o = attrs.get(IS_META);
-    return o != null ? Boolean.parseBoolean(o.toString()) : false;
+    return o != null && Boolean.parseBoolean(o.toString());
   }
 
   /**
@@ -231,7 +229,7 @@ public class TableSchemaModel implements Serializable, ProtobufMessageHandler {
    */
   public boolean __getIsRoot() {
     Object o = attrs.get(IS_ROOT);
-    return o != null ? Boolean.parseBoolean(o.toString()) : false;
+    return o != null && Boolean.parseBoolean(o.toString());
   }
 
   /**
@@ -267,18 +265,18 @@ public class TableSchemaModel implements Serializable, ProtobufMessageHandler {
   public byte[] createProtobufOutput() {
     TableSchema.Builder builder = TableSchema.newBuilder();
     builder.setName(name);
-    for (Map.Entry<QName, Object> e: attrs.entrySet()) {
+    for (Map.Entry<QName, Object> e : attrs.entrySet()) {
       TableSchema.Attribute.Builder attrBuilder =
         TableSchema.Attribute.newBuilder();
       attrBuilder.setName(e.getKey().getLocalPart());
       attrBuilder.setValue(e.getValue().toString());
       builder.addAttrs(attrBuilder);
     }
-    for (ColumnSchemaModel family: columns) {
+    for (ColumnSchemaModel family : columns) {
       Map<QName, Object> familyAttrs = family.getAny();
       ColumnSchema.Builder familyBuilder = ColumnSchema.newBuilder();
       familyBuilder.setName(family.getName());
-      for (Map.Entry<QName, Object> e: familyAttrs.entrySet()) {
+      for (Map.Entry<QName, Object> e : familyAttrs.entrySet()) {
         ColumnSchema.Attribute.Builder attrBuilder =
           ColumnSchema.Attribute.newBuilder();
         attrBuilder.setName(e.getKey().getLocalPart());
@@ -308,16 +306,16 @@ public class TableSchemaModel implements Serializable, ProtobufMessageHandler {
     TableSchema.Builder builder = TableSchema.newBuilder();
     ProtobufUtil.mergeFrom(builder, message);
     this.setName(builder.getName());
-    for (TableSchema.Attribute attr: builder.getAttrsList()) {
+    for (TableSchema.Attribute attr : builder.getAttrsList()) {
       this.addAttribute(attr.getName(), attr.getValue());
     }
     if (builder.hasReadOnly()) {
       this.addAttribute(HTableDescriptor.READONLY, builder.getReadOnly());
     }
-    for (ColumnSchema family: builder.getColumnsList()) {
+    for (ColumnSchema family : builder.getColumnsList()) {
       ColumnSchemaModel familyModel = new ColumnSchemaModel();
       familyModel.setName(family.getName());
-      for (ColumnSchema.Attribute attr: family.getAttrsList()) {
+      for (ColumnSchema.Attribute attr : family.getAttrsList()) {
         familyModel.addAttribute(attr.getName(), attr.getValue());
       }
       if (family.hasTtl()) {
@@ -342,17 +340,16 @@ public class TableSchemaModel implements Serializable, ProtobufMessageHandler {
   @JsonIgnore
   public HTableDescriptor getTableDescriptor() {
     HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(getName()));
-    for (Map.Entry<QName, Object> e: getAny().entrySet()) {
+    for (Map.Entry<QName, Object> e : getAny().entrySet()) {
       htd.setValue(e.getKey().getLocalPart(), e.getValue().toString());
     }
     for (ColumnSchemaModel column: getColumns()) {
      HColumnDescriptor hcd = new HColumnDescriptor(column.getName());
-      for (Map.Entry<QName, Object> e: column.getAny().entrySet()) {
+      for (Map.Entry<QName, Object> e : column.getAny().entrySet()) {
        hcd.setValue(e.getKey().getLocalPart(), e.getValue().toString());
      }
      htd.addFamily(hcd);
    }
    return htd;
  }
-
 }
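The __getIsMeta and __getIsRoot rewrites in TableSchemaModel replace a ternary whose else-branch is false with a short-circuit &&; the two forms agree for every input (hypothetical demo class):

    public class BooleanSimplifyDemo {
      public static void main(String[] args) {
        for (Object o : new Object[] { null, "true", "false", 42 }) {
          boolean viaTernary = o != null ? Boolean.parseBoolean(o.toString()) : false;
          boolean viaAnd = o != null && Boolean.parseBoolean(o.toString());
          System.out.println(viaTernary == viaAnd); // true for all four inputs
        }
      }
    }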