HBASE-23661 Reduced number of Checkstyle violations in hbase-rest

Signed-off-by: stack <stack@apache.org>
Author: Jan Hentschel 2020-01-26 13:38:42 +01:00 (committed by GitHub)
parent 21f2eddd20
commit 4a39f0a2c5
10 changed files with 91 additions and 126 deletions

View File

@@ -16,14 +16,13 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.rest;
import java.io.IOException;
import java.util.Iterator;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.rest.model.ScannerModel;

View File

@@ -16,7 +16,6 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.rest;
import java.io.IOException;
@@ -25,10 +24,10 @@ import java.util.NoSuchElementException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.NeedUnmanagedConnectionException;
import org.apache.hadoop.hbase.client.Result;
@@ -46,8 +45,7 @@ public class RowResultGenerator extends ResultGenerator {
public RowResultGenerator(final String tableName, final RowSpec rowspec,
final Filter filter, final boolean cacheBlocks)
throws IllegalArgumentException, IOException {
Table table = RESTServlet.getInstance().getTable(tableName);
try {
try (Table table = RESTServlet.getInstance().getTable(tableName)) {
Get get = new Get(rowspec.getRow());
if (rowspec.hasColumns()) {
for (byte[] col : rowspec.getColumns()) {
@@ -79,8 +77,6 @@ public class RowResultGenerator extends ResultGenerator {
// help to avoid confusion by leaving a record of what happened here in
// the log.
LOG.warn(StringUtils.stringifyException(e));
} finally {
table.close();
}
}
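Note: the hunk above replaces a manual try/finally close of the Table with try-with-resources, which closes the resource automatically on any exit path. A minimal standalone sketch of the pattern (the Resource class is illustrative only, not from the commit):

import java.io.IOException;

public class TryWithResourcesSketch {
  // Any AutoCloseable can be managed by a try-with-resources statement.
  static class Resource implements AutoCloseable {
    void use() { System.out.println("using resource"); }
    @Override
    public void close() { System.out.println("closed"); }
  }

  public static void main(String[] args) throws IOException {
    // close() runs when the block exits, whether normally or via an
    // exception, so no finally block is needed.
    try (Resource r = new Resource()) {
      r.use();
    }
  }
}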

View File

@@ -16,7 +16,6 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.rest.client;
import java.io.InputStream;
@@ -26,7 +25,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* The HTTP result code, response headers, and body of a HTTP response.
* The HTTP result code, response headers, and body of an HTTP response.
*/
@InterfaceAudience.Public
@InterfaceStability.Stable
@@ -66,7 +65,8 @@ public class Response {
}
/**
* Constructor
* Constructor.
*
* @param code the HTTP response code
* @param headers the HTTP response headers
* @param body the response body, can be null

View File

@@ -16,7 +16,6 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.rest.filter;
import java.io.IOException;
@@ -28,8 +27,7 @@ import javax.servlet.http.HttpServletRequest;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
@InterfaceAudience.Private
public class GZIPRequestStream extends ServletInputStream
{
public class GZIPRequestStream extends ServletInputStream {
private GZIPInputStream in;
public GZIPRequestStream(HttpServletRequest request) throws IOException {

View File

@@ -16,7 +16,6 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.rest.filter;
import java.io.IOException;
@@ -28,8 +27,7 @@ import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
@InterfaceAudience.Private
public class GZIPResponseStream extends ServletOutputStream
{
public class GZIPResponseStream extends ServletOutputStream {
private HttpServletResponse response;
private GZIPOutputStream out;

View File

@@ -16,7 +16,6 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.rest.filter;
import java.io.IOException;
@@ -35,15 +34,15 @@ import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class GzipFilter implements Filter {
private Set<String> mimeTypes = new HashSet<String>();
private Set<String> mimeTypes = new HashSet<>();
@Override
public void init(FilterConfig filterConfig) throws ServletException {
public void init(FilterConfig filterConfig) {
String s = filterConfig.getInitParameter("mimeTypes");
if (s != null) {
StringTokenizer tok = new StringTokenizer(s, ",", false);
@@ -66,11 +65,11 @@ public class GzipFilter implements Filter {
String acceptEncoding = request.getHeader("accept-encoding");
String contentType = request.getHeader("content-type");
if ((contentEncoding != null) &&
(contentEncoding.toLowerCase(Locale.ROOT).indexOf("gzip") > -1)) {
(contentEncoding.toLowerCase(Locale.ROOT).contains("gzip"))) {
request = new GZIPRequestWrapper(request);
}
if (((acceptEncoding != null) &&
(acceptEncoding.toLowerCase(Locale.ROOT).indexOf("gzip") > -1)) ||
(acceptEncoding.toLowerCase(Locale.ROOT).contains("gzip"))) ||
((contentType != null) && mimeTypes.contains(contentType))) {
response = new GZIPResponseWrapper(response);
}
@@ -82,5 +81,4 @@ public class GzipFilter implements Filter {
}
}
}
}
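Note: the GzipFilter hunks replace indexOf("gzip") > -1 with contains("gzip"); the two tests are equivalent for substring checks, and contains reads as intent. A minimal standalone sketch (class name illustrative only):

import java.util.Locale;

public class ContainsSketch {
  public static void main(String[] args) {
    String acceptEncoding = "gzip, deflate";
    // Old style: index comparison.
    boolean oldStyle = acceptEncoding.toLowerCase(Locale.ROOT).indexOf("gzip") > -1;
    // New style: same result, clearer intent.
    boolean newStyle = acceptEncoding.toLowerCase(Locale.ROOT).contains("gzip");
    System.out.println(oldStyle == newStyle); // true
  }
}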

View File

@@ -18,6 +18,7 @@
package org.apache.hadoop.hbase.rest.filter;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
@@ -36,9 +37,9 @@ import javax.servlet.http.HttpServletResponse;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
/**
* This filter provides protection against cross site request forgery (CSRF)
@@ -50,9 +51,7 @@ import org.apache.hadoop.conf.Configuration;
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class RestCsrfPreventionFilter implements Filter {
private static final Log LOG =
LogFactory.getLog(RestCsrfPreventionFilter.class);
private static final Log LOG = LogFactory.getLog(RestCsrfPreventionFilter.class);
public static final String HEADER_USER_AGENT = "User-Agent";
public static final String BROWSER_USER_AGENT_PARAM =
@@ -68,7 +67,7 @@ public class RestCsrfPreventionFilter implements Filter {
private Set<Pattern> browserUserAgents;
@Override
public void init(FilterConfig filterConfig) throws ServletException {
public void init(FilterConfig filterConfig) {
String customHeader = filterConfig.getInitParameter(CUSTOM_HEADER_PARAM);
if (customHeader != null) {
headerName = customHeader;
@@ -93,7 +92,7 @@ public class RestCsrfPreventionFilter implements Filter {
void parseBrowserUserAgents(String userAgents) {
String[] agentsArray = userAgents.split(",");
browserUserAgents = new HashSet<Pattern>();
browserUserAgents = new HashSet<>();
for (String patternString : agentsArray) {
browserUserAgents.add(Pattern.compile(patternString));
}
@@ -101,10 +100,8 @@ public class RestCsrfPreventionFilter implements Filter {
void parseMethodsToIgnore(String mti) {
String[] methods = mti.split(",");
methodsToIgnore = new HashSet<String>();
for (int i = 0; i < methods.length; i++) {
methodsToIgnore.add(methods[i]);
}
methodsToIgnore = new HashSet<>();
Collections.addAll(methodsToIgnore, methods);
}
/**
@@ -145,7 +142,6 @@ public class RestCsrfPreventionFilter implements Filter {
* container configuration mechanisms to insert the filter.
*/
public interface HttpInteraction {
/**
* Returns the value of a header.
*
@@ -225,8 +221,7 @@ public class RestCsrfPreventionFilter implements Filter {
* @return mapping of configuration properties to be used for filter
* initialization
*/
public static Map<String, String> getFilterParams(Configuration conf,
String confPrefix) {
public static Map<String, String> getFilterParams(Configuration conf, String confPrefix) {
Map<String, String> filterConfigMap = new HashMap<>();
for (Map.Entry<String, String> entry : conf) {
String name = entry.getKey();
@@ -242,9 +237,7 @@ public class RestCsrfPreventionFilter implements Filter {
/**
* {@link HttpInteraction} implementation for use in the servlet filter.
*/
private static final class ServletFilterHttpInteraction
implements HttpInteraction {
private static final class ServletFilterHttpInteraction implements HttpInteraction {
private final FilterChain chain;
private final HttpServletRequest httpRequest;
private final HttpServletResponse httpResponse;
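Note: the parseMethodsToIgnore hunk above swaps a manual index loop for Collections.addAll, which adds every array element to the target collection in one call. A minimal standalone sketch mirroring that change (class name illustrative only):

import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

public class AddAllSketch {
  public static void main(String[] args) {
    String[] methods = "GET,OPTIONS,HEAD".split(",");
    Set<String> methodsToIgnore = new HashSet<>();
    // Equivalent to iterating the array and calling add() per element.
    Collections.addAll(methodsToIgnore, methods);
    System.out.println(methodsToIgnore.contains("HEAD")); // true
  }
}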

View File

@@ -16,7 +16,6 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.rest.model;
import java.io.IOException;
@@ -29,13 +28,13 @@ import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell;
import org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet;
import org.apache.hadoop.hbase.util.ByteStringer;
/**
* Representation of a grouping of cells. May contain cells from more than
@@ -74,7 +73,6 @@ import org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet;
@XmlAccessorType(XmlAccessType.FIELD)
@InterfaceAudience.Private
public class CellSetModel implements Serializable, ProtobufMessageHandler {
private static final long serialVersionUID = 1L;
@XmlElement(name="Row")
@@ -84,7 +82,7 @@ public class CellSetModel implements Serializable, ProtobufMessageHandler {
* Constructor
*/
public CellSetModel() {
this.rows = new ArrayList<RowModel>();
this.rows = new ArrayList<>();
}
/**
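Note: new ArrayList<RowModel>() becomes new ArrayList<>() here, as in several other files in this commit; since Java 7 the diamond operator lets the compiler infer the type argument from the declaration. A minimal standalone sketch (class name illustrative only):

import java.util.ArrayList;
import java.util.List;

public class DiamondSketch {
  public static void main(String[] args) {
    // Pre-Java 7 style: type argument repeated on both sides.
    List<String> verbose = new ArrayList<String>();
    // Diamond operator: <String> is inferred from the declared type.
    List<String> concise = new ArrayList<>();
    verbose.add("a");
    concise.add("a");
    System.out.println(verbose.equals(concise)); // true
  }
}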

View File

@@ -16,7 +16,6 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.rest.model;
import com.fasterxml.jackson.annotation.JsonProperty;
@@ -31,11 +30,11 @@ import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElementWrapper;
import javax.xml.bind.annotation.XmlRootElement;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes;
/**
@@ -96,8 +95,7 @@ import org.apache.hadoop.hbase.util.Bytes;
*/
@XmlRootElement(name="ClusterStatus")
@InterfaceAudience.Private
public class StorageClusterStatusModel
implements Serializable, ProtobufMessageHandler {
public class StorageClusterStatusModel implements Serializable, ProtobufMessageHandler {
private static final long serialVersionUID = 1L;
/**
@@ -111,6 +109,7 @@ public class StorageClusterStatusModel
*/
public static class Region implements Serializable {
private static final long serialVersionUID = -1326683840086398193L;
private byte[] name;
private int stores;
private int storefiles;
@@ -374,7 +373,7 @@ public class StorageClusterStatusModel
private long requests;
private int heapSizeMB;
private int maxHeapSizeMB;
private List<Region> regions = new ArrayList<Region>();
private List<Region> regions = new ArrayList<>();
/**
* Add a region name to the list
@@ -498,16 +497,15 @@ public class StorageClusterStatusModel
}
/**
* @param requests the number of requests per second processed by the
* region server
* @param requests the number of requests per second processed by the region server
*/
public void setRequests(long requests) {
this.requests = requests;
}
}
private List<Node> liveNodes = new ArrayList<Node>();
private List<String> deadNodes = new ArrayList<String>();
private List<Node> liveNodes = new ArrayList<>();
private List<String> deadNodes = new ArrayList<>();
private int regions;
private long requests;
private double averageLoad;
@@ -588,8 +586,8 @@ public class StorageClusterStatusModel
}
/**
* @return the total number of requests per second handled by the cluster in
* the last reporting interval
* @return the total number of requests per second handled by the cluster in the last reporting
* interval
*/
@XmlAttribute
public long getRequests() {
@@ -626,8 +624,7 @@ public class StorageClusterStatusModel
}
/**
* @param requests the total number of requests per second handled by the
* cluster
* @param requests the total number of requests per second handled by the cluster
*/
public void setRequests(int requests) {
this.requests = requests;
@@ -640,10 +637,6 @@ public class StorageClusterStatusModel
this.averageLoad = averageLoad;
}
/*
* (non-Javadoc)
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
@@ -753,8 +746,7 @@ public class StorageClusterStatusModel
}
@Override
public ProtobufMessageHandler getObjectFromMessage(byte[] message)
throws IOException {
public ProtobufMessageHandler getObjectFromMessage(byte[] message) throws IOException {
StorageClusterStatus.Builder builder = StorageClusterStatus.newBuilder();
ProtobufUtil.mergeFrom(builder, message);
if (builder.hasRegions()) {

View File

@@ -16,7 +16,6 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.rest.model;
import com.fasterxml.jackson.annotation.JsonAnyGetter;
@@ -26,8 +25,8 @@ import com.fasterxml.jackson.annotation.JsonIgnore;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
@@ -37,11 +36,11 @@ import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.namespace.QName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
@@ -76,8 +75,8 @@ public class TableSchemaModel implements Serializable, ProtobufMessageHandler {
new QName(HColumnDescriptor.COMPRESSION);
private String name;
private Map<QName,Object> attrs = new LinkedHashMap<QName,Object>();
private List<ColumnSchemaModel> columns = new ArrayList<ColumnSchemaModel>();
private Map<QName,Object> attrs = new LinkedHashMap<>();
private List<ColumnSchemaModel> columns = new ArrayList<>();
/**
* Default constructor.
@@ -90,8 +89,7 @@ public class TableSchemaModel implements Serializable, ProtobufMessageHandler {
*/
public TableSchemaModel(HTableDescriptor htd) {
setName(htd.getTableName().getNameAsString());
for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> e:
htd.getValues().entrySet()) {
for (Map.Entry<ImmutableBytesWritable, ImmutableBytesWritable> e : htd.getValues().entrySet()) {
addAttribute(Bytes.toString(e.getKey().get()),
Bytes.toString(e.getValue().get()));
}
@@ -224,7 +222,7 @@ public class TableSchemaModel implements Serializable, ProtobufMessageHandler {
*/
public boolean __getIsMeta() {
Object o = attrs.get(IS_META);
return o != null ? Boolean.parseBoolean(o.toString()) : false;
return o != null && Boolean.parseBoolean(o.toString());
}
/**
@@ -232,7 +230,7 @@ public class TableSchemaModel implements Serializable, ProtobufMessageHandler {
*/
public boolean __getIsRoot() {
Object o = attrs.get(IS_ROOT);
return o != null ? Boolean.parseBoolean(o.toString()) : false;
return o != null && Boolean.parseBoolean(o.toString());
}
/**
@@ -240,8 +238,7 @@ public class TableSchemaModel implements Serializable, ProtobufMessageHandler {
*/
public boolean __getReadOnly() {
Object o = attrs.get(READONLY);
return o != null ?
Boolean.parseBoolean(o.toString()) : HTableDescriptor.DEFAULT_READONLY;
return o != null ? Boolean.parseBoolean(o.toString()) : HTableDescriptor.DEFAULT_READONLY;
}
/**
@@ -288,12 +285,10 @@ public class TableSchemaModel implements Serializable, ProtobufMessageHandler {
familyBuilder.addAttrs(attrBuilder);
}
if (familyAttrs.containsKey(TTL)) {
familyBuilder.setTtl(
Integer.parseInt(familyAttrs.get(TTL).toString()));
familyBuilder.setTtl(Integer.parseInt(familyAttrs.get(TTL).toString()));
}
if (familyAttrs.containsKey(VERSIONS)) {
familyBuilder.setMaxVersions(
Integer.parseInt(familyAttrs.get(VERSIONS).toString()));
familyBuilder.setMaxVersions(Integer.parseInt(familyAttrs.get(VERSIONS).toString()));
}
if (familyAttrs.containsKey(COMPRESSION)) {
familyBuilder.setCompression(familyAttrs.get(COMPRESSION).toString());
@@ -301,8 +296,7 @@ public class TableSchemaModel implements Serializable, ProtobufMessageHandler {
builder.addColumns(familyBuilder);
}
if (attrs.containsKey(READONLY)) {
builder.setReadOnly(
Boolean.parseBoolean(attrs.get(READONLY).toString()));
builder.setReadOnly(Boolean.parseBoolean(attrs.get(READONLY).toString()));
}
return builder.build().toByteArray();
}
@@ -359,5 +353,4 @@ public class TableSchemaModel implements Serializable, ProtobufMessageHandler {
}
return htd;
}
}
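Note: the __getIsMeta and __getIsRoot hunks above simplify o != null ? Boolean.parseBoolean(o.toString()) : false to o != null && Boolean.parseBoolean(o.toString()); the two forms are equivalent because && short-circuits and never evaluates the right-hand side when o is null. A minimal standalone sketch (class name illustrative only):

public class BooleanSimplifySketch {
  public static void main(String[] args) {
    Object o = "true";
    // Ternary form: returns false explicitly when o is null.
    boolean viaTernary = o != null ? Boolean.parseBoolean(o.toString()) : false;
    // Simplified form: short-circuiting && yields false when o is null.
    boolean viaAnd = o != null && Boolean.parseBoolean(o.toString());
    System.out.println(viaTernary == viaAnd); // true
  }
}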