HBASE-23661 Reduced number of Checkstyle violations in hbase-rest

Signed-off-by: Viraj Jasani <vjasani@apache.org>
This commit is contained in:
Jan Hentschel 2020-01-19 18:17:14 +01:00 committed by GitHub
parent 70c69ba765
commit f6a2238889
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
10 changed files with 78 additions and 99 deletions

View File

@@ -16,17 +16,17 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.hbase.rest; package org.apache.hadoop.hbase.rest;
import java.io.IOException; import java.io.IOException;
import java.util.Iterator; import java.util.Iterator;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.rest.model.ScannerModel; import org.apache.hadoop.hbase.rest.model.ScannerModel;
import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private @InterfaceAudience.Private
public abstract class ResultGenerator implements Iterator<Cell> { public abstract class ResultGenerator implements Iterator<Cell> {

View File

@@ -16,7 +16,6 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.hbase.rest; package org.apache.hadoop.hbase.rest;
import java.io.IOException; import java.io.IOException;
@@ -26,13 +25,16 @@ import java.util.NoSuchElementException;
import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.security.AccessDeniedException;
import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.security.AccessDeniedException;
import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.StringUtils;
import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@@ -46,8 +48,7 @@ public class RowResultGenerator extends ResultGenerator {
public RowResultGenerator(final String tableName, final RowSpec rowspec, public RowResultGenerator(final String tableName, final RowSpec rowspec,
final Filter filter, final boolean cacheBlocks) final Filter filter, final boolean cacheBlocks)
throws IllegalArgumentException, IOException { throws IllegalArgumentException, IOException {
Table table = RESTServlet.getInstance().getTable(tableName); try (Table table = RESTServlet.getInstance().getTable(tableName)) {
try {
Get get = new Get(rowspec.getRow()); Get get = new Get(rowspec.getRow());
if (rowspec.hasColumns()) { if (rowspec.hasColumns()) {
for (byte[] col : rowspec.getColumns()) { for (byte[] col : rowspec.getColumns()) {
@@ -83,8 +84,6 @@ public class RowResultGenerator extends ResultGenerator {
if (e instanceof AccessDeniedException) { if (e instanceof AccessDeniedException) {
throw e; throw e;
} }
} finally {
table.close();
} }
} }

View File

@@ -16,18 +16,19 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.hbase.rest.client; package org.apache.hadoop.hbase.rest.client;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.http.Header; import org.apache.http.Header;
import org.apache.http.HttpResponse; import org.apache.http.HttpResponse;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** /**
* The HTTP result code, response headers, and body of an HTTP response. * The HTTP result code, response headers, and body of an HTTP response.
*/ */
@@ -71,12 +72,12 @@ public class Response {
} }
/** /**
* Constructor * Constructor. Note: this is not thread-safe
*
* @param code the HTTP response code * @param code the HTTP response code
* @param headers headers the HTTP response headers * @param headers headers the HTTP response headers
* @param resp the response * @param resp the response
* @param in Inputstream if the response had one. * @param in Inputstream if the response had one.
* Note: this is not thread-safe
*/ */
public Response(int code, Header[] headers, HttpResponse resp, InputStream in) { public Response(int code, Header[] headers, HttpResponse resp, InputStream in) {
this.code = code; this.code = code;

View File

@@ -16,7 +16,6 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.hbase.rest.filter; package org.apache.hadoop.hbase.rest.filter;
import java.io.IOException; import java.io.IOException;
@@ -29,8 +28,7 @@ import javax.servlet.http.HttpServletRequest;
import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private @InterfaceAudience.Private
public class GZIPRequestStream extends ServletInputStream public class GZIPRequestStream extends ServletInputStream {
{
private GZIPInputStream in; private GZIPInputStream in;
public GZIPRequestStream(HttpServletRequest request) throws IOException { public GZIPRequestStream(HttpServletRequest request) throws IOException {

View File

@@ -16,7 +16,6 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.hbase.rest.filter; package org.apache.hadoop.hbase.rest.filter;
import java.io.IOException; import java.io.IOException;
@@ -29,8 +28,7 @@ import javax.servlet.http.HttpServletResponse;
import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private @InterfaceAudience.Private
public class GZIPResponseStream extends ServletOutputStream public class GZIPResponseStream extends ServletOutputStream {
{
private HttpServletResponse response; private HttpServletResponse response;
private GZIPOutputStream out; private GZIPOutputStream out;

View File

@@ -16,7 +16,6 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.hbase.rest.filter; package org.apache.hadoop.hbase.rest.filter;
import java.io.IOException; import java.io.IOException;
@@ -35,15 +34,16 @@ import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpServletResponse;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG) @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG)
public class GzipFilter implements Filter { public class GzipFilter implements Filter {
private Set<String> mimeTypes = new HashSet<>(); private Set<String> mimeTypes = new HashSet<>();
@Override @Override
public void init(FilterConfig filterConfig) throws ServletException { public void init(FilterConfig filterConfig) {
String s = filterConfig.getInitParameter("mimeTypes"); String s = filterConfig.getInitParameter("mimeTypes");
if (s != null) { if (s != null) {
StringTokenizer tok = new StringTokenizer(s, ",", false); StringTokenizer tok = new StringTokenizer(s, ",", false);
@@ -66,11 +66,11 @@ public class GzipFilter implements Filter {
String acceptEncoding = request.getHeader("accept-encoding"); String acceptEncoding = request.getHeader("accept-encoding");
String contentType = request.getHeader("content-type"); String contentType = request.getHeader("content-type");
if ((contentEncoding != null) && if ((contentEncoding != null) &&
(contentEncoding.toLowerCase(Locale.ROOT).indexOf("gzip") > -1)) { (contentEncoding.toLowerCase(Locale.ROOT).contains("gzip"))) {
request = new GZIPRequestWrapper(request); request = new GZIPRequestWrapper(request);
} }
if (((acceptEncoding != null) && if (((acceptEncoding != null) &&
(acceptEncoding.toLowerCase(Locale.ROOT).indexOf("gzip") > -1)) || (acceptEncoding.toLowerCase(Locale.ROOT).contains("gzip"))) ||
((contentType != null) && mimeTypes.contains(contentType))) { ((contentType != null) && mimeTypes.contains(contentType))) {
response = new GZIPResponseWrapper(response); response = new GZIPResponseWrapper(response);
} }
@@ -82,5 +82,4 @@ public class GzipFilter implements Filter {
} }
} }
} }
} }

View File

@@ -18,6 +18,7 @@
package org.apache.hadoop.hbase.rest.filter; package org.apache.hadoop.hbase.rest.filter;
import java.io.IOException; import java.io.IOException;
import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.Map; import java.util.Map;
@@ -34,10 +35,12 @@ import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.conf.Configuration;
import org.apache.yetus.audience.InterfaceAudience; import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
/** /**
* This filter provides protection against cross site request forgery (CSRF) * This filter provides protection against cross site request forgery (CSRF)
@@ -48,7 +51,6 @@ import org.apache.hadoop.conf.Configuration;
*/ */
@InterfaceAudience.Public @InterfaceAudience.Public
public class RestCsrfPreventionFilter implements Filter { public class RestCsrfPreventionFilter implements Filter {
private static final Logger LOG = private static final Logger LOG =
LoggerFactory.getLogger(RestCsrfPreventionFilter.class); LoggerFactory.getLogger(RestCsrfPreventionFilter.class);
@@ -66,7 +68,7 @@ public class RestCsrfPreventionFilter implements Filter {
private Set<Pattern> browserUserAgents; private Set<Pattern> browserUserAgents;
@Override @Override
public void init(FilterConfig filterConfig) throws ServletException { public void init(FilterConfig filterConfig) {
String customHeader = filterConfig.getInitParameter(CUSTOM_HEADER_PARAM); String customHeader = filterConfig.getInitParameter(CUSTOM_HEADER_PARAM);
if (customHeader != null) { if (customHeader != null) {
headerName = customHeader; headerName = customHeader;
@@ -100,9 +102,7 @@ public class RestCsrfPreventionFilter implements Filter {
void parseMethodsToIgnore(String mti) { void parseMethodsToIgnore(String mti) {
String[] methods = mti.split(","); String[] methods = mti.split(",");
methodsToIgnore = new HashSet<>(); methodsToIgnore = new HashSet<>();
for (int i = 0; i < methods.length; i++) { Collections.addAll(methodsToIgnore, methods);
methodsToIgnore.add(methods[i]);
}
} }
/** /**
@ -143,7 +143,6 @@ public class RestCsrfPreventionFilter implements Filter {
* container configuration mechanisms to insert the filter. * container configuration mechanisms to insert the filter.
*/ */
public interface HttpInteraction { public interface HttpInteraction {
/** /**
* Returns the value of a header. * Returns the value of a header.
* *
@@ -223,8 +222,7 @@
* @return mapping of configuration properties to be used for filter * @return mapping of configuration properties to be used for filter
* initialization * initialization
*/ */
public static Map<String, String> getFilterParams(Configuration conf, public static Map<String, String> getFilterParams(Configuration conf, String confPrefix) {
String confPrefix) {
Map<String, String> filterConfigMap = new HashMap<>(); Map<String, String> filterConfigMap = new HashMap<>();
for (Map.Entry<String, String> entry : conf) { for (Map.Entry<String, String> entry : conf) {
String name = entry.getKey(); String name = entry.getKey();
@@ -240,9 +238,7 @@
/** /**
* {@link HttpInteraction} implementation for use in the servlet filter. * {@link HttpInteraction} implementation for use in the servlet filter.
*/ */
private static final class ServletFilterHttpInteraction private static final class ServletFilterHttpInteraction implements HttpInteraction {
implements HttpInteraction {
private final FilterChain chain; private final FilterChain chain;
private final HttpServletRequest httpRequest; private final HttpServletRequest httpRequest;
private final HttpServletResponse httpResponse; private final HttpServletResponse httpResponse;

View File

@@ -16,7 +16,6 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.hbase.rest.model; package org.apache.hadoop.hbase.rest.model;
import java.io.IOException; import java.io.IOException;
@@ -29,13 +28,14 @@ import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlRootElement;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler; import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell; import org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell;
import org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet; import org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.yetus.audience.InterfaceAudience;
/** /**
* Representation of a grouping of cells. May contain cells from more than * Representation of a grouping of cells. May contain cells from more than
@@ -74,7 +74,6 @@ import org.apache.hadoop.hbase.rest.protobuf.generated.CellSetMessage.CellSet;
@XmlAccessorType(XmlAccessType.FIELD) @XmlAccessorType(XmlAccessType.FIELD)
@InterfaceAudience.Private @InterfaceAudience.Private
public class CellSetModel implements Serializable, ProtobufMessageHandler { public class CellSetModel implements Serializable, ProtobufMessageHandler {
private static final long serialVersionUID = 1L; private static final long serialVersionUID = 1L;
@XmlElement(name="Row") @XmlElement(name="Row")

View File

@@ -16,9 +16,10 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.hbase.rest.model; package org.apache.hadoop.hbase.rest.model;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.io.IOException; import java.io.IOException;
import java.io.Serializable; import java.io.Serializable;
import java.util.ArrayList; import java.util.ArrayList;
@@ -29,14 +30,13 @@ import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElementWrapper; import javax.xml.bind.annotation.XmlElementWrapper;
import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlRootElement;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus; import org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import com.fasterxml.jackson.annotation.JsonProperty; import org.apache.yetus.audience.InterfaceAudience;
/** /**
* Representation of the status of a storage cluster: * Representation of the status of a storage cluster:
@@ -96,8 +96,7 @@ import com.fasterxml.jackson.annotation.JsonProperty;
*/ */
@XmlRootElement(name="ClusterStatus") @XmlRootElement(name="ClusterStatus")
@InterfaceAudience.Private @InterfaceAudience.Private
public class StorageClusterStatusModel public class StorageClusterStatusModel implements Serializable, ProtobufMessageHandler {
implements Serializable, ProtobufMessageHandler {
private static final long serialVersionUID = 1L; private static final long serialVersionUID = 1L;
/** /**
@ -499,8 +498,7 @@ public class StorageClusterStatusModel
} }
/** /**
* @param requests the number of requests per second processed by the * @param requests the number of requests per second processed by the region server
* region server
*/ */
public void setRequests(long requests) { public void setRequests(long requests) {
this.requests = requests; this.requests = requests;
@ -589,8 +587,8 @@ public class StorageClusterStatusModel
} }
/** /**
* @return the total number of requests per second handled by the cluster in * @return the total number of requests per second handled by the cluster in the last reporting
* the last reporting interval * interval
*/ */
@XmlAttribute @XmlAttribute
public long getRequests() { public long getRequests() {
@ -627,8 +625,7 @@ public class StorageClusterStatusModel
} }
/** /**
* @param requests the total number of requests per second handled by the * @param requests the total number of requests per second handled by the cluster
* cluster
*/ */
public void setRequests(long requests) { public void setRequests(long requests) {
this.requests = requests; this.requests = requests;
@ -641,10 +638,6 @@ public class StorageClusterStatusModel
this.averageLoad = averageLoad; this.averageLoad = averageLoad;
} }
/*
* (non-Javadoc)
* @see java.lang.Object#toString()
*/
@Override @Override
public String toString() { public String toString() {
StringBuilder sb = new StringBuilder(); StringBuilder sb = new StringBuilder();
@ -754,8 +747,7 @@ public class StorageClusterStatusModel
} }
@Override @Override
public ProtobufMessageHandler getObjectFromMessage(byte[] message) public ProtobufMessageHandler getObjectFromMessage(byte[] message) throws IOException {
throws IOException {
StorageClusterStatus.Builder builder = StorageClusterStatus.newBuilder(); StorageClusterStatus.Builder builder = StorageClusterStatus.newBuilder();
ProtobufUtil.mergeFrom(builder, message); ProtobufUtil.mergeFrom(builder, message);
if (builder.hasRegions()) { if (builder.hasRegions()) {

View File

@@ -16,14 +16,17 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.hbase.rest.model; package org.apache.hadoop.hbase.rest.model;
import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import java.io.IOException; import java.io.IOException;
import java.io.Serializable; import java.io.Serializable;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.Iterator; import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@ -33,20 +36,16 @@ import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.namespace.QName; import javax.xml.namespace.QName;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema; import org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema;
import org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema; import org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
/** /**
* A representation of HBase table descriptors. * A representation of HBase table descriptors.
@@ -89,8 +88,7 @@ public class TableSchemaModel implements Serializable, ProtobufMessageHandler {
*/ */
public TableSchemaModel(HTableDescriptor htd) { public TableSchemaModel(HTableDescriptor htd) {
setName(htd.getTableName().getNameAsString()); setName(htd.getTableName().getNameAsString());
for (Map.Entry<Bytes, Bytes> e: for (Map.Entry<Bytes, Bytes> e : htd.getValues().entrySet()) {
htd.getValues().entrySet()) {
addAttribute(Bytes.toString(e.getKey().get()), addAttribute(Bytes.toString(e.getKey().get()),
Bytes.toString(e.getValue().get())); Bytes.toString(e.getValue().get()));
} }
@ -223,7 +221,7 @@ public class TableSchemaModel implements Serializable, ProtobufMessageHandler {
*/ */
public boolean __getIsMeta() { public boolean __getIsMeta() {
Object o = attrs.get(IS_META); Object o = attrs.get(IS_META);
return o != null ? Boolean.parseBoolean(o.toString()) : false; return o != null && Boolean.parseBoolean(o.toString());
} }
/** /**
@ -231,7 +229,7 @@ public class TableSchemaModel implements Serializable, ProtobufMessageHandler {
*/ */
public boolean __getIsRoot() { public boolean __getIsRoot() {
Object o = attrs.get(IS_ROOT); Object o = attrs.get(IS_ROOT);
return o != null ? Boolean.parseBoolean(o.toString()) : false; return o != null && Boolean.parseBoolean(o.toString());
} }
/** /**
@ -354,5 +352,4 @@ public class TableSchemaModel implements Serializable, ProtobufMessageHandler {
} }
return htd; return htd;
} }
} }