HBASE-16338 Remove Jackson1 deps

* Change imports from org.codehaus to com.fasterxml
* Exclude transitive jackson1 from hadoop and others
* Minor test cleanup to add assert messages, fix some parameter order
* Add anti-pattern check for using jackson 1 imports
* Add explicit non-null serialization directive to ScannerModel
Mike Drob 2017-10-02 16:31:48 -05:00
parent a43a00e89c
commit 5facaded90
53 changed files with 317 additions and 281 deletions
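
At its core the migration is mechanical: the same class names move from org.codehaus.jackson packages to com.fasterxml.jackson packages, with a handful of renamed methods and feature enums. A minimal sketch of the before/after, using a hypothetical POJO (only jackson-databind on the classpath is assumed):

import com.fasterxml.jackson.annotation.JsonProperty;  // was org.codehaus.jackson.annotate.JsonProperty
import com.fasterxml.jackson.databind.ObjectMapper;    // was org.codehaus.jackson.map.ObjectMapper

public class JacksonMigrationSketch {
  // Hypothetical POJO, not part of this patch
  static class Cell {
    @JsonProperty("column") public String column = "cf:qual";
    @JsonProperty("$") public String value = "v1";
  }

  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper(); // same class name, new package
    System.out.println(mapper.writeValueAsString(new Cell()));
    // {"column":"cf:qual","$":"v1"}
  }
}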

View File

@ -428,6 +428,12 @@ function hbaseanti_patchfile
((result=result+1))
fi
warnings=$(${GREP} -c 'import org.codehaus.jackson' "${patchfile}")
if [[ ${warnings} -gt 0 ]]; then
add_vote_table -1 hbaseanti "" "The patch appears to use Jackson 1 classes/annotations: ${warnings}."
((result=result+1))
fi
if [[ ${result} -gt 0 ]]; then
return 1
fi
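
For reference, the new check trips on any patch line that still imports the old package, for example this hypothetical line:

import org.codehaus.jackson.map.ObjectMapper;  // would be flagged by hbaseanti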

View File

@ -168,10 +168,6 @@
<groupId>org.apache.htrace</groupId>
<artifactId>htrace-core</artifactId>
</dependency>
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
</dependency>
<dependency>
<groupId>org.jruby.jcodings</groupId>
<artifactId>jcodings</artifactId>
@ -218,6 +214,10 @@
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</dependency>
</dependencies>
<profiles>

View File

@ -21,8 +21,8 @@ package org.apache.hadoop.hbase.util;
import java.io.IOException;
import java.util.Map;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.yetus.audience.InterfaceAudience;
import org.codehaus.jackson.map.ObjectMapper;
/**
* Utility class for converting objects to JSON

View File

@ -62,10 +62,10 @@ import org.apache.hadoop.hbase.testclassification.ClientTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.BuilderStyleTest;
import org.apache.hadoop.hbase.util.Bytes;
import org.codehaus.jackson.map.ObjectMapper;
import org.junit.Assert;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
* Run tests that use the functionality of the Operation superclass for

View File

@ -294,6 +294,10 @@
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</dependency>
</dependencies>
<profiles>

View File

@ -18,13 +18,13 @@
package org.apache.hadoop.hbase;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.util.ReflectionUtils;
import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.map.ObjectMapper;
import org.glassfish.jersey.client.authentication.HttpAuthenticationFeature;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
@ -222,8 +222,8 @@ public class RESTApiClusterManager extends Configured implements ClusterManager
if (hosts != null) {
// Iterate through the list of hosts, stopping once you've reached the requested hostname.
for (JsonNode host : hosts) {
if (host.get("hostname").getTextValue().equals(hostname)) {
hostId = host.get("hostId").getTextValue();
if (host.get("hostname").textValue().equals(hostname)) {
hostId = host.get("hostId").textValue();
break;
}
}
@ -272,12 +272,12 @@ public class RESTApiClusterManager extends Configured implements ClusterManager
if (roles != null) {
// Iterate through the list of roles, stopping once the requested one is found.
for (JsonNode role : roles) {
if (role.get("hostRef").get("hostId").getTextValue().equals(hostId) &&
if (role.get("hostRef").get("hostId").textValue().equals(hostId) &&
role.get("type")
.getTextValue()
.textValue()
.toLowerCase(Locale.ROOT)
.equals(roleType.toLowerCase(Locale.ROOT))) {
roleValue = role.get(property).getTextValue();
roleValue = role.get(property).textValue();
break;
}
}
@ -306,8 +306,8 @@ public class RESTApiClusterManager extends Configured implements ClusterManager
if (services != null) {
// Iterate through the list of services, stopping once the requested one is found.
for (JsonNode serviceEntry : services) {
if (serviceEntry.get("type").getTextValue().equals(service.toString())) {
serviceName = serviceEntry.get("name").getTextValue();
if (serviceEntry.get("type").textValue().equals(service.toString())) {
serviceName = serviceEntry.get("name").textValue();
break;
}
}
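
The accessor renames above follow Jackson 2's JsonNode API, where Jackson 1's getTextValue() became textValue(). A minimal runnable sketch (hostname value is hypothetical):

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class TextValueSketch {
  public static void main(String[] args) throws Exception {
    JsonNode host = new ObjectMapper().readTree("{\"hostname\":\"rs1.example.com\"}");
    // Jackson 1: host.get("hostname").getTextValue()
    System.out.println(host.get("hostname").textValue()); // rs1.example.com
  }
}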

View File

@ -285,14 +285,6 @@
<groupId>org.apache.zookeeper</groupId>
<artifactId>zookeeper</artifactId>
</dependency>
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
</dependency>
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
@ -305,6 +297,10 @@
<version>${netty.hadoop.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</dependency>
</dependencies>
<profiles>
<!-- Skip the tests in this module -->

View File

@ -18,8 +18,6 @@
*/
package org.apache.hadoop.hbase;
import static org.codehaus.jackson.map.SerializationConfig.Feature.SORT_PROPERTIES_ALPHABETICALLY;
import java.io.IOException;
import java.io.PrintStream;
import java.lang.reflect.Constructor;
@ -71,8 +69,6 @@ import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterAllFilter;
import org.apache.hadoop.hbase.filter.FilterList;
@ -85,7 +81,6 @@ import org.apache.hadoop.hbase.io.hfile.RandomDistribution;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.regionserver.CompactingMemStore;
import org.apache.hadoop.hbase.regionserver.TestHRegionFileSystem;
import org.apache.hadoop.hbase.trace.HBaseHTraceConfiguration;
import org.apache.hadoop.hbase.trace.SpanReceiverHost;
import org.apache.hadoop.hbase.util.*;
@ -98,7 +93,6 @@ import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.mapreduce.lib.reduce.LongSumReducer;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.codehaus.jackson.map.ObjectMapper;
import org.apache.htrace.Sampler;
import org.apache.htrace.Trace;
import org.apache.htrace.TraceScope;
@ -108,6 +102,8 @@ import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFa
import com.codahale.metrics.Histogram;
import com.codahale.metrics.UniformReservoir;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.MapperFeature;
/**
* Script used evaluating HBase performance and scalability. Runs a HBase
@ -133,7 +129,7 @@ public class PerformanceEvaluation extends Configured implements Tool {
private static final Log LOG = LogFactory.getLog(PerformanceEvaluation.class.getName());
private static final ObjectMapper MAPPER = new ObjectMapper();
static {
MAPPER.configure(SORT_PROPERTIES_ALPHABETICALLY, true);
MAPPER.configure(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY, true);
}
public static final String TABLE_NAME = "TestTable";
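
Jackson 1 exposed this switch as SerializationConfig.Feature.SORT_PROPERTIES_ALPHABETICALLY; in Jackson 2 it lives on MapperFeature. A minimal sketch of the effect, with a hypothetical bean:

import com.fasterxml.jackson.databind.MapperFeature;
import com.fasterxml.jackson.databind.ObjectMapper;

public class SortedOutputSketch {
  static class Opts { public int rows = 10; public boolean autoFlush = true; } // hypothetical

  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    mapper.configure(MapperFeature.SORT_PROPERTIES_ALPHABETICALLY, true);
    System.out.println(mapper.writeValueAsString(new Opts()));
    // {"autoFlush":true,"rows":10} -- properties sorted by name
  }
}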

View File

@ -37,9 +37,6 @@ import org.apache.hadoop.hbase.PerformanceEvaluation.RandomReadTest;
import org.apache.hadoop.hbase.PerformanceEvaluation.TestOptions;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.codehaus.jackson.JsonGenerationException;
import org.codehaus.jackson.map.JsonMappingException;
import org.codehaus.jackson.map.ObjectMapper;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@ -47,6 +44,9 @@ import org.junit.experimental.categories.Category;
import com.codahale.metrics.Histogram;
import com.codahale.metrics.Snapshot;
import com.codahale.metrics.UniformReservoir;
import com.fasterxml.jackson.core.JsonGenerationException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
@Category({MiscTests.class, SmallTests.class})
public class TestPerformanceEvaluation {

View File

@ -512,19 +512,6 @@ under the License.
</licenses>
</project>
</supplement>
<supplement>
<project>
<groupId>org.glassfish.jersey.media</groupId>
<artifactId>jersey-media-json-jackson1</artifactId>
<licenses>
<license>
<name>CDDL 1.1</name>
<url>https://glassfish.java.net/public/CDDL+GPL_1_1.html</url>
<distribution>repo</distribution>
</license>
</licenses>
</project>
</supplement>
<supplement>
<project>
<groupId>org.glassfish.web</groupId>

View File

@ -206,6 +206,15 @@
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-server</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-hadoop-compat</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>${compat.module}</artifactId>
<version>${project.version}</version>
</dependency>
<!--Below MR wanted by PE-->
<dependency>
<groupId>org.apache.hbase</groupId>
@ -300,8 +309,8 @@
<artifactId>jersey-container-servlet-core</artifactId>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.media</groupId>
<artifactId>jersey-media-json-jackson1</artifactId>
<groupId>com.fasterxml.jackson.jaxrs</groupId>
<artifactId>jackson-jaxrs-json-provider</artifactId>
</dependency>
<dependency>
<!--For JspC used in ant task-->
@ -320,14 +329,6 @@
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
</dependency>
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>

View File

@ -35,23 +35,22 @@ import org.apache.hadoop.hbase.rest.model.CellSetModel;
import org.apache.hadoop.hbase.rest.model.RowModel;
import org.apache.hadoop.hbase.util.Bytes;
public class ProtobufStreamingOutput implements StreamingOutput {
private static final Log LOG = LogFactory.getLog(ProtobufStreamingOutput.class);
public class ProtobufStreamingUtil implements StreamingOutput {
private static final Log LOG = LogFactory.getLog(ProtobufStreamingUtil.class);
private String contentType;
private ResultScanner resultScanner;
private int limit;
private int fetchSize;
protected ProtobufStreamingUtil(ResultScanner scanner, String type, int limit, int fetchSize) {
protected ProtobufStreamingOutput(ResultScanner scanner, String type, int limit, int fetchSize) {
this.resultScanner = scanner;
this.contentType = type;
this.limit = limit;
this.fetchSize = fetchSize;
if (LOG.isTraceEnabled()) {
LOG.trace("Created ScanStreamingUtil with content type = " + this.contentType
+ " user limit : " + this.limit + " scan fetch size : " + this.fetchSize);
LOG.trace("Created StreamingOutput with content type = " + this.contentType
+ " user limit : " + this.limit + " scan fetch size : " + this.fetchSize);
}
}

View File

@ -27,6 +27,7 @@ import java.util.Set;
import java.util.EnumSet;
import java.util.concurrent.ArrayBlockingQueue;
import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
@ -67,7 +68,6 @@ import org.eclipse.jetty.util.thread.QueuedThreadPool;
import org.eclipse.jetty.jmx.MBeanContainer;
import org.eclipse.jetty.servlet.FilterHolder;
import org.glassfish.jersey.jackson1.Jackson1Feature;
import org.glassfish.jersey.server.ResourceConfig;
import org.glassfish.jersey.servlet.ServletContainer;
@ -238,7 +238,7 @@ public class RESTServer implements Constants {
// set up the Jersey servlet container for Jetty
ResourceConfig application = new ResourceConfig().
packages("org.apache.hadoop.hbase.rest").register(Jackson1Feature.class);
packages("org.apache.hadoop.hbase.rest").register(JacksonJaxbJsonProvider.class);
ServletHolder sh = new ServletHolder(new ServletContainer(application));
// Set the default max thread number to 100 to limit
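
Instead of Jersey's Jackson1Feature, the patch registers Jackson 2's own JAX-RS provider with the ResourceConfig. A minimal sketch of the container setup, assuming jersey-server and jackson-jaxrs-json-provider are on the classpath:

import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
import org.glassfish.jersey.server.ResourceConfig;

public class JerseySetupSketch {
  public static void main(String[] args) {
    ResourceConfig application = new ResourceConfig()
        .packages("org.apache.hadoop.hbase.rest")
        .register(JacksonJaxbJsonProvider.class); // JSON (de)serialization via Jackson 2
    System.out.println(application.getClasses());
  }
}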

View File

@ -23,15 +23,13 @@ import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.StreamingOutput;
import javax.ws.rs.core.UriInfo;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
@ -47,13 +45,14 @@ import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.rest.model.CellModel;
import org.apache.hadoop.hbase.rest.model.RowModel;
import org.codehaus.jackson.annotate.JsonIgnore;
import org.codehaus.jackson.annotate.JsonProperty;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
@InterfaceAudience.Private
public class TableScanResource extends ResourceBase {
private static final Log LOG = LogFactory.getLog(TableScanResource.class);
TableResource tableResource;
ResultScanner results;
int userRequestedLimit;
@ -75,23 +74,14 @@ public class TableScanResource extends ResourceBase {
servlet.getMetrics().incrementSucessfulScanRequests(1);
final Iterator<Result> itr = results.iterator();
return new CellSetModelStream(new ArrayList<RowModel>() {
@Override
public Iterator<RowModel> iterator() {
return new Iterator<RowModel>() {
int count = rowsToSend;
@Override
public boolean hasNext() {
if (count > 0) {
return itr.hasNext();
} else {
return false;
}
}
@Override
public void remove() {
throw new UnsupportedOperationException(
"Remove method cannot be used in CellSetModelStream");
return count > 0 && itr.hasNext();
}
@Override
@ -127,7 +117,7 @@ public class TableScanResource extends ResourceBase {
servlet.getMetrics().incrementRequests(1);
try {
int fetchSize = this.servlet.getConfiguration().getInt(Constants.SCAN_FETCH_SIZE, 10);
ProtobufStreamingUtil stream = new ProtobufStreamingUtil(this.results, contentType,
StreamingOutput stream = new ProtobufStreamingOutput(this.results, contentType,
userRequestedLimit, fetchSize);
servlet.getMetrics().incrementSucessfulScanRequests(1);
ResponseBuilder response = Response.ok(stream);

View File

@ -28,6 +28,7 @@ import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlValue;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
@ -38,7 +39,6 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.protobuf.generated.CellMessage.Cell;
import org.codehaus.jackson.annotate.JsonProperty;
/**
* Representation of a cell. A cell is a single value associated with a column and

View File

@ -31,8 +31,9 @@ import javax.xml.namespace.QName;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.codehaus.jackson.annotate.JsonAnyGetter;
import org.codehaus.jackson.annotate.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
/**
* Representation of a column family schema.

View File

@ -34,7 +34,8 @@ import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.protobuf.generated.NamespacesMessage.Namespaces;
import org.codehaus.jackson.annotate.JsonProperty;
import com.fasterxml.jackson.annotation.JsonProperty;
/**

View File

@ -30,12 +30,12 @@ import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.codehaus.jackson.annotate.JsonProperty;
/**
* Representation of a row. A row is a related set of cells, grouped by common

View File

@ -44,7 +44,6 @@ import org.apache.hadoop.hbase.filter.ColumnPaginationFilter;
import org.apache.hadoop.hbase.filter.ColumnPrefixFilter;
import org.apache.hadoop.hbase.filter.ColumnRangeFilter;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.DependentColumnFilter;
import org.apache.hadoop.hbase.filter.FamilyFilter;
import org.apache.hadoop.hbase.filter.Filter;
@ -77,10 +76,10 @@ import org.apache.hadoop.hbase.util.Base64;
import org.apache.hadoop.hbase.util.ByteStringer;
import org.apache.hadoop.hbase.util.Bytes;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
import com.google.protobuf.ByteString;
import org.codehaus.jackson.jaxrs.JacksonJaxbJsonProvider;
/**
* A representation of Scanner parameters.
*
@ -101,6 +100,7 @@ import org.codehaus.jackson.jaxrs.JacksonJaxbJsonProvider;
* </pre>
*/
@XmlRootElement(name="Scanner")
@JsonInclude(JsonInclude.Include.NON_NULL)
@InterfaceAudience.Private
public class ScannerModel implements ProtobufMessageHandler, Serializable {
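
This is the "explicit non-null serialization directive" from the commit message: with Jackson 2, @JsonInclude(JsonInclude.Include.NON_NULL) at the class level drops null fields from the JSON output. A minimal sketch with a hypothetical stand-in class:

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.ObjectMapper;

public class NonNullSketch {
  @JsonInclude(JsonInclude.Include.NON_NULL)
  static class Scanner {                  // hypothetical stand-in for ScannerModel
    public String startRow = "row1";
    public String endRow = null;          // omitted from output
  }

  public static void main(String[] args) throws Exception {
    System.out.println(new ObjectMapper().writeValueAsString(new Scanner()));
    // {"startRow":"row1"} -- the null endRow is dropped
  }
}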

View File

@ -36,6 +36,8 @@ import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.rest.protobuf.generated.StorageClusterStatusMessage.StorageClusterStatus;
import org.apache.hadoop.hbase.util.Bytes;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
* Representation of the status of a storage cluster:
* <p>
@ -561,6 +563,8 @@ public class StorageClusterStatusModel
*/
@XmlElement(name = "Node")
@XmlElementWrapper(name = "LiveNodes")
// workaround https://github.com/FasterXML/jackson-dataformat-xml/issues/192
@JsonProperty("LiveNodes")
public List<Node> getLiveNodes() {
return liveNodes;
}
@ -570,6 +574,8 @@ public class StorageClusterStatusModel
*/
@XmlElement(name = "Node")
@XmlElementWrapper(name = "DeadNodes")
// workaround https://github.com/FasterXML/jackson-dataformat-xml/issues/192
@JsonProperty("DeadNodes")
public List<String> getDeadNodes() {
return deadNodes;
}
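
Per the linked jackson-dataformat-xml issue, the @XmlElementWrapper name is not honored when naming the JSON property, so the patch pins the name with an explicit @JsonProperty. A minimal sketch of that effect on a getter, with a hypothetical class:

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Arrays;
import java.util.List;

public class WrapperNameSketch {
  static class Status { // hypothetical stand-in for StorageClusterStatusModel
    @JsonProperty("LiveNodes") // pins the JSON name to match the XML wrapper
    public List<String> getLiveNodes() { return Arrays.asList("rs1", "rs2"); }
  }

  public static void main(String[] args) throws Exception {
    System.out.println(new ObjectMapper().writeValueAsString(new Status()));
    // {"LiveNodes":["rs1","rs2"]}
  }
}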

View File

@ -19,13 +19,10 @@
package org.apache.hadoop.hbase.rest.model;
import org.codehaus.jackson.annotate.JsonValue;
import java.io.Serializable;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlValue;
import org.apache.yetus.audience.InterfaceAudience;

View File

@ -43,9 +43,10 @@ import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.rest.protobuf.generated.ColumnSchemaMessage.ColumnSchema;
import org.apache.hadoop.hbase.rest.protobuf.generated.TableSchemaMessage.TableSchema;
import org.apache.hadoop.hbase.util.Bytes;
import org.codehaus.jackson.annotate.JsonAnyGetter;
import org.codehaus.jackson.annotate.JsonAnySetter;
import org.codehaus.jackson.annotate.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import com.fasterxml.jackson.annotation.JsonIgnore;
/**
* A representation of HBase table descriptors.

View File

@ -18,6 +18,7 @@
*/
package org.apache.hadoop.hbase.rest;
import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@ -32,9 +33,7 @@ import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.server.ServerConnector;
import org.eclipse.jetty.servlet.ServletHolder;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.glassfish.jersey.jackson1.Jackson1Feature;
import org.glassfish.jersey.server.ResourceConfig;
import org.glassfish.jersey.servlet.ServletContainer;
@ -63,7 +62,7 @@ public class HBaseRESTTestingUtility {
// set up the Jersey servlet container for Jetty
ResourceConfig app = new ResourceConfig().
packages("org.apache.hadoop.hbase.rest").register(Jackson1Feature.class);
packages("org.apache.hadoop.hbase.rest").register(JacksonJaxbJsonProvider.class);
ServletHolder sh = new ServletHolder(new ServletContainer(app));
// set up Jetty and run the embedded server

View File

@ -30,6 +30,8 @@ import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import javax.xml.bind.Unmarshaller;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
@ -43,8 +45,6 @@ import org.apache.hadoop.hbase.rest.model.CellModel;
import org.apache.hadoop.hbase.rest.model.CellSetModel;
import org.apache.hadoop.hbase.rest.model.RowModel;
import org.apache.hadoop.hbase.util.Bytes;
import org.codehaus.jackson.jaxrs.JacksonJaxbJsonProvider;
import org.codehaus.jackson.map.ObjectMapper;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;

View File

@ -33,7 +33,7 @@ public class TestDeleteRow extends RowResourceBase {
@Test
public void testDeleteNonExistentColumn() throws Exception {
Response response = putValueJson(TABLE, ROW_1, COLUMN_1, VALUE_1);
assertEquals(response.getCode(), 200);
assertEquals(200, response.getCode());
response = checkAndDeleteJson(TABLE, ROW_1, COLUMN_1, VALUE_2);
assertEquals(304, response.getCode());

View File

@ -32,8 +32,6 @@ import org.apache.hadoop.hbase.rest.model.RowModel;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RestTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.codehaus.jackson.jaxrs.JacksonJaxbJsonProvider;
import org.codehaus.jackson.map.ObjectMapper;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
@ -47,9 +45,10 @@ import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import javax.xml.bind.Unmarshaller;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
import static org.junit.Assert.assertEquals;
@ -202,7 +201,7 @@ public class TestMultiRowResource {
assertEquals(response.getCode(), 200);
ObjectMapper mapper =
new JacksonJaxbJsonProvider().locateMapper(CellSetModel.class, MediaType.APPLICATION_JSON_TYPE);
CellSetModel cellSet = (CellSetModel) mapper.readValue(response.getBody(), CellSetModel.class);
CellSetModel cellSet = mapper.readValue(response.getBody(), CellSetModel.class);
assertEquals(2, cellSet.getRows().size());
assertEquals(ROW_1, Bytes.toString(cellSet.getRows().get(0).getKey()));
assertEquals(VALUE_1, Bytes.toString(cellSet.getRows().get(0).getCells().get(0).getValue()));

View File

@ -48,8 +48,6 @@ import org.apache.hadoop.hbase.rest.model.TestNamespacesInstanceModel;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RestTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.codehaus.jackson.jaxrs.JacksonJaxbJsonProvider;
import org.codehaus.jackson.map.ObjectMapper;
import static org.junit.Assert.*;
@ -58,6 +56,9 @@ import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
@Category({RestTests.class, MediumTests.class})
public class TestNamespacesInstanceResource {
private static String NAMESPACE1 = "TestNamespacesInstanceResource1";
@ -265,11 +266,11 @@ public class TestNamespacesInstanceResource {
// Try REST post and puts with invalid content.
response = client.post(namespacePath1, Constants.MIMETYPE_JSON, toXML(model1));
assertEquals(400, response.getCode());
assertEquals(500, response.getCode());
String jsonString = jsonMapper.writeValueAsString(model2);
response = client.put(namespacePath2, Constants.MIMETYPE_XML, Bytes.toBytes(jsonString));
assertEquals(400, response.getCode());
response = client.post(namespacePath3, Constants.MIMETYPE_PROTOBUF, toXML(model1));
response = client.post(namespacePath3, Constants.MIMETYPE_PROTOBUF, toXML(model3));
assertEquals(500, response.getCode());
NamespaceDescriptor nd1 = findNamespace(admin, NAMESPACE1);

View File

@ -21,13 +21,14 @@ package org.apache.hadoop.hbase.rest;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseCommonTestingUtility;
import org.apache.http.Header;
import org.apache.http.message.BasicHeader;
@ -47,6 +48,7 @@ import org.apache.hadoop.hbase.util.Bytes;
import static org.junit.Assert.*;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
@ -57,6 +59,8 @@ import org.junit.runners.Parameterized;
@Category({RestTests.class, MediumTests.class})
@RunWith(Parameterized.class)
public class TestSchemaResource {
private static final Log LOG = LogFactory.getLog(TestSchemaResource.class);
private static String TABLE1 = "TestSchemaResource1";
private static String TABLE2 = "TestSchemaResource2";
@ -72,11 +76,8 @@ public class TestSchemaResource {
private static boolean csrfEnabled = true;
@Parameterized.Parameters
public static Collection<Object[]> data() {
List<Object[]> params = new ArrayList<>(2);
params.add(new Object[] {Boolean.TRUE});
params.add(new Object[] {Boolean.FALSE});
return params;
public static Collection<Object[]> parameters() {
return HBaseCommonTestingUtility.BOOLEAN_PARAMETERIZED;
}
public TestSchemaResource(Boolean csrf) {
@ -104,6 +105,21 @@ public class TestSchemaResource {
TEST_UTIL.shutdownMiniCluster();
}
@After
public void tearDown() throws Exception {
Admin admin = TEST_UTIL.getAdmin();
for (String table : new String[] {TABLE1, TABLE2}) {
TableName t = TableName.valueOf(table);
if (admin.tableExists(t)) {
admin.disableTable(t);
admin.deleteTable(t);
}
}
conf.set("hbase.rest.readonly", "false");
}
private static byte[] toXML(TableSchemaModel model) throws JAXBException {
StringWriter writer = new StringWriter();
context.createMarshaller().marshal(model, writer);
@ -123,7 +139,7 @@ public class TestSchemaResource {
Response response;
Admin admin = TEST_UTIL.getAdmin();
assertFalse(admin.tableExists(TableName.valueOf(TABLE1)));
assertFalse("Table " + TABLE1 + " should not exist", admin.tableExists(TableName.valueOf(TABLE1)));
// create the table
model = testTableSchemaModel.buildTestModel(TABLE1);
@ -131,27 +147,28 @@ public class TestSchemaResource {
if (csrfEnabled) {
// test put operation is forbidden without custom header
response = client.put(schemaPath, Constants.MIMETYPE_XML, toXML(model));
assertEquals(response.getCode(), 400);
assertEquals(400, response.getCode());
}
response = client.put(schemaPath, Constants.MIMETYPE_XML, toXML(model), extraHdr);
assertEquals(response.getCode(), 201);
assertEquals("put failed with csrf " + (csrfEnabled ? "enabled" : "disabled"),
201, response.getCode());
// recall the same put operation but in read-only mode
conf.set("hbase.rest.readonly", "true");
response = client.put(schemaPath, Constants.MIMETYPE_XML, toXML(model), extraHdr);
assertEquals(response.getCode(), 403);
assertEquals(403, response.getCode());
// retrieve the schema and validate it
response = client.get(schemaPath, Constants.MIMETYPE_XML);
assertEquals(response.getCode(), 200);
assertEquals(200, response.getCode());
assertEquals(Constants.MIMETYPE_XML, response.getHeader("content-type"));
model = fromXML(response.getBody());
testTableSchemaModel.checkModel(model, TABLE1);
// with json retrieve the schema and validate it
response = client.get(schemaPath, Constants.MIMETYPE_JSON);
assertEquals(response.getCode(), 200);
assertEquals(200, response.getCode());
assertEquals(Constants.MIMETYPE_JSON, response.getHeader("content-type"));
model = testTableSchemaModel.fromJSON(Bytes.toString(response.getBody()));
testTableSchemaModel.checkModel(model, TABLE1);
@ -164,14 +181,14 @@ public class TestSchemaResource {
// test delete schema operation is forbidden in read-only mode
response = client.delete(schemaPath, extraHdr);
assertEquals(response.getCode(), 403);
assertEquals(403, response.getCode());
// return read-only setting back to default
conf.set("hbase.rest.readonly", "false");
// delete the table and make sure HBase concurs
response = client.delete(schemaPath, extraHdr);
assertEquals(response.getCode(), 200);
assertEquals(200, response.getCode());
assertFalse(admin.tableExists(TableName.valueOf(TABLE1)));
}
@ -191,11 +208,12 @@ public class TestSchemaResource {
if (csrfEnabled) {
// test put operation is forbidden without custom header
response = client.put(schemaPath, Constants.MIMETYPE_PROTOBUF, model.createProtobufOutput());
assertEquals(response.getCode(), 400);
assertEquals(400, response.getCode());
}
response = client.put(schemaPath, Constants.MIMETYPE_PROTOBUF,
model.createProtobufOutput(), extraHdr);
assertEquals(response.getCode(), 201);
assertEquals("put failed with csrf " + (csrfEnabled ? "enabled" : "disabled"),
201, response.getCode());
// recall the same put operation but in read-only mode
conf.set("hbase.rest.readonly", "true");

View File

@ -43,6 +43,8 @@ import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.parsers.SAXParserFactory;
import javax.xml.stream.XMLStreamException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
@ -61,11 +63,6 @@ import org.apache.hadoop.hbase.rest.model.RowModel;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RestTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.JsonParser;
import org.codehaus.jackson.JsonToken;
import org.codehaus.jackson.jaxrs.JacksonJaxbJsonProvider;
import org.codehaus.jackson.map.ObjectMapper;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
@ -73,8 +70,15 @@ import org.junit.experimental.categories.Category;
import org.xml.sax.InputSource;
import org.xml.sax.XMLReader;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
@Category({RestTests.class, MediumTests.class})
public class TestTableScan {
private static final Log LOG = LogFactory.getLog(TestTableScan.class);
private static final TableName TABLE = TableName.valueOf("TestScanResource");
private static final String CFA = "a";
@ -201,7 +205,7 @@ public class TestTableScan {
builder.append("?");
builder.append(Constants.SCAN_COLUMN + "=" + COLUMN_1);
builder.append("&");
builder.append(Constants.SCAN_LIMIT + "=20");
builder.append(Constants.SCAN_LIMIT + "=2");
Response response = client.get("/" + TABLE + builder.toString(),
Constants.MIMETYPE_JSON);
assertEquals(200, response.getCode());
@ -210,7 +214,7 @@ public class TestTableScan {
.locateMapper(CellSetModel.class, MediaType.APPLICATION_JSON_TYPE);
CellSetModel model = mapper.readValue(response.getStream(), CellSetModel.class);
int count = TestScannerResource.countCellSet(model);
assertEquals(20, count);
assertEquals(2, count);
checkRowsNotNull(model);
//Test scanning with no limit.
@ -305,40 +309,8 @@ public class TestTableScan {
@Test
public void testStreamingJSON() throws Exception {
// Test scanning particular columns with limit.
StringBuilder builder = new StringBuilder();
builder.append("/*");
builder.append("?");
builder.append(Constants.SCAN_COLUMN + "=" + COLUMN_1);
builder.append("&");
builder.append(Constants.SCAN_LIMIT + "=20");
Response response = client.get("/" + TABLE + builder.toString(),
Constants.MIMETYPE_JSON);
assertEquals(200, response.getCode());
assertEquals(Constants.MIMETYPE_JSON, response.getHeader("content-type"));
ObjectMapper mapper = new JacksonJaxbJsonProvider()
.locateMapper(CellSetModel.class, MediaType.APPLICATION_JSON_TYPE);
CellSetModel model = mapper.readValue(response.getStream(), CellSetModel.class);
int count = TestScannerResource.countCellSet(model);
assertEquals(20, count);
checkRowsNotNull(model);
//Test scanning with no limit.
builder = new StringBuilder();
builder.append("/*");
builder.append("?");
builder.append(Constants.SCAN_COLUMN + "=" + COLUMN_2);
response = client.get("/" + TABLE + builder.toString(),
Constants.MIMETYPE_JSON);
assertEquals(200, response.getCode());
assertEquals(Constants.MIMETYPE_JSON, response.getHeader("content-type"));
model = mapper.readValue(response.getStream(), CellSetModel.class);
count = TestScannerResource.countCellSet(model);
assertEquals(expectedRows2, count);
checkRowsNotNull(model);
//Test with start row and end row.
builder = new StringBuilder();
StringBuilder builder = new StringBuilder();
builder.append("/*");
builder.append("?");
builder.append(Constants.SCAN_COLUMN + "=" + COLUMN_1);
@ -346,11 +318,13 @@ public class TestTableScan {
builder.append(Constants.SCAN_START_ROW + "=aaa");
builder.append("&");
builder.append(Constants.SCAN_END_ROW + "=aay");
response = client.get("/" + TABLE + builder.toString(),
Response response = client.get("/" + TABLE + builder.toString(),
Constants.MIMETYPE_JSON);
assertEquals(200, response.getCode());
count = 0;
int count = 0;
ObjectMapper mapper = new JacksonJaxbJsonProvider()
.locateMapper(CellSetModel.class, MediaType.APPLICATION_JSON_TYPE);
JsonFactory jfactory = new JsonFactory(mapper);
JsonParser jParser = jfactory.createJsonParser(response.getStream());
boolean found = false;
@ -390,7 +364,7 @@ public class TestTableScan {
int rowCount = readProtobufStream(response.getStream());
assertEquals(15, rowCount);
//Test with start row and end row.
//Test with start row and end row.
builder = new StringBuilder();
builder.append("/*");
builder.append("?");

View File

@ -36,10 +36,11 @@ import org.apache.hadoop.hbase.rest.model.VersionModel;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RestTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.codehaus.jackson.jaxrs.JacksonJaxbJsonProvider;
import org.codehaus.jackson.map.ObjectMapper;
import org.glassfish.jersey.servlet.ServletContainer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
import static org.junit.Assert.*;
import org.junit.AfterClass;
@ -99,7 +100,7 @@ public class TestVersionResource {
@Test
public void testGetStargateVersionText() throws IOException {
Response response = client.get("/version", Constants.MIMETYPE_TEXT);
assertTrue(response.getCode() == 200);
assertEquals(200, response.getCode());
assertEquals(Constants.MIMETYPE_TEXT, response.getHeader("content-type"));
String body = Bytes.toString(response.getBody());
assertTrue(body.length() > 0);
@ -117,7 +118,7 @@ public class TestVersionResource {
@Test
public void testGetStargateVersionXML() throws IOException, JAXBException {
Response response = client.get("/version", Constants.MIMETYPE_XML);
assertTrue(response.getCode() == 200);
assertEquals(200, response.getCode());
assertEquals(Constants.MIMETYPE_XML, response.getHeader("content-type"));
VersionModel model = (VersionModel)
context.createUnmarshaller().unmarshal(
@ -129,7 +130,7 @@ public class TestVersionResource {
@Test
public void testGetStargateVersionJSON() throws IOException {
Response response = client.get("/version", Constants.MIMETYPE_JSON);
assertTrue(response.getCode() == 200);
assertEquals(200, response.getCode());
assertEquals(Constants.MIMETYPE_JSON, response.getHeader("content-type"));
ObjectMapper mapper = new JacksonJaxbJsonProvider()
.locateMapper(VersionModel.class, MediaType.APPLICATION_JSON_TYPE);
@ -142,13 +143,13 @@ public class TestVersionResource {
@Test
public void testGetStargateVersionPB() throws IOException {
Response response = client.get("/version", Constants.MIMETYPE_PROTOBUF);
assertTrue(response.getCode() == 200);
assertEquals(200, response.getCode());
assertEquals(Constants.MIMETYPE_PROTOBUF, response.getHeader("content-type"));
VersionModel model = new VersionModel();
model.getObjectFromMessage(response.getBody());
validate(model);
response = client.get("/version", Constants.MIMETYPE_PROTOBUF_IETF);
assertTrue(response.getCode() == 200);
assertEquals(200, response.getCode());
assertEquals(Constants.MIMETYPE_PROTOBUF_IETF, response.getHeader("content-type"));
model = new VersionModel();
model.getObjectFromMessage(response.getBody());
@ -158,7 +159,7 @@ public class TestVersionResource {
@Test
public void testGetStorageClusterVersionText() throws IOException {
Response response = client.get("/version/cluster", Constants.MIMETYPE_TEXT);
assertTrue(response.getCode() == 200);
assertEquals(200, response.getCode());
assertEquals(Constants.MIMETYPE_TEXT, response.getHeader("content-type"));
}
@ -166,7 +167,7 @@ public class TestVersionResource {
public void testGetStorageClusterVersionXML() throws IOException,
JAXBException {
Response response = client.get("/version/cluster",Constants.MIMETYPE_XML);
assertTrue(response.getCode() == 200);
assertEquals(200, response.getCode());
assertEquals(Constants.MIMETYPE_XML, response.getHeader("content-type"));
StorageClusterVersionModel clusterVersionModel =
(StorageClusterVersionModel)
@ -180,7 +181,7 @@ public class TestVersionResource {
@Test
public void testGetStorageClusterVersionJSON() throws IOException {
Response response = client.get("/version/cluster", Constants.MIMETYPE_JSON);
assertTrue(response.getCode() == 200);
assertEquals(200, response.getCode());
assertEquals(Constants.MIMETYPE_JSON, response.getHeader("content-type"));
ObjectMapper mapper = new JacksonJaxbJsonProvider()
.locateMapper(StorageClusterVersionModel.class, MediaType.APPLICATION_JSON_TYPE);

View File

@ -62,14 +62,14 @@ public class TestColumnSchemaModel extends TestModelBase<ColumnSchemaModel> {
}
protected void checkModel(ColumnSchemaModel model) {
assertEquals(model.getName(), COLUMN_NAME);
assertEquals(model.__getBlockcache(), BLOCKCACHE);
assertEquals(model.__getBlocksize(), BLOCKSIZE);
assertEquals(model.__getBloomfilter(), BLOOMFILTER);
assertTrue(model.__getCompression().equalsIgnoreCase(COMPRESSION));
assertEquals(model.__getInMemory(), IN_MEMORY);
assertEquals(model.__getTTL(), TTL);
assertEquals(model.__getVersions(), VERSIONS);
assertEquals("name", COLUMN_NAME, model.getName());
assertEquals("block cache", BLOCKCACHE, model.__getBlockcache());
assertEquals("block size", BLOCKSIZE, model.__getBlocksize());
assertEquals("bloomfilter", BLOOMFILTER, model.__getBloomfilter());
assertTrue("compression", model.__getCompression().equalsIgnoreCase(COMPRESSION));
assertEquals("in memory", IN_MEMORY, model.__getInMemory());
assertEquals("ttl", TTL, model.__getTTL());
assertEquals("versions", VERSIONS, model.__getVersions());
}
public void testFromPB() throws Exception {

View File

@ -24,9 +24,6 @@ import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.rest.ProtobufMessageHandler;
import org.apache.hadoop.hbase.rest.provider.JAXBContextResolver;
import org.apache.hadoop.hbase.util.Base64;
import org.codehaus.jackson.jaxrs.JacksonJaxbJsonProvider;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.node.ObjectNode;
import org.junit.experimental.categories.Category;
import javax.ws.rs.core.MediaType;
@ -35,6 +32,9 @@ import javax.xml.bind.JAXBException;
import java.io.IOException;
import java.io.StringReader;
import java.io.StringWriter;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.jaxrs.json.JacksonJaxbJsonProvider;
@Category({RestTests.class, SmallTests.class})
public abstract class TestModelBase<T> extends TestCase {

View File

@ -23,6 +23,8 @@ import java.util.Iterator;
import javax.xml.bind.JAXBContext;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.testclassification.RestTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
@ -30,6 +32,7 @@ import org.junit.experimental.categories.Category;
@Category({RestTests.class, SmallTests.class})
public class TestTableSchemaModel extends TestModelBase<TableSchemaModel> {
private static final Log LOG = LogFactory.getLog(TestTableSchemaModel.class);
public static final String TABLE_NAME = "testTable";
private static final boolean IS_META = false;

View File

@ -483,12 +483,12 @@
<artifactId>jetty-security</artifactId>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.containers</groupId>
<artifactId>jersey-container-servlet-core</artifactId>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.media</groupId>
<artifactId>jersey-media-json-jackson1</artifactId>
<groupId>org.glassfish.jersey.containers</groupId>
<artifactId>jersey-container-servlet-core</artifactId>
</dependency>
<dependency>
<!--For JspC used in ant task-->
@ -502,10 +502,6 @@
<groupId>org.glassfish</groupId>
<artifactId>javax.el</artifactId>
</dependency>
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
</dependency>
<dependency>
<groupId>org.codehaus.jettison</groupId>
<artifactId>jettison</artifactId>
@ -516,10 +512,6 @@
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
</dependency>
<!-- General dependencies -->
<dependency>
<groupId>com.github.stephenc.findbugs</groupId>

View File

@ -17,8 +17,8 @@
*/
package org.apache.hadoop.hbase.io.hfile;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import org.apache.hadoop.hbase.metrics.impl.FastLongHistogram;
import org.codehaus.jackson.annotate.JsonIgnoreProperties;
/**
* Snapshot of block cache age in cache.

View File

@ -24,16 +24,15 @@ import java.util.NavigableSet;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.concurrent.ConcurrentSkipListSet;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.core.JsonGenerationException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.metrics.impl.FastLongHistogram;
import org.apache.hadoop.hbase.util.Bytes;
import org.codehaus.jackson.JsonGenerationException;
import org.codehaus.jackson.annotate.JsonIgnoreProperties;
import org.codehaus.jackson.map.JsonMappingException;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.SerializationConfig;
/**
* Utility for aggregating counts in CachedBlocks and toString/toJSON CachedBlocks and BlockCaches.
@ -50,9 +49,9 @@ public class BlockCacheUtil {
*/
private static final ObjectMapper MAPPER = new ObjectMapper();
static {
MAPPER.configure(SerializationConfig.Feature.FAIL_ON_EMPTY_BEANS, false);
MAPPER.configure(SerializationConfig.Feature.FLUSH_AFTER_WRITE_VALUE, true);
MAPPER.configure(SerializationConfig.Feature.INDENT_OUTPUT, true);
MAPPER.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
MAPPER.configure(SerializationFeature.FLUSH_AFTER_WRITE_VALUE, true);
MAPPER.configure(SerializationFeature.INDENT_OUTPUT, true);
}
/**
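
The serialization switches map one-to-one: Jackson 1's SerializationConfig.Feature.* become Jackson 2's SerializationFeature.*. A minimal sketch (FLUSH_AFTER_WRITE_VALUE omitted for brevity):

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import java.util.Collections;

public class SerializationFeatureSketch {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
    mapper.configure(SerializationFeature.INDENT_OUTPUT, true);
    System.out.println(mapper.writeValueAsString(Collections.singletonMap("cachedBlocks", 42)));
    // pretty-printed:
    // {
    //   "cachedBlocks" : 42
    // }
  }
}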

View File

@ -45,14 +45,15 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.hbase.util.HasThread;
import org.apache.hadoop.util.StringUtils;
import org.codehaus.jackson.annotate.JsonIgnore;
import org.codehaus.jackson.annotate.JsonIgnoreProperties;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Objects;
import org.apache.hadoop.hbase.shaded.com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
/**
* A block cache implementation that is memory-aware using {@link HeapSize},
* memory-bound using an LRU eviction algorithm, and concurrent: backed by a

View File

@ -37,7 +37,7 @@ import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.BucketEntry;
import org.codehaus.jackson.annotate.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import org.apache.hadoop.hbase.shaded.com.google.common.base.MoreObjects;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;

View File

@ -36,6 +36,7 @@ import java.util.Map;
import java.util.Optional;
import java.util.concurrent.atomic.LongAdder;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
@ -68,7 +69,6 @@ import org.apache.hadoop.security.token.SecretManager;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
import org.codehaus.jackson.map.ObjectMapper;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingService;

View File

@ -18,8 +18,8 @@
*/
package org.apache.hadoop.hbase.monitoring;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.yetus.audience.InterfaceAudience;
import org.codehaus.jackson.map.ObjectMapper;
import java.io.IOException;
import java.util.HashMap;

View File

@ -41,11 +41,11 @@ import javax.management.openmbean.CompositeData;
import javax.management.openmbean.CompositeType;
import javax.management.openmbean.TabularData;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerationException;
import com.fasterxml.jackson.core.JsonGenerator;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.JsonGenerationException;
import org.codehaus.jackson.JsonGenerator;
/**
* Utility for doing JSON and MBeans.

View File

@ -38,11 +38,11 @@ import javax.management.ObjectName;
import javax.management.ReflectionException;
import javax.management.openmbean.CompositeData;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.JsonProcessingException;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jettison.json.JSONException;
public final class JSONMetricUtil {
@ -112,8 +112,8 @@ public final class JSONMetricUtil {
return sw.toString();
}
public static JsonNode mappStringToJsonNode(String jsonString) throws
JsonProcessingException, IOException {
public static JsonNode mappStringToJsonNode(String jsonString)
throws JsonProcessingException, IOException {
ObjectMapper mapper = new ObjectMapper();
JsonNode node = mapper.readTree(jsonString);
return node;

View File

@ -27,6 +27,7 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
@ -47,7 +48,6 @@ import org.apache.yetus.audience.InterfaceStability;
import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
import org.codehaus.jackson.map.ObjectMapper;
/**
* WALPrettyPrinter prints the contents of a given WAL with a variety of

View File

@ -29,7 +29,7 @@
import="java.lang.management.GarbageCollectorMXBean"
import="org.apache.hadoop.hbase.util.JSONMetricUtil"
import="org.apache.hadoop.hbase.procedure2.util.StringUtils"
import="org.codehaus.jackson.JsonNode"
import="com.fasterxml.jackson.databind.JsonNode"
%>
<%
RuntimeMXBean runtimeBean = ManagementFactory.getRuntimeMXBean();

View File

@ -29,7 +29,7 @@
import="java.lang.management.GarbageCollectorMXBean"
import="org.apache.hadoop.hbase.util.JSONMetricUtil"
import="org.apache.hadoop.hbase.procedure2.util.StringUtils"
import="org.codehaus.jackson.JsonNode"
import="com.fasterxml.jackson.databind.JsonNode"
%>
<%
RuntimeMXBean runtimeBean = ManagementFactory.getRuntimeMXBean();

View File

@ -29,7 +29,7 @@
import="java.lang.management.GarbageCollectorMXBean"
import="org.apache.hadoop.hbase.util.JSONMetricUtil"
import="org.apache.hadoop.hbase.procedure2.util.StringUtils"
import="org.codehaus.jackson.JsonNode"
import="com.fasterxml.jackson.databind.JsonNode"
%>
<%
RuntimeMXBean runtimeBean = ManagementFactory.getRuntimeMXBean();

View File

@ -23,6 +23,8 @@ import java.io.IOException;
import java.util.Map;
import java.util.NavigableSet;
import com.fasterxml.jackson.core.JsonGenerationException;
import com.fasterxml.jackson.databind.JsonMappingException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
@ -32,8 +34,6 @@ import org.apache.hadoop.hbase.testclassification.IOTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.io.hfile.TestCacheConfig.DataCacheEntry;
import org.apache.hadoop.hbase.io.hfile.TestCacheConfig.IndexCacheEntry;
import org.codehaus.jackson.JsonGenerationException;
import org.codehaus.jackson.map.JsonMappingException;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

View File

@ -22,6 +22,7 @@ import java.lang.management.GarbageCollectorMXBean;
import java.lang.management.ManagementFactory;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import javax.management.MalformedObjectNameException;
import javax.management.ObjectName;
@ -29,13 +30,14 @@ import javax.management.openmbean.CompositeData;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.JsonProcessingException;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@ -51,17 +53,14 @@ public class TestJSONMetricUtil {
String[] values = {"MemoryPool", "Par Eden Space"};
String[] values2 = {"MemoryPool", "Par Eden Space", "Test"};
String[] emptyValue = {};
Hashtable<String, String> properties = JSONMetricUtil.buldKeyValueTable(keys, values);
Hashtable<String, String> nullObject = JSONMetricUtil.buldKeyValueTable(keys, values2);
Hashtable<String, String> nullObject1 = JSONMetricUtil.buldKeyValueTable(keys, emptyValue);
Hashtable<String, String> nullObject2 = JSONMetricUtil.buldKeyValueTable(emptyKey, values2);
Hashtable<String, String> nullObject3 = JSONMetricUtil.buldKeyValueTable(emptyKey, emptyValue);
assertEquals(properties.get("type"), values[0]);
assertEquals(properties.get("name"), values[1]);
assertEquals(nullObject, null);
assertEquals(nullObject1, null);
assertEquals(nullObject2, null);
assertEquals(nullObject3, null);
Map<String, String> properties = JSONMetricUtil.buldKeyValueTable(keys, values);
assertEquals(values[0], properties.get("type"));
assertEquals(values[1], properties.get("name"));
assertNull(JSONMetricUtil.buldKeyValueTable(keys, values2));
assertNull(JSONMetricUtil.buldKeyValueTable(keys, emptyValue));
assertNull(JSONMetricUtil.buldKeyValueTable(emptyKey, values2));
assertNull(JSONMetricUtil.buldKeyValueTable(emptyKey, emptyValue));
}
@Test
@ -73,10 +72,10 @@ public class TestJSONMetricUtil {
JsonNode r2 = JSONMetricUtil.searchJson(node, "data2");
JsonNode r3 = JSONMetricUtil.searchJson(node, "data3");
JsonNode r4 = JSONMetricUtil.searchJson(node, "data4");
assertEquals(r1.getIntValue(), 100);
assertEquals(r2.getTextValue(), "hello");
assertEquals(r3.get(0).getIntValue(), 1);
assertEquals(r4.getIntValue(), 0);
assertEquals(100, r1.intValue());
assertEquals("hello", r2.textValue());
assertEquals(1, r3.get(0).intValue());
assertEquals(0, r4.intValue());
}
@Test

View File

@ -145,10 +145,6 @@
<groupId>org.glassfish.jersey.containers</groupId>
<artifactId>jersey-container-servlet-core</artifactId>
</exclusion>
<exclusion>
<groupId>org.glassfish.jersey.media</groupId>
<artifactId>jersey-media-json-jackson1</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>

View File

@ -176,6 +176,10 @@
<pattern>com.dropwizard</pattern>
<shadedPattern>org.apache.hadoop.hbase.shaded.com.dropwizard</shadedPattern>
</relocation>
<relocation>
<pattern>com.fasterxml</pattern>
<shadedPattern>org.apache.hadoop.hbase.shaded.com.fasterxml</shadedPattern>
</relocation>
<!-- top level io -->
<relocation>

View File

@ -74,7 +74,7 @@ module Hbase
# Returns a filtered list of tasks on the given host
def tasksOnHost(filter, host)
java_import 'java.net.URL'
java_import 'org.codehaus.jackson.map.ObjectMapper'
java_import 'com.fasterxml.jackson.databind.ObjectMapper'
infoport = @admin.getClusterStatus.getLoad(host).getInfoServerPort.to_s

View File

@ -123,6 +123,21 @@
<version>3.1.4</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.module</groupId>
<artifactId>jackson-module-scala_${scala.binary.version}</artifactId>
<version>${jackson.version}</version>
<exclusions>
<exclusion>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
</exclusion>
<exclusion>
<groupId>org.scala-lang</groupId>
<artifactId>scala-reflect</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
@ -334,6 +349,11 @@
<artifactId>hbase-protocol</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-protocol-shaded</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-annotations</artifactId>

pom.xml
View File

@ -1399,11 +1399,10 @@
<httpclient.version>4.5.3</httpclient.version>
<httpcore.version>4.4.6</httpcore.version>
<metrics-core.version>3.2.1</metrics-core.version>
<jackson.version>2.23.2</jackson.version>
<jackson.version>2.9.1</jackson.version>
<jaxb-api.version>2.2.12</jaxb-api.version>
<jetty.version>9.4.6.v20170531</jetty.version>
<jetty-jsp.version>9.2.19.v20160908</jetty-jsp.version>
<jackson1.version>1.9.13</jackson1.version>
<servlet.api.version>3.1.0</servlet.api.version>
<wx.rs.api.version>2.0.1</wx.rs.api.version>
<jersey.version>2.25.1</jersey.version>
@ -1871,28 +1870,15 @@
<artifactId>joni</artifactId>
<version>${joni.version}</version>
</dependency>
<!-- While jackson is also a dependency of jersey it
can bring in jars from different, incompatible versions. We force
the same version with these dependencies -->
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
<version>${jackson1.version}</version>
<groupId>com.fasterxml.jackson.jaxrs</groupId>
<artifactId>jackson-jaxrs-json-provider</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
<version>${jackson1.version}</version>
</dependency>
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-jaxrs</artifactId>
<version>${jackson1.version}</version>
</dependency>
<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-xc</artifactId>
<version>${jackson1.version}</version>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>org.jamon</groupId>
@ -1981,11 +1967,6 @@
<artifactId>jersey-client</artifactId>
<version>${jersey.version}</version>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.media</groupId>
<artifactId>jersey-media-json-jackson1</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<!--This lib has JspC in it. Needed precompiling jsps in hbase-rest, etc.-->
<groupId>org.glassfish.web</groupId>
@ -2409,6 +2390,14 @@
<groupId>javax.inject</groupId>
<artifactId>javax.inject</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
@ -2467,6 +2456,14 @@
<groupId>xerces</groupId>
<artifactId>xercesImpl</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
</exclusion>
</exclusions>
<version>${hadoop-two.version}</version>
</dependency>
@ -2497,6 +2494,14 @@
<groupId>xerces</groupId>
<artifactId>xercesImpl</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
@ -2537,6 +2542,14 @@
<groupId>junit</groupId>
<artifactId>junit</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
@ -2623,6 +2636,14 @@
<groupId>javax.inject</groupId>
<artifactId>javax.inject</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
@ -2669,6 +2690,14 @@
<groupId>xerces</groupId>
<artifactId>xercesImpl</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
</exclusion>
</exclusions>
<version>${hadoop-three.version}</version>
</dependency>
@ -2695,6 +2724,14 @@
<groupId>xerces</groupId>
<artifactId>xercesImpl</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
@ -2741,6 +2778,14 @@
<groupId>junit</groupId>
<artifactId>junit</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>