HDFS-11712. Ozone: Reuse ObjectMapper instance to improve the performance. Contributed by Yiqun Lin.

This commit is contained in:
Anu Engineer 2017-05-03 14:25:41 -07:00 committed by Owen O'Malley
parent ee8708ca31
commit a28557ac03
17 changed files with 322 additions and 231 deletions

View File

@ -19,19 +19,35 @@
package org.apache.hadoop.ozone.web.exceptions;
import org.apache.hadoop.classification.InterfaceAudience;
import org.codehaus.jackson.annotate.JsonAutoDetect;
import org.codehaus.jackson.annotate.JsonProperty;
import org.codehaus.jackson.map.ObjectMapper;
import java.io.IOException;
import org.apache.hadoop.classification.InterfaceAudience;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
/**
* Class the represents various errors returned by the
* Ozone Layer.
*/
@InterfaceAudience.Private
public class OzoneException extends Exception {
private static final ObjectReader READER =
new ObjectMapper().readerFor(OzoneException.class);
private static final ObjectMapper MAPPER;
static {
MAPPER = new ObjectMapper();
MAPPER.setVisibility(
MAPPER.getSerializationConfig().getDefaultVisibilityChecker()
.withCreatorVisibility(JsonAutoDetect.Visibility.NONE)
.withFieldVisibility(JsonAutoDetect.Visibility.NONE)
.withGetterVisibility(JsonAutoDetect.Visibility.NONE)
.withIsGetterVisibility(JsonAutoDetect.Visibility.NONE)
.withSetterVisibility(JsonAutoDetect.Visibility.NONE));
}
@JsonProperty("httpCode")
private long httpCode;
@ -211,15 +227,7 @@ public class OzoneException extends Exception {
*/
public String toJsonString() {
try {
ObjectMapper mapper = new ObjectMapper();
mapper.setVisibilityChecker(
mapper.getSerializationConfig().getDefaultVisibilityChecker()
.withCreatorVisibility(JsonAutoDetect.Visibility.NONE)
.withFieldVisibility(JsonAutoDetect.Visibility.NONE)
.withGetterVisibility(JsonAutoDetect.Visibility.NONE)
.withIsGetterVisibility(JsonAutoDetect.Visibility.NONE)
.withSetterVisibility(JsonAutoDetect.Visibility.NONE));
return mapper.writeValueAsString(this);
return MAPPER.writeValueAsString(this);
} catch (IOException ex) {
// TODO : Log this error on server side.
}
@ -237,7 +245,6 @@ public class OzoneException extends Exception {
* @throws IOException
*/
public static OzoneException parse(String jsonString) throws IOException {
ObjectMapper mapper = new ObjectMapper();
return mapper.readValue(jsonString, OzoneException.class);
return READER.readValue(jsonString);
}
}

View File

@ -24,7 +24,7 @@ import org.apache.hadoop.ozone.web.client.OzoneVolume;
import org.apache.hadoop.ozone.web.exceptions.OzoneException;
import org.apache.hadoop.ozone.web.ozShell.Handler;
import org.apache.hadoop.ozone.web.ozShell.Shell;
import org.codehaus.jackson.map.ObjectMapper;
import org.apache.hadoop.ozone.web.utils.JsonUtils;
import java.io.IOException;
import java.net.URI;
@ -89,11 +89,8 @@ public class CreateBucketHandler extends Handler {
OzoneBucket bucket = vol.createBucket(bucketName);
if (cmd.hasOption(Shell.VERBOSE)) {
ObjectMapper mapper = new ObjectMapper();
Object json = mapper.readValue(bucket.getBucketInfo().toJsonString(),
Object.class);
System.out.printf("%s%n", mapper.writerWithDefaultPrettyPrinter()
.writeValueAsString(json));
System.out.printf("%s%n", JsonUtils.toJsonStringWithDefaultPrettyPrinter(
bucket.getBucketInfo().toJsonString()));
}
}
}

View File

@ -25,7 +25,7 @@ import org.apache.hadoop.ozone.web.client.OzoneVolume;
import org.apache.hadoop.ozone.web.exceptions.OzoneException;
import org.apache.hadoop.ozone.web.ozShell.Handler;
import org.apache.hadoop.ozone.web.ozShell.Shell;
import org.codehaus.jackson.map.ObjectMapper;
import org.apache.hadoop.ozone.web.utils.JsonUtils;
import java.io.IOException;
import java.net.URI;
@ -87,11 +87,8 @@ public class InfoBucketHandler extends Handler {
OzoneVolume vol = client.getVolume(volumeName);
OzoneBucket bucket = vol.getBucket(bucketName);
ObjectMapper mapper = new ObjectMapper();
Object json =
mapper.readValue(bucket.getBucketInfo().toJsonString(), Object.class);
System.out.printf("%s%n", mapper.writerWithDefaultPrettyPrinter()
.writeValueAsString(json));
System.out.printf("%s%n", JsonUtils.toJsonStringWithDefaultPrettyPrinter(
bucket.getBucketInfo().toJsonString()));
}
}

View File

@ -25,7 +25,7 @@ import org.apache.hadoop.ozone.web.client.OzoneVolume;
import org.apache.hadoop.ozone.web.exceptions.OzoneException;
import org.apache.hadoop.ozone.web.ozShell.Handler;
import org.apache.hadoop.ozone.web.ozShell.Shell;
import org.codehaus.jackson.map.ObjectMapper;
import org.apache.hadoop.ozone.web.utils.JsonUtils;
import java.io.IOException;
import java.net.URI;
@ -85,14 +85,9 @@ public class ListBucketHandler extends Handler {
OzoneVolume vol = client.getVolume(volumeName);
List<OzoneBucket> bucketList = vol.listBuckets();
ObjectMapper mapper = new ObjectMapper();
for (OzoneBucket bucket : bucketList) {
Object json =
mapper.readValue(bucket.getBucketInfo().toJsonString(), Object.class);
System.out.printf("%s%n", mapper.writerWithDefaultPrettyPrinter()
.writeValueAsString(json));
System.out.printf("%s%n", JsonUtils.toJsonStringWithDefaultPrettyPrinter(
bucket.getBucketInfo().toJsonString()));
}
}
}

View File

@ -24,7 +24,7 @@ import org.apache.hadoop.ozone.web.client.OzoneVolume;
import org.apache.hadoop.ozone.web.exceptions.OzoneException;
import org.apache.hadoop.ozone.web.ozShell.Handler;
import org.apache.hadoop.ozone.web.ozShell.Shell;
import org.codehaus.jackson.map.ObjectMapper;
import org.apache.hadoop.ozone.web.utils.JsonUtils;
import java.io.IOException;
import java.net.URI;
@ -88,11 +88,7 @@ public class UpdateBucketHandler extends Handler {
}
OzoneBucket bucket = vol.getBucket(bucketName);
ObjectMapper mapper = new ObjectMapper();
Object json =
mapper.readValue(bucket.getBucketInfo().toJsonString(), Object.class);
System.out.printf("%s%n", mapper.writerWithDefaultPrettyPrinter()
.writeValueAsString(json));
System.out.printf("%s%n", JsonUtils.toJsonStringWithDefaultPrettyPrinter(
bucket.getBucketInfo().toJsonString()));
}
}

View File

@ -26,7 +26,7 @@ import org.apache.hadoop.ozone.web.client.OzoneVolume;
import org.apache.hadoop.ozone.web.exceptions.OzoneException;
import org.apache.hadoop.ozone.web.ozShell.Handler;
import org.apache.hadoop.ozone.web.ozShell.Shell;
import org.codehaus.jackson.map.ObjectMapper;
import org.apache.hadoop.ozone.web.utils.JsonUtils;
import java.io.IOException;
import java.net.URI;
@ -90,16 +90,10 @@ public class ListKeyHandler extends Handler {
OzoneBucket bucket = vol.getBucket(bucketName);
List<OzoneKey> keys = bucket.listKeys();
ObjectMapper mapper = new ObjectMapper();
for (OzoneKey key : keys) {
Object json =
mapper.readValue(key.getObjectInfo().toJsonString(), Object.class);
System.out.printf("%s%n", mapper.writerWithDefaultPrettyPrinter()
.writeValueAsString(json));
System.out.printf("%s%n", JsonUtils.toJsonStringWithDefaultPrettyPrinter(
key.getObjectInfo().toJsonString()));
}
}
}

View File

@ -24,7 +24,7 @@ import org.apache.hadoop.ozone.web.client.OzoneVolume;
import org.apache.hadoop.ozone.web.exceptions.OzoneException;
import org.apache.hadoop.ozone.web.ozShell.Handler;
import org.apache.hadoop.ozone.web.ozShell.Shell;
import org.codehaus.jackson.map.ObjectMapper;
import org.apache.hadoop.ozone.web.utils.JsonUtils;
import java.io.IOException;
import java.net.URI;
@ -89,13 +89,10 @@ public class CreateVolumeHandler extends Handler {
client.setEndPointURI(ozoneURI);
client.setUserAuth(rootName);
OzoneVolume vol = client.createVolume(volumeName, userName, quota);
if (cmd.hasOption(Shell.VERBOSE)) {
ObjectMapper mapper = new ObjectMapper();
Object json = mapper.readValue(vol.getJsonString(), Object.class);
System.out.printf("%s%n", mapper.writerWithDefaultPrettyPrinter()
.writeValueAsString(json));
System.out.printf("%s%n",
JsonUtils.toJsonStringWithDefaultPrettyPrinter(vol.getJsonString()));
}
}
}

View File

@ -24,7 +24,7 @@ import org.apache.hadoop.ozone.web.client.OzoneVolume;
import org.apache.hadoop.ozone.web.exceptions.OzoneException;
import org.apache.hadoop.ozone.web.ozShell.Handler;
import org.apache.hadoop.ozone.web.ozShell.Shell;
import org.codehaus.jackson.map.ObjectMapper;
import org.apache.hadoop.ozone.web.utils.JsonUtils;
import java.io.IOException;
import java.net.URI;
@ -89,11 +89,7 @@ public class InfoVolumeHandler extends Handler{
client.setUserAuth(rootName);
OzoneVolume vol = client.getVolume(volumeName);
ObjectMapper mapper = new ObjectMapper();
Object json = mapper.readValue(vol.getJsonString(), Object.class);
System.out.printf("%s%n", mapper.writerWithDefaultPrettyPrinter()
.writeValueAsString(json));
System.out.printf("%s%n",
JsonUtils.toJsonStringWithDefaultPrettyPrinter(vol.getJsonString()));
}
}

View File

@ -24,7 +24,7 @@ import org.apache.hadoop.ozone.web.client.OzoneVolume;
import org.apache.hadoop.ozone.web.exceptions.OzoneException;
import org.apache.hadoop.ozone.web.ozShell.Handler;
import org.apache.hadoop.ozone.web.ozShell.Shell;
import org.codehaus.jackson.map.ObjectMapper;
import org.apache.hadoop.ozone.web.utils.JsonUtils;
import java.io.IOException;
import java.net.URI;
@ -86,14 +86,11 @@ public class ListVolumeHandler extends Handler {
System.out.printf("Found : %d volumes for user : %s %n", volumes.size(),
userName);
}
ObjectMapper mapper = new ObjectMapper();
for (OzoneVolume vol : volumes) {
Object json = mapper.readValue(vol.getJsonString(), Object.class);
System.out.printf("%s%n", mapper.writerWithDefaultPrettyPrinter()
.writeValueAsString(json));
System.out.printf("%s%n", JsonUtils
.toJsonStringWithDefaultPrettyPrinter(vol.getJsonString()));
}
}
}
}

View File

@ -17,29 +17,48 @@
*/
package org.apache.hadoop.ozone.web.response;
import com.google.common.base.Preconditions;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.ozone.web.request.OzoneAcl;
import org.apache.hadoop.ozone.OzoneConsts;
import org.codehaus.jackson.annotate.JsonAutoDetect;
import org.codehaus.jackson.annotate.JsonMethod;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectWriter;
import org.codehaus.jackson.map.annotate.JsonFilter;
import org.codehaus.jackson.map.ser.FilterProvider;
import org.codehaus.jackson.map.ser.impl.SimpleBeanPropertyFilter;
import org.codehaus.jackson.map.ser.impl.SimpleFilterProvider;
import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.ozone.OzoneConsts;
import org.apache.hadoop.ozone.web.request.OzoneAcl;
import org.apache.hadoop.ozone.web.utils.JsonUtils;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.JsonFilter;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.fasterxml.jackson.databind.ser.FilterProvider;
import com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter;
import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider;
import com.google.common.base.Preconditions;
/**
* BucketInfo class, this is used as response class to send
* Json info about a bucket back to a client.
*/
public class BucketInfo implements Comparable<BucketInfo> {
static final String BUCKET_INFO = "BUCKET_INFO_FILTER";
private static final ObjectReader READER =
new ObjectMapper().readerFor(BucketInfo.class);
private static final ObjectWriter WRITER;
static {
ObjectMapper mapper = new ObjectMapper();
String[] ignorableFieldNames = {"bytesUsed", "keyCount"};
FilterProvider filters = new SimpleFilterProvider().addFilter(BUCKET_INFO,
SimpleBeanPropertyFilter.serializeAllExcept(ignorableFieldNames));
mapper.setVisibility(PropertyAccessor.FIELD, JsonAutoDetect.Visibility.ANY);
mapper.addMixIn(Object.class, MixIn.class);
WRITER = mapper.writer(filters);
}
private String volumeName;
private String bucketName;
private List<OzoneAcl> acls;
@ -77,8 +96,7 @@ public class BucketInfo implements Comparable<BucketInfo> {
* @throws IOException
*/
public static BucketInfo parse(String jsonString) throws IOException {
ObjectMapper mapper = new ObjectMapper();
return mapper.readValue(jsonString, BucketInfo.class);
return READER.readValue(jsonString);
}
/**
@ -161,18 +179,7 @@ public class BucketInfo implements Comparable<BucketInfo> {
* @return String
*/
public String toJsonString() throws IOException {
ObjectMapper mapper = new ObjectMapper();
String[] ignorableFieldNames = {"bytesUsed", "keyCount"};
FilterProvider filters = new SimpleFilterProvider().addFilter(
BUCKET_INFO,
SimpleBeanPropertyFilter.serializeAllExcept(ignorableFieldNames));
mapper.setVisibility(JsonMethod.FIELD, JsonAutoDetect.Visibility.ANY);
mapper.getSerializationConfig()
.addMixInAnnotations(Object.class, MixIn.class);
ObjectWriter writer = mapper.writer(filters);
return writer.writeValueAsString(this);
return WRITER.writeValueAsString(this);
}
/**
@ -185,8 +192,7 @@ public class BucketInfo implements Comparable<BucketInfo> {
* fields vs. only fields that are part of REST protocol.
*/
public String toDBString() throws IOException {
ObjectMapper mapper = new ObjectMapper();
return mapper.writeValueAsString(this);
return JsonUtils.toJsonString(this);
}
/**

View File

@ -17,24 +17,46 @@
*/
package org.apache.hadoop.ozone.web.response;
import java.io.IOException;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.codehaus.jackson.annotate.JsonAutoDetect;
import org.codehaus.jackson.annotate.JsonMethod;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectWriter;
import org.codehaus.jackson.map.annotate.JsonFilter;
import org.codehaus.jackson.map.ser.FilterProvider;
import org.codehaus.jackson.map.ser.impl.SimpleBeanPropertyFilter;
import org.codehaus.jackson.map.ser.impl.SimpleFilterProvider;
import org.apache.hadoop.ozone.web.utils.JsonUtils;
import java.io.IOException;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.JsonFilter;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.fasterxml.jackson.databind.ser.FilterProvider;
import com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter;
import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider;
/**
* Represents an Ozone key Object.
*/
public class KeyInfo implements Comparable<KeyInfo> {
static final String OBJECT_INFO = "OBJECT_INFO_FILTER";
private static final ObjectReader READER =
new ObjectMapper().readerFor(KeyInfo.class);
private static final ObjectWriter WRITER;
static {
ObjectMapper mapper = new ObjectMapper();
String[] ignorableFieldNames = {"dataFileName"};
FilterProvider filters = new SimpleFilterProvider()
.addFilter(OBJECT_INFO, SimpleBeanPropertyFilter
.serializeAllExcept(ignorableFieldNames));
mapper.setVisibility(PropertyAccessor.FIELD,
JsonAutoDetect.Visibility.ANY);
mapper.addMixIn(Object.class, MixIn.class);
WRITER = mapper.writer(filters);
}
/**
* This class allows us to create custom filters
* for the Json serialization.
@ -225,8 +247,7 @@ public class KeyInfo implements Comparable<KeyInfo> {
* @throws IOException
*/
public static KeyInfo parse(String jsonString) throws IOException {
ObjectMapper mapper = new ObjectMapper();
return mapper.readValue(jsonString, KeyInfo.class);
return READER.readValue(jsonString);
}
@ -237,25 +258,13 @@ public class KeyInfo implements Comparable<KeyInfo> {
* @return String
*/
public String toJsonString() throws IOException {
String[] ignorableFieldNames = {"dataFileName"};
FilterProvider filters = new SimpleFilterProvider()
.addFilter(OBJECT_INFO, SimpleBeanPropertyFilter
.serializeAllExcept(ignorableFieldNames));
ObjectMapper mapper = new ObjectMapper()
.setVisibility(JsonMethod.FIELD, JsonAutoDetect.Visibility.ANY);
mapper.getSerializationConfig()
.addMixInAnnotations(Object.class, MixIn.class);
ObjectWriter writer = mapper.writer(filters);
return writer.writeValueAsString(this);
return WRITER.writeValueAsString(this);
}
/**
* Returns the Object as a Json String.
*/
public String toDBString() throws IOException {
ObjectMapper mapper = new ObjectMapper();
return mapper.writeValueAsString(this);
return JsonUtils.toJsonString(this);
}
}

View File

@ -18,25 +18,46 @@
package org.apache.hadoop.ozone.web.response;
import org.codehaus.jackson.annotate.JsonAutoDetect;
import org.codehaus.jackson.annotate.JsonMethod;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectWriter;
import org.codehaus.jackson.map.annotate.JsonFilter;
import org.codehaus.jackson.map.ser.FilterProvider;
import org.codehaus.jackson.map.ser.impl.SimpleBeanPropertyFilter;
import org.codehaus.jackson.map.ser.impl.SimpleFilterProvider;
import java.io.IOException;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import org.apache.hadoop.ozone.web.utils.JsonUtils;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.JsonFilter;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.fasterxml.jackson.databind.ser.FilterProvider;
import com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter;
import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider;
/**
* List Bucket is the response for the ListBucket Query.
*/
public class ListBuckets {
static final String BUCKET_LIST = "BUCKET_LIST_FILTER";
private static final ObjectReader READER =
new ObjectMapper().readerFor(ListBuckets.class);
private static final ObjectWriter WRITER;
static {
ObjectMapper mapper = new ObjectMapper();
String[] ignorableFieldNames = {"dataFileName"};
FilterProvider filters = new SimpleFilterProvider()
.addFilter(BUCKET_LIST, SimpleBeanPropertyFilter
.serializeAllExcept(ignorableFieldNames));
mapper.setVisibility(PropertyAccessor.FIELD,
JsonAutoDetect.Visibility.ANY);
mapper.addMixIn(Object.class, MixIn.class);
WRITER = mapper.writer(filters);
}
private List<BucketInfo> buckets;
/**
@ -65,8 +86,7 @@ public class ListBuckets {
* @throws IOException
*/
public static ListBuckets parse(String data) throws IOException {
ObjectMapper mapper = new ObjectMapper();
return mapper.readValue(data, ListBuckets.class);
return READER.readValue(data);
}
/**
@ -95,27 +115,14 @@ public class ListBuckets {
* @return String
*/
public String toJsonString() throws IOException {
ObjectMapper mapper = new ObjectMapper();
String[] ignorableFieldNames = {"bytesUsed", "keyCount"};
FilterProvider filters = new SimpleFilterProvider()
.addFilter(BUCKET_LIST, SimpleBeanPropertyFilter
.serializeAllExcept(ignorableFieldNames));
mapper.setVisibility(JsonMethod.FIELD, JsonAutoDetect.Visibility.ANY);
mapper.getSerializationConfig()
.addMixInAnnotations(Object.class, MixIn.class);
ObjectWriter writer = mapper.writer(filters);
return writer.writeValueAsString(this);
return WRITER.writeValueAsString(this);
}
/**
* Returns the Object as a Json String.
*/
public String toDBString() throws IOException {
ObjectMapper mapper = new ObjectMapper();
return mapper.writeValueAsString(this);
return JsonUtils.toJsonString(this);
}
/**

View File

@ -17,28 +17,50 @@
*/
package org.apache.hadoop.ozone.web.response;
import com.google.common.base.Preconditions;
import org.apache.hadoop.ozone.web.handlers.BucketArgs;
import org.apache.hadoop.ozone.web.handlers.ListArgs;
import org.codehaus.jackson.annotate.JsonAutoDetect;
import org.codehaus.jackson.annotate.JsonMethod;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectWriter;
import org.codehaus.jackson.map.annotate.JsonFilter;
import org.codehaus.jackson.map.ser.FilterProvider;
import org.codehaus.jackson.map.ser.impl.SimpleBeanPropertyFilter;
import org.codehaus.jackson.map.ser.impl.SimpleFilterProvider;
import java.io.IOException;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import org.apache.hadoop.ozone.web.handlers.BucketArgs;
import org.apache.hadoop.ozone.web.handlers.ListArgs;
import org.apache.hadoop.ozone.web.utils.JsonUtils;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.JsonFilter;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.fasterxml.jackson.databind.ser.FilterProvider;
import com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter;
import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider;
import com.google.common.base.Preconditions;
/**
* This class the represents the list of keys (Objects) in a bucket.
*/
public class ListKeys {
static final String OBJECT_LIST = "OBJECT_LIST_FILTER";
private static final ObjectReader READER =
new ObjectMapper().readerFor(ListKeys.class);
private static final ObjectWriter WRITER;
static {
ObjectMapper mapper = new ObjectMapper();
String[] ignorableFieldNames = {"dataFileName"};
FilterProvider filters = new SimpleFilterProvider()
.addFilter(OBJECT_LIST, SimpleBeanPropertyFilter
.serializeAllExcept(ignorableFieldNames));
mapper.setVisibility(PropertyAccessor.FIELD,
JsonAutoDetect.Visibility.ANY);
mapper.addMixIn(Object.class, MixIn.class);
WRITER = mapper.writer(filters);
}
private String name;
private String prefix;
private long maxKeys;
@ -73,8 +95,7 @@ public class ListKeys {
* @throws IOException - Json conversion error.
*/
public static ListKeys parse(String jsonString) throws IOException {
ObjectMapper mapper = new ObjectMapper();
return mapper.readValue(jsonString, ListKeys.class);
return READER.readValue(jsonString);
}
/**
@ -148,17 +169,7 @@ public class ListKeys {
* @throws IOException - On json Errors.
*/
public String toJsonString() throws IOException {
String[] ignorableFieldNames = {"dataFileName"};
FilterProvider filters = new SimpleFilterProvider().addFilter(OBJECT_LIST,
SimpleBeanPropertyFilter.serializeAllExcept(ignorableFieldNames));
ObjectMapper mapper = new ObjectMapper()
.setVisibility(JsonMethod.FIELD, JsonAutoDetect.Visibility.ANY);
mapper.getSerializationConfig()
.addMixInAnnotations(Object.class, MixIn.class);
ObjectWriter writer = mapper.writer(filters);
return writer.writeValueAsString(this);
return WRITER.writeValueAsString(this);
}
/**
@ -168,8 +179,7 @@ public class ListKeys {
* @throws IOException - on json errors.
*/
public String toDBString() throws IOException {
ObjectMapper mapper = new ObjectMapper();
return mapper.writeValueAsString(this);
return JsonUtils.toJsonString(this);
}
/**

View File

@ -17,21 +17,24 @@
*/
package org.apache.hadoop.ozone.web.response;
import org.apache.hadoop.classification.InterfaceAudience;
import org.codehaus.jackson.annotate.JsonAutoDetect;
import org.codehaus.jackson.annotate.JsonMethod;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectWriter;
import org.codehaus.jackson.map.annotate.JsonFilter;
import org.codehaus.jackson.map.ser.FilterProvider;
import org.codehaus.jackson.map.ser.impl.SimpleBeanPropertyFilter;
import org.codehaus.jackson.map.ser.impl.SimpleFilterProvider;
import java.io.IOException;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.ozone.web.utils.JsonUtils;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.JsonFilter;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.fasterxml.jackson.databind.ser.FilterProvider;
import com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter;
import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider;
/**
* List Volume Class is the class that is returned in JSON format to
* users when they call ListVolumes.
@ -41,6 +44,23 @@ public class ListVolumes {
private List<VolumeInfo> volumes;
static final String VOLUME_LIST = "VOLUME_LIST_FILTER";
private static final ObjectReader READER =
new ObjectMapper().readerFor(ListVolumes.class);
private static final ObjectWriter WRITER;
static {
ObjectMapper mapper = new ObjectMapper();
String[] ignorableFieldNames = {"bytesUsed", "bucketCount"};
FilterProvider filters = new SimpleFilterProvider()
.addFilter(VOLUME_LIST, SimpleBeanPropertyFilter
.serializeAllExcept(ignorableFieldNames));
mapper.setVisibility(PropertyAccessor.FIELD,
JsonAutoDetect.Visibility.ANY);
mapper.addMixIn(Object.class, MixIn.class);
WRITER = mapper.writer(filters);
}
/**
* Used for json filtering.
@ -82,19 +102,7 @@ public class ListVolumes {
* @return String
*/
public String toJsonString() throws IOException {
ObjectMapper mapper = new ObjectMapper();
String[] ignorableFieldNames = {"bytesUsed", "bucketCount"};
FilterProvider filters = new SimpleFilterProvider()
.addFilter(VOLUME_LIST,
SimpleBeanPropertyFilter.serializeAllExcept(ignorableFieldNames));
mapper.setVisibility(JsonMethod.FIELD, JsonAutoDetect.Visibility.ANY);
mapper.getSerializationConfig()
.addMixInAnnotations(Object.class, MixIn.class);
ObjectWriter writer = mapper.writer(filters);
return writer.writeValueAsString(this);
return WRITER.writeValueAsString(this);
}
/**
@ -108,8 +116,7 @@ public class ListVolumes {
* @throws IOException
*/
public String toDBString() throws IOException {
ObjectMapper mapper = new ObjectMapper();
return mapper.writeValueAsString(this);
return JsonUtils.toJsonString(this);
}
/**
@ -122,8 +129,7 @@ public class ListVolumes {
* @throws IOException
*/
public static ListVolumes parse(String data) throws IOException {
ObjectMapper mapper = new ObjectMapper();
return mapper.readValue(data, ListVolumes.class);
return READER.readValue(data);
}
/**

View File

@ -19,18 +19,21 @@
package org.apache.hadoop.ozone.web.response;
import java.io.IOException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.ozone.web.request.OzoneQuota;
import org.codehaus.jackson.annotate.JsonAutoDetect;
import org.codehaus.jackson.annotate.JsonMethod;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectWriter;
import org.codehaus.jackson.map.annotate.JsonFilter;
import org.codehaus.jackson.map.ser.FilterProvider;
import org.codehaus.jackson.map.ser.impl.SimpleBeanPropertyFilter;
import org.codehaus.jackson.map.ser.impl.SimpleFilterProvider;
import org.apache.hadoop.ozone.web.utils.JsonUtils;
import java.io.IOException;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.JsonFilter;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.fasterxml.jackson.databind.ser.FilterProvider;
import com.fasterxml.jackson.databind.ser.impl.SimpleBeanPropertyFilter;
import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider;
/**
* VolumeInfo Class is the Java class that represents
@ -40,6 +43,23 @@ import java.io.IOException;
public class VolumeInfo implements Comparable<VolumeInfo> {
static final String VOLUME_INFO = "VOLUME_INFO_FILTER";
private static final ObjectReader READER =
new ObjectMapper().readerFor(VolumeInfo.class);
private static final ObjectWriter WRITER;
static {
ObjectMapper mapper = new ObjectMapper();
String[] ignorableFieldNames = {"bytesUsed", "bucketCount"};
FilterProvider filters = new SimpleFilterProvider()
.addFilter(VOLUME_INFO, SimpleBeanPropertyFilter
.serializeAllExcept(ignorableFieldNames));
mapper.setVisibility(PropertyAccessor.FIELD,
JsonAutoDetect.Visibility.ANY);
mapper.addMixIn(Object.class, MixIn.class);
WRITER = mapper.writer(filters);
}
/**
* Custom Json Filter Class.
@ -174,19 +194,7 @@ public class VolumeInfo implements Comparable<VolumeInfo> {
* @throws IOException
*/
public String toJsonString() throws IOException {
ObjectMapper mapper = new ObjectMapper();
String[] ignorableFieldNames = {"bytesUsed", "bucketCount"};
FilterProvider filters = new SimpleFilterProvider()
.addFilter(VOLUME_INFO, SimpleBeanPropertyFilter
.serializeAllExcept(ignorableFieldNames));
mapper.setVisibility(JsonMethod.FIELD, JsonAutoDetect.Visibility.ANY);
mapper.getSerializationConfig()
.addMixInAnnotations(Object.class, MixIn.class);
ObjectWriter writer = mapper.writer(filters);
return writer.writeValueAsString(this);
return WRITER.writeValueAsString(this);
}
/**
@ -200,8 +208,7 @@ public class VolumeInfo implements Comparable<VolumeInfo> {
* @throws IOException
*/
public String toDBString() throws IOException {
ObjectMapper mapper = new ObjectMapper();
return mapper.writeValueAsString(this);
return JsonUtils.toJsonString(this);
}
@ -243,8 +250,7 @@ public class VolumeInfo implements Comparable<VolumeInfo> {
* @throws IOException
*/
public static VolumeInfo parse(String data) throws IOException {
ObjectMapper mapper = new ObjectMapper();
return mapper.readValue(data, VolumeInfo.class);
return READER.readValue(data);
}
/**

View File

@ -0,0 +1,53 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.ozone.web.utils;
import java.io.IOException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.ObjectWriter;
/**
 * JSON Utility functions used in ozone.
 */
public final class JsonUtils {

  // Reuse ObjectMapper instance for improving performance.
  // ObjectMapper is thread safe as long as we always configure instance
  // before use.
  private static final ObjectMapper MAPPER = new ObjectMapper();
  private static final ObjectReader READER = MAPPER.readerFor(Object.class);
  // Renamed from the misspelled "WRITTER"; private, so no callers break.
  private static final ObjectWriter WRITER =
      MAPPER.writerWithDefaultPrettyPrinter();

  private JsonUtils() {
    // Never constructed — utility class.
  }

  /**
   * Re-serializes the given JSON document using the default pretty printer.
   *
   * @param jsonString a valid JSON document
   * @return the same JSON content, pretty-printed
   * @throws IOException if {@code jsonString} is not parseable as JSON
   */
  public static String toJsonStringWithDefaultPrettyPrinter(String jsonString)
      throws IOException {
    Object json = READER.readValue(jsonString);
    return WRITER.writeValueAsString(json);
  }

  /**
   * Serializes the given object to its compact JSON representation.
   *
   * @param obj the object to serialize
   * @return the JSON string form of {@code obj}
   * @throws IOException on serialization failure
   */
  public static String toJsonString(Object obj) throws IOException {
    return MAPPER.writeValueAsString(obj);
  }
}

View File

@ -0,0 +1,18 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.ozone.web.utils;