HADOOP-16908. Prune Jackson 1 from the codebase and restrict its usage for the future (#3789)

Signed-off-by: Akira Ajisaka <aajisaka@apache.org>
Viraj Jasani authored on 2021-12-20 12:31:34 +05:30; committed by GitHub
parent 07141426e0
commit 04b6b9a87b
32 changed files with 290 additions and 140 deletions

View File

@@ -147,6 +147,24 @@
       <groupId>com.sun.jersey</groupId>
       <artifactId>jersey-json</artifactId>
       <scope>compile</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-core-asl</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-mapper-asl</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-jaxrs</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-xc</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>com.sun.jersey</groupId>

View File

@@ -21,8 +21,9 @@ package org.apache.hadoop.metrics2;
 import org.apache.commons.lang3.exception.ExceptionUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.map.ObjectWriter;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectWriter;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
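Note: as a point of reference only, a minimal, self-contained sketch of the Jackson 2 writer setup that replaces the removed org.codehaus.jackson types in classes like the metrics builder above (the class name and map contents are illustrative, not taken from this patch):

    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.ObjectWriter;

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class MetricsJsonSketch {
      // Jackson 2 equivalent of the old org.codehaus.jackson ObjectWriter
      private static final ObjectWriter WRITER = new ObjectMapper().writer();

      public static String toJson(Map<String, Object> metrics) throws Exception {
        return WRITER.writeValueAsString(metrics);
      }

      public static void main(String[] args) throws Exception {
        Map<String, Object> metrics = new LinkedHashMap<>();
        metrics.put("numOps", 42L);
        System.out.println(toJson(metrics));
      }
    }

ObjectWriter instances in Jackson 2 are immutable and reusable, which is why the same static-field pattern carries over unchanged.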

View File

@@ -50,7 +50,8 @@ import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.ipc.StandbyException;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.codehaus.jackson.map.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.junit.After;
 import org.junit.Rule;
 import org.junit.Test;

View File

@@ -32,6 +32,10 @@ import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectWriter;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -60,10 +64,6 @@ import org.apache.hadoop.util.DiskChecker.DiskErrorException;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.util.Timer;
-import org.codehaus.jackson.annotate.JsonProperty;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.map.ObjectReader;
-import org.codehaus.jackson.map.ObjectWriter;
 import org.apache.hadoop.classification.VisibleForTesting;
@@ -371,14 +371,11 @@ class ProvidedVolumeImpl extends FsVolumeImpl {
   private static final ObjectWriter WRITER =
       new ObjectMapper().writerWithDefaultPrettyPrinter();
-  private static final ObjectReader READER =
-      new ObjectMapper().reader(ProvidedBlockIteratorState.class);
   private static class ProvidedBlockIteratorState {
     ProvidedBlockIteratorState() {
       iterStartMs = Time.now();
       lastSavedMs = iterStartMs;
-      atEnd = false;
       lastBlockId = -1L;
     }
@@ -390,9 +387,6 @@ class ProvidedVolumeImpl extends FsVolumeImpl {
     @JsonProperty
     private long iterStartMs;
-    @JsonProperty
-    private boolean atEnd;
     // The id of the last block read when the state of the iterator is saved.
     // This implementation assumes that provided blocks are returned
     // in sorted order of the block ids.

View File

@@ -870,6 +870,22 @@
           <groupId>stax</groupId>
           <artifactId>stax-api</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-core-asl</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-mapper-asl</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-jaxrs</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-xc</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
     <dependency>

View File

@@ -178,18 +178,6 @@
       <scope>compile</scope>
     </dependency>
-    <dependency>
-      <groupId>org.codehaus.jackson</groupId>
-      <artifactId>jackson-mapper-asl</artifactId>
-      <scope>compile</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.codehaus.jackson</groupId>
-      <artifactId>jackson-core-asl</artifactId>
-      <scope>compile</scope>
-    </dependency>
     <dependency>
       <groupId>org.wildfly.openssl</groupId>
       <artifactId>wildfly-openssl</artifactId>

View File

@@ -18,8 +18,8 @@
 package org.apache.hadoop.fs.azurebfs.contracts.services;
-import org.codehaus.jackson.annotate.JsonIgnoreProperties;
-import org.codehaus.jackson.annotate.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import com.fasterxml.jackson.annotation.JsonProperty;
 import org.apache.hadoop.classification.InterfaceStability;

View File

@@ -20,8 +20,8 @@ package org.apache.hadoop.fs.azurebfs.contracts.services;
 import java.util.List;
-import org.codehaus.jackson.annotate.JsonIgnoreProperties;
-import org.codehaus.jackson.annotate.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import com.fasterxml.jackson.annotation.JsonProperty;
 import org.apache.hadoop.classification.InterfaceStability;

View File

@@ -30,9 +30,10 @@ import java.util.Hashtable;
 import java.util.Map;
 import org.apache.hadoop.util.Preconditions;
-import org.codehaus.jackson.JsonFactory;
-import org.codehaus.jackson.JsonParser;
-import org.codehaus.jackson.JsonToken;
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.core.JsonToken;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -451,7 +452,7 @@ public final class AzureADAuthenticator {
     long expiresOnInSecs = -1;
     JsonFactory jf = new JsonFactory();
-    JsonParser jp = jf.createJsonParser(httpResponseStream);
+    JsonParser jp = jf.createParser(httpResponseStream);
     String fieldName, fieldValue;
     jp.nextToken();
     while (jp.hasCurrentToken()) {
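Note: a small standalone sketch of the streaming pattern AzureADAuthenticator now relies on; JsonFactory.createParser() is the Jackson 2 replacement for the removed createJsonParser(), and the JSON payload and field names below are made up for illustration:

    import com.fasterxml.jackson.core.JsonFactory;
    import com.fasterxml.jackson.core.JsonParser;
    import com.fasterxml.jackson.core.JsonToken;

    import java.io.ByteArrayInputStream;
    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;

    public class TokenParseSketch {
      public static String readField(InputStream in, String wanted) throws Exception {
        JsonFactory jf = new JsonFactory();
        // Jackson 2: createParser() replaces the Jackson 1 createJsonParser()
        try (JsonParser jp = jf.createParser(in)) {
          String value = null;
          jp.nextToken();                        // START_OBJECT
          while (jp.nextToken() != JsonToken.END_OBJECT) {
            String fieldName = jp.getCurrentName();
            jp.nextToken();                      // move to the field value
            if (wanted.equals(fieldName)) {
              value = jp.getText();
            }
          }
          return value;
        }
      }

      public static void main(String[] args) throws Exception {
        byte[] json = "{\"access_token\":\"abc\",\"expires_in\":\"3600\"}"
            .getBytes(StandardCharsets.UTF_8);
        System.out.println(readField(new ByteArrayInputStream(json), "access_token"));
      }
    }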

View File

@@ -30,11 +30,11 @@ import javax.net.ssl.SSLSocketFactory;
 import org.apache.hadoop.fs.azurebfs.utils.UriUtils;
 import org.apache.hadoop.security.ssl.DelegatingSSLSocketFactory;
-import org.codehaus.jackson.JsonFactory;
-import org.codehaus.jackson.JsonParser;
-import org.codehaus.jackson.JsonToken;
-import org.codehaus.jackson.map.ObjectMapper;
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.core.JsonToken;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -467,7 +467,7 @@ public class AbfsHttpOperation implements AbfsPerfLoggable {
       return;
     }
     JsonFactory jf = new JsonFactory();
-    try (JsonParser jp = jf.createJsonParser(stream)) {
+    try (JsonParser jp = jf.createParser(stream)) {
       String fieldName, fieldValue;
       jp.nextToken();  // START_OBJECT - {
       jp.nextToken();  // FIELD_NAME - "error":

View File

@@ -20,7 +20,7 @@ package org.apache.hadoop.fs.azurebfs.contract;
 import java.io.IOException;
-import org.codehaus.jackson.map.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.junit.Test;
 import org.apache.hadoop.fs.azurebfs.contracts.services.ListResultEntrySchema;

View File

@@ -54,9 +54,10 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.Time;
 import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler;
 import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;
-import org.codehaus.jackson.JsonFactory;
-import org.codehaus.jackson.JsonParser;
-import org.codehaus.jackson.JsonToken;
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.core.JsonToken;
 import org.slf4j.Logger;
@@ -484,7 +485,7 @@ public final class DynoInfraUtils {
     final Set<String> dataNodesToReport = new HashSet<>();
     JsonFactory fac = new JsonFactory();
-    JsonParser parser = fac.createJsonParser(IOUtils
+    JsonParser parser = fac.createParser(IOUtils
         .toInputStream(liveNodeJsonString, StandardCharsets.UTF_8.name()));
     int objectDepth = 0;
@@ -554,7 +555,7 @@ public final class DynoInfraUtils {
     }
     InputStream in = conn.getInputStream();
     JsonFactory fac = new JsonFactory();
-    JsonParser parser = fac.createJsonParser(in);
+    JsonParser parser = fac.createParser(in);
     if (parser.nextToken() != JsonToken.START_OBJECT
         || parser.nextToken() != JsonToken.FIELD_NAME
         || !parser.getCurrentName().equals("beans")

View File

@@ -81,6 +81,24 @@
     <dependency>
       <groupId>com.sun.jersey</groupId>
       <artifactId>jersey-json</artifactId>
+      <exclusions>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-core-asl</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-mapper-asl</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-jaxrs</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-xc</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>junit</groupId>

View File

@@ -17,6 +17,13 @@
  */
 package org.apache.hadoop.yarn.sls.synthetic;
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.JsonFactoryBuilder;
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.commons.math3.distribution.AbstractRealDistribution;
@@ -30,18 +37,13 @@ import org.apache.hadoop.tools.rumen.JobStoryProducer;
 import org.apache.hadoop.yarn.api.records.ExecutionType;
 import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 import org.apache.hadoop.yarn.sls.appmaster.MRAMSimulator;
-import org.codehaus.jackson.annotate.JsonCreator;
-import org.codehaus.jackson.annotate.JsonProperty;
-import org.codehaus.jackson.map.JsonMappingException;
-import org.codehaus.jackson.map.ObjectMapper;
 import javax.xml.bind.annotation.XmlRootElement;
 import java.io.IOException;
 import java.util.*;
 import java.util.concurrent.atomic.AtomicInteger;
-import static org.codehaus.jackson.JsonParser.Feature.INTERN_FIELD_NAMES;
-import static org.codehaus.jackson.map.DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES;
+import static com.fasterxml.jackson.databind.DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES;
 /**
  * This is a JobStoryProducer that operates from distribution of different
@@ -84,15 +86,16 @@ public class SynthTraceJobProducer implements JobStoryProducer {
     this.conf = conf;
     this.rand = new JDKRandomGenerator();
-    ObjectMapper mapper = new ObjectMapper();
-    mapper.configure(INTERN_FIELD_NAMES, true);
+    JsonFactoryBuilder jsonFactoryBuilder = new JsonFactoryBuilder();
+    jsonFactoryBuilder.configure(JsonFactory.Feature.INTERN_FIELD_NAMES, true);
+    ObjectMapper mapper = new ObjectMapper(jsonFactoryBuilder.build());
     mapper.configure(FAIL_ON_UNKNOWN_PROPERTIES, false);
     FileSystem ifs = path.getFileSystem(conf);
     FSDataInputStream fileIn = ifs.open(path);
     // Initialize the random generator and the seed
-    this.trace = mapper.readValue(fileIn, Trace.class);
+    this.trace = mapper.readValue(fileIn.getWrappedStream(), Trace.class);
     this.seed = trace.rand_seed;
     this.rand.setSeed(seed);
     // Initialize the trace
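Note: for readers migrating similar code, a minimal sketch of the mapper configuration used above; in Jackson 2, INTERN_FIELD_NAMES is a JsonFactory feature configured through JsonFactoryBuilder, while FAIL_ON_UNKNOWN_PROPERTIES moved to DeserializationFeature (the Trace-like POJO is illustrative):

    import com.fasterxml.jackson.core.JsonFactory;
    import com.fasterxml.jackson.core.JsonFactoryBuilder;
    import com.fasterxml.jackson.databind.DeserializationFeature;
    import com.fasterxml.jackson.databind.ObjectMapper;

    public class MapperConfigSketch {
      public static class Trace {
        public long rand_seed;   // illustrative field, mirrors the seed read above
      }

      public static void main(String[] args) throws Exception {
        JsonFactoryBuilder builder = new JsonFactoryBuilder();
        // JsonParser.Feature.INTERN_FIELD_NAMES (Jackson 1) is now a JsonFactory feature
        builder.configure(JsonFactory.Feature.INTERN_FIELD_NAMES, true);
        ObjectMapper mapper = new ObjectMapper(builder.build());
        // DeserializationConfig.Feature (Jackson 1) became DeserializationFeature
        mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);

        Trace trace = mapper.readValue("{\"rand_seed\": 7, \"unknown\": 1}", Trace.class);
        System.out.println(trace.rand_seed);
      }
    }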
@@ -538,9 +541,9 @@ public class SynthTraceJobProducer implements JobStoryProducer {
       if(val!=null){
         if(std==null){
           // Constant
-          if(dist!=null || discrete!=null || weights!=null){
-            throw new JsonMappingException("Instantiation of " + Sample.class
-                + " failed");
+          if (dist != null || discrete != null || weights != null) {
+            throw JsonMappingException
+                .from((JsonParser) null, "Instantiation of " + Sample.class + " failed");
           }
           mode = Mode.CONST;
           this.val = val;
@@ -550,9 +553,9 @@ public class SynthTraceJobProducer implements JobStoryProducer {
           this.weights = null;
         } else {
           // Distribution
-          if(discrete!=null || weights != null){
-            throw new JsonMappingException("Instantiation of " + Sample.class
-                + " failed");
+          if (discrete != null || weights != null) {
+            throw JsonMappingException
+                .from((JsonParser) null, "Instantiation of " + Sample.class + " failed");
           }
           mode = Mode.DIST;
           this.val = val;
@@ -563,9 +566,9 @@ public class SynthTraceJobProducer implements JobStoryProducer {
         }
       } else {
         // Discrete
-        if(discrete==null){
-          throw new JsonMappingException("Instantiation of " + Sample.class
-              + " failed");
+        if (discrete == null) {
+          throw JsonMappingException
+              .from((JsonParser) null, "Instantiation of " + Sample.class + " failed");
         }
         mode = Mode.DISC;
         this.val = 0;
@@ -576,9 +579,9 @@ public class SynthTraceJobProducer implements JobStoryProducer {
           weights = new ArrayList<>(Collections.nCopies(
              discrete.size(), 1.0));
         }
-        if(weights.size() != discrete.size()){
-          throw new JsonMappingException("Instantiation of " + Sample.class
-              + " failed");
+        if (weights.size() != discrete.size()) {
+          throw JsonMappingException
+              .from((JsonParser) null, "Instantiation of " + Sample.class + " failed");
         }
         this.weights = weights;
       }
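Note: Jackson 2 has no public JsonMappingException(String) constructor, hence the switch to the static factory above; a hedged sketch of that pattern in isolation (the validation logic is illustrative):

    import com.fasterxml.jackson.core.JsonParser;
    import com.fasterxml.jackson.databind.JsonMappingException;

    public class MappingErrorSketch {
      static void validate(Object dist, Object discrete, Object weights)
          throws JsonMappingException {
        if (dist != null || discrete != null || weights != null) {
          // Jackson 2: build the exception via JsonMappingException.from();
          // a null parser is accepted when no parser context is at hand.
          throw JsonMappingException.from((JsonParser) null,
              "Instantiation of Sample failed");
        }
      }

      public static void main(String[] args) {
        try {
          validate("gaussian", null, null);
        } catch (JsonMappingException e) {
          System.out.println(e.getMessage());
        }
      }
    }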

View File

@@ -19,11 +19,14 @@ package org.apache.hadoop.yarn.sls;
 import org.apache.commons.math3.random.JDKRandomGenerator;
 import org.apache.hadoop.yarn.api.records.ExecutionType;
-import org.codehaus.jackson.map.JsonMappingException;
-import org.codehaus.jackson.map.ObjectMapper;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.sls.synthetic.SynthJob;
 import org.apache.hadoop.yarn.sls.synthetic.SynthTraceJobProducer;
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.JsonFactoryBuilder;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.junit.Assert;
 import org.junit.Test;
 import org.slf4j.Logger;
@@ -32,12 +35,10 @@ import org.slf4j.LoggerFactory;
 import java.io.IOException;
 import java.util.Arrays;
+import static com.fasterxml.jackson.databind.DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
-import static org.codehaus.jackson.JsonParser.Feature.INTERN_FIELD_NAMES;
-import static org.codehaus.jackson.map.DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES;
 /**
  * Simple test class driving the {@code SynthTraceJobProducer}, and validating
  * jobs produce are within expected range.
@@ -56,8 +57,9 @@ public class TestSynthJobGeneration {
         + "{\"time\": 60, \"weight\": 2}," + "{\"time\": 90, \"weight\": 1}"
         + "]}";
-    ObjectMapper mapper = new ObjectMapper();
-    mapper.configure(INTERN_FIELD_NAMES, true);
+    JsonFactoryBuilder jsonFactoryBuilder = new JsonFactoryBuilder();
+    jsonFactoryBuilder.configure(JsonFactory.Feature.INTERN_FIELD_NAMES, true);
+    ObjectMapper mapper = new ObjectMapper(jsonFactoryBuilder.build());
     mapper.configure(FAIL_ON_UNKNOWN_PROPERTIES, false);
     SynthTraceJobProducer.Workload wl =
         mapper.readValue(workloadJson, SynthTraceJobProducer.Workload.class);
@@ -176,8 +178,9 @@ public class TestSynthJobGeneration {
   @Test
   public void testSample() throws IOException {
-    ObjectMapper mapper = new ObjectMapper();
-    mapper.configure(INTERN_FIELD_NAMES, true);
+    JsonFactoryBuilder jsonFactoryBuilder = new JsonFactoryBuilder();
+    jsonFactoryBuilder.configure(JsonFactory.Feature.INTERN_FIELD_NAMES, true);
+    ObjectMapper mapper = new ObjectMapper(jsonFactoryBuilder.build());
     mapper.configure(FAIL_ON_UNKNOWN_PROPERTIES, false);
     JDKRandomGenerator rand = new JDKRandomGenerator();
@@ -235,7 +238,7 @@ public class TestSynthJobGeneration {
       mapper.readValue(invalidDistJson, SynthTraceJobProducer.Sample.class);
       Assert.fail();
     } catch (JsonMappingException e) {
-      Assert.assertTrue(e.getMessage().startsWith("Instantiation of"));
+      Assert.assertTrue(e.getMessage().startsWith("Cannot construct instance of"));
     }
   }

View File

@@ -95,6 +95,24 @@
       <groupId>com.sun.jersey</groupId>
       <artifactId>jersey-json</artifactId>
       <version>${jersey.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-core-asl</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-mapper-asl</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-jaxrs</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-xc</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>

View File

@@ -158,6 +158,24 @@
     <dependency>
       <groupId>com.sun.jersey</groupId>
       <artifactId>jersey-json</artifactId>
+      <exclusions>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-core-asl</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-mapper-asl</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-jaxrs</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-xc</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>com.sun.jersey.contribs</groupId>

View File

@@ -28,11 +28,12 @@ import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.yarn.security.DockerCredentialTokenIdentifier;
-import org.codehaus.jackson.JsonFactory;
-import org.codehaus.jackson.JsonNode;
-import org.codehaus.jackson.JsonParser;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.node.ObjectNode;
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ObjectNode;
 import org.slf4j.LoggerFactory;
 import java.io.File;
@@ -97,13 +98,13 @@ public final class DockerClientConfigHandler {
     // Parse the JSON and create the Tokens/Credentials.
     ObjectMapper mapper = new ObjectMapper();
-    JsonFactory factory = mapper.getJsonFactory();
-    JsonParser parser = factory.createJsonParser(contents);
+    JsonFactory factory = mapper.getFactory();
+    JsonParser parser = factory.createParser(contents);
     JsonNode rootNode = mapper.readTree(parser);
     Credentials credentials = new Credentials();
     if (rootNode.has(CONFIG_AUTHS_KEY)) {
-      Iterator<String> iter = rootNode.get(CONFIG_AUTHS_KEY).getFieldNames();
+      Iterator<String> iter = rootNode.get(CONFIG_AUTHS_KEY).fieldNames();
       for (; iter.hasNext();) {
         String registryUrl = iter.next();
         String registryCred = rootNode.get(CONFIG_AUTHS_KEY)
@@ -169,14 +170,14 @@ public final class DockerClientConfigHandler {
         DockerCredentialTokenIdentifier ti =
             (DockerCredentialTokenIdentifier) tk.decodeIdentifier();
         ObjectNode registryCredNode = mapper.createObjectNode();
-        registryUrlNode.put(ti.getRegistryUrl(), registryCredNode);
+        registryUrlNode.set(ti.getRegistryUrl(), registryCredNode);
         registryCredNode.put(CONFIG_AUTH_KEY,
             new String(tk.getPassword(), Charset.forName("UTF-8")));
         LOG.debug("Prepared token for write: {}", tk);
       }
     }
     if (foundDockerCred) {
-      rootNode.put(CONFIG_AUTHS_KEY, registryUrlNode);
+      rootNode.set(CONFIG_AUTHS_KEY, registryUrlNode);
       String json = mapper.writerWithDefaultPrettyPrinter()
           .writeValueAsString(rootNode);
       FileUtils.writeStringToFile(
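Note: a small sketch of the two tree-model renames DockerClientConfigHandler depends on here; Jackson 1's getFieldNames() became fieldNames(), and ObjectNode.put(String, JsonNode) is replaced by set(String, JsonNode) (the registry URL and credentials below are made up):

    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.fasterxml.jackson.databind.node.ObjectNode;

    import java.util.Iterator;

    public class DockerConfigSketch {
      public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        JsonNode rootNode = mapper.readTree(
            "{\"auths\":{\"https://registry.example.com\":{\"auth\":\"c2VjcmV0\"}}}");

        // Jackson 2: fieldNames() replaces Jackson 1's getFieldNames()
        Iterator<String> iter = rootNode.get("auths").fieldNames();
        while (iter.hasNext()) {
          System.out.println("registry: " + iter.next());
        }

        // Jackson 2: set(String, JsonNode) replaces put(String, JsonNode)
        ObjectNode rebuilt = mapper.createObjectNode();
        rebuilt.set("auths", rootNode.get("auths"));
        System.out.println(
            mapper.writerWithDefaultPrettyPrinter().writeValueAsString(rebuilt));
      }
    }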

View File

@@ -94,6 +94,24 @@
     <dependency>
       <groupId>com.sun.jersey</groupId>
      <artifactId>jersey-json</artifactId>
+      <exclusions>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-core-asl</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-mapper-asl</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-jaxrs</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-xc</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>com.sun.jersey.contribs</groupId>

View File

@@ -154,6 +154,24 @@
     <dependency>
       <groupId>com.sun.jersey</groupId>
       <artifactId>jersey-json</artifactId>
+      <exclusions>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-core-asl</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-mapper-asl</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-jaxrs</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-xc</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>com.sun.jersey.contribs</groupId>

View File

@@ -25,15 +25,17 @@ import java.util.LinkedList;
 import java.util.List;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
-import org.codehaus.jackson.annotate.JsonIgnore;
-import org.codehaus.jackson.annotate.JsonProperty;
-import org.codehaus.jackson.map.ObjectMapper;
 /**
  * The NetworkTagMapping JsonManager implementation.

View File

@@ -62,8 +62,6 @@ import org.apache.hadoop.yarn.server.nodemanager.containermanager.runtime.Contai
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.runtime.ContainerRuntimeContext;
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.volume.csi.ContainerVolumePublisher;
 import org.apache.hadoop.yarn.server.nodemanager.executor.ContainerExecContext;
-import org.codehaus.jackson.JsonNode;
-import org.codehaus.jackson.map.ObjectMapper;
 import java.io.File;
 import java.io.IOException;
@@ -81,6 +79,9 @@ import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.TimeUnit;
 import java.util.regex.Matcher;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import static org.apache.hadoop.yarn.conf.YarnConfiguration.DEFAULT_NM_RUNC_IMAGE_TAG_TO_MANIFEST_PLUGIN;
 import static org.apache.hadoop.yarn.conf.YarnConfiguration.DEFAULT_NM_RUNC_LAYER_MOUNTS_TO_KEEP;
 import static org.apache.hadoop.yarn.conf.YarnConfiguration.DEFAULT_NM_REAP_RUNC_LAYER_MOUNTS_INTERVAL;
@@ -642,7 +643,7 @@ public class RuncContainerRuntime extends OCIContainerRuntime {
     if (envNode.isMissingNode()) {
       return null;
     }
-    return mapper.readValue(envNode, List.class);
+    return mapper.readValue(envNode.traverse(), List.class);
   }
   @SuppressWarnings("unchecked")
@@ -653,7 +654,7 @@ public class RuncContainerRuntime extends OCIContainerRuntime {
     if (entrypointNode.isMissingNode()) {
       return null;
     }
-    return mapper.readValue(entrypointNode, List.class);
+    return mapper.readValue(entrypointNode.traverse(), List.class);
   }
   private RuncContainerExecutorConfig createRuncContainerExecutorConfig(
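Note: Jackson 2's ObjectMapper no longer offers the Jackson 1 readValue(JsonNode, Class) overload, hence the traverse() calls above; a minimal sketch of that pattern (mapper.treeToValue is a common alternative, and the manifest-style JSON is illustrative):

    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;

    import java.util.List;

    public class TraverseSketch {
      @SuppressWarnings("unchecked")
      public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        JsonNode config = mapper.readTree(
            "{\"config\":{\"Env\":[\"PATH=/usr/bin\",\"FOO=bar\"]}}");

        JsonNode envNode = config.path("config").path("Env");
        if (!envNode.isMissingNode()) {
          // Jackson 2: read the sub-tree through its own JsonParser
          List<String> env = mapper.readValue(envNode.traverse(), List.class);
          System.out.println(env);
        }
      }
    }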

View File

@@ -29,7 +29,6 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.service.AbstractService;
 import org.apache.hadoop.util.concurrent.HadoopExecutors;
-import org.codehaus.jackson.map.ObjectMapper;
 import java.io.BufferedReader;
 import java.io.File;
@@ -45,6 +44,8 @@ import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicReference;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import static org.apache.hadoop.yarn.conf.YarnConfiguration.DEFAULT_NM_RUNC_CACHE_REFRESH_INTERVAL;
 import static org.apache.hadoop.yarn.conf.YarnConfiguration.DEFAULT_NM_RUNC_IMAGE_TOPLEVEL_DIR;
 import static org.apache.hadoop.yarn.conf.YarnConfiguration.DEFAULT_NUM_MANIFESTS_TO_CACHE;

View File

@@ -20,12 +20,13 @@
 package org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.runc;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.codehaus.jackson.annotate.JsonRawValue;
-import org.codehaus.jackson.map.annotate.JsonSerialize;
 import java.util.List;
 import java.util.Map;
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonRawValue;
 /**
  * This class is used by the
  * {@link org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.RuncContainerRuntime}
@@ -35,7 +36,7 @@ import java.util.Map;
  * a JSON object named ociRuntimeConfig that mirrors the
  * OCI runtime specification.
  */
-@JsonSerialize(include=JsonSerialize.Inclusion.NON_DEFAULT)
+@JsonInclude(JsonInclude.Include.NON_DEFAULT)
 @InterfaceStability.Unstable
 public class RuncContainerExecutorConfig {
   final private String version;
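Note: the annotation swap applied throughout this class maps Jackson 1's class-level @JsonSerialize(include = Inclusion.NON_DEFAULT) onto Jackson 2's @JsonInclude(JsonInclude.Include.NON_DEFAULT); a minimal sketch of the behaviour, using an illustrative POJO rather than the real config classes:

    import com.fasterxml.jackson.annotation.JsonInclude;
    import com.fasterxml.jackson.databind.ObjectMapper;

    public class JsonIncludeSketch {
      // Fields left at their default values are omitted from the output
      @JsonInclude(JsonInclude.Include.NON_DEFAULT)
      public static class OCILayerLike {
        public String mediaType;   // illustrative fields
        public long size;
      }

      public static void main(String[] args) throws Exception {
        OCILayerLike layer = new OCILayerLike();
        layer.mediaType = "application/vnd.oci.image.layer.v1.tar";
        // size stays 0 (its default), so it is not serialized
        System.out.println(new ObjectMapper().writeValueAsString(layer));
        // -> {"mediaType":"application/vnd.oci.image.layer.v1.tar"}
      }
    }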
@@ -164,7 +165,7 @@
   /**
    * This class is a Java representation of an OCI image layer.
    */
-  @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+  @JsonInclude(JsonInclude.Include.NON_DEFAULT)
   @InterfaceStability.Unstable
   public static class OCILayer {
     final private String mediaType;
@@ -192,7 +193,7 @@
    * This class is a Java representation of the OCI Runtime Specification.
    */
   @InterfaceStability.Unstable
-  @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+  @JsonInclude(JsonInclude.Include.NON_DEFAULT)
   public static class OCIRuntimeConfig {
     final private OCIRootConfig root;
     final private List<OCIMount> mounts;
@@ -254,7 +255,7 @@
    * This class is a Java representation of the oci root config section
    * of the OCI Runtime Specification.
    */
-  @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+  @JsonInclude(JsonInclude.Include.NON_DEFAULT)
   public static class OCIRootConfig {
     public String getPath() {
       return path;
@@ -281,7 +282,7 @@
    * This class is a Java representation of the oci mount section
    * of the OCI Runtime Specification.
    */
-  @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+  @JsonInclude(JsonInclude.Include.NON_DEFAULT)
   public static class OCIMount {
     final private String destination;
     final private String type;
@@ -329,7 +330,7 @@
    * This class is a Java representation of the oci process section
    * of the OCI Runtime Specification.
    */
-  @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+  @JsonInclude(JsonInclude.Include.NON_DEFAULT)
   public static class OCIProcessConfig {
     final private boolean terminal;
     final private ConsoleSize consoleSize;
@@ -422,7 +423,7 @@
    * This class is a Java representation of the console size section
    * of the OCI Runtime Specification.
    */
-  @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+  @JsonInclude(JsonInclude.Include.NON_DEFAULT)
   public static class ConsoleSize {
     public int getHeight() {
       return height;
@@ -450,7 +451,7 @@
    * This class is a Java representation of the rlimits section
    * of the OCI Runtime Specification.
    */
-  @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+  @JsonInclude(JsonInclude.Include.NON_DEFAULT)
   public static class RLimits {
     public String getType() {
       return type;
@@ -484,7 +485,7 @@
    * This class is a Java representation of the capabilities section
    * of the OCI Runtime Specification.
    */
-  @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+  @JsonInclude(JsonInclude.Include.NON_DEFAULT)
   public static class Capabilities {
     final private List<String> effective;
     final private List<String> bounding;
@@ -554,7 +555,7 @@
    * This class is a Java representation of the oci hooks section
    * of the OCI Runtime Specification.
    */
-  @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+  @JsonInclude(JsonInclude.Include.NON_DEFAULT)
   public static class OCIHooksConfig {
     final private List<HookType> prestart;
     final private List<HookType> poststart;
@@ -587,7 +588,7 @@
    * This class is a Java representation of the hook type section
    * of the OCI Runtime Specification.
    */
-  @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+  @JsonInclude(JsonInclude.Include.NON_DEFAULT)
   public static class HookType {
     final private String path;
     final private List<String> args;
@@ -650,7 +651,7 @@
    * This class is a Java representation of the oci linux config section
    * of the OCI Runtime Specification.
    */
-  @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+  @JsonInclude(JsonInclude.Include.NON_DEFAULT)
   public static class OCILinuxConfig {
     final private List<Namespace> namespaces;
     final private List<IDMapping> uidMappings;
@@ -768,7 +769,7 @@
    * This class is a Java representation of the idmapping section
    * of the OCI Runtime Specification.
    */
-  @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+  @JsonInclude(JsonInclude.Include.NON_DEFAULT)
   public static class IDMapping {
     final private int containerID;
     final private int hostID;
@@ -802,7 +803,7 @@
    * This class is a Java representation of the device section
    * of the OCI Runtime Specification.
    */
-  @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+  @JsonInclude(JsonInclude.Include.NON_DEFAULT)
   public static class Device {
     final private String type;
     final private String path;
@@ -861,7 +862,7 @@
    * This class is a Java representation of the resources section
    * of the OCI Runtime Specification.
    */
-  @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+  @JsonInclude(JsonInclude.Include.NON_DEFAULT)
   public static class Resources {
     final private List<Device> device;
     final private Memory memory;
@@ -927,7 +928,7 @@
    * This class is a Java representation of the device section
    * of the OCI Runtime Specification.
    */
-  @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+  @JsonInclude(JsonInclude.Include.NON_DEFAULT)
   public static class Device {
     final private boolean allow;
     final private String type;
@@ -973,7 +974,7 @@
    * This class is a Java representation of the memory section
    * of the OCI Runtime Specification.
    */
-  @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+  @JsonInclude(JsonInclude.Include.NON_DEFAULT)
   public static class Memory {
     final private long limit;
     final private long reservation;
@@ -1032,7 +1033,7 @@
    * This class is a Java representation of the cpu section
    * of the OCI Runtime Specification.
    */
-  @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+  @JsonInclude(JsonInclude.Include.NON_DEFAULT)
   public static class CPU {
     final private long quota;
     final private long period;
@@ -1092,7 +1093,7 @@
    * This class is a Java representation of the blockio section
    * of the OCI Runtime Specification.
    */
-  @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+  @JsonInclude(JsonInclude.Include.NON_DEFAULT)
   public static class BlockIO {
     final private int weight;
     final private int leafWeight;
@@ -1153,7 +1154,7 @@
    * This class is a Java representation of the weight device section
    * of the OCI Runtime Specification.
    */
-  @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+  @JsonInclude(JsonInclude.Include.NON_DEFAULT)
   public static class WeightDevice {
     final private long major;
     final private long minor;
@@ -1193,7 +1194,7 @@
    * This class is a Java representation of the throttle device section
    * of the OCI Runtime Specification.
    */
-  @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+  @JsonInclude(JsonInclude.Include.NON_DEFAULT)
   public static class ThrottleDevice {
     final private long major;
     final private long minor;
@@ -1227,7 +1228,7 @@
    * This class is a Java representation of the huge page limits section
    * of the OCI Runtime Specification.
    */
-  @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+  @JsonInclude(JsonInclude.Include.NON_DEFAULT)
   public static class HugePageLimits {
     final private String pageSize;
     final private long limit;
@@ -1254,7 +1255,7 @@
    * This class is a Java representation of the network section
    * of the OCI Runtime Specification.
    */
-  @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+  @JsonInclude(JsonInclude.Include.NON_DEFAULT)
   public static class Network {
     final private int classID;
     final private List<NetworkPriority> priorities;
@@ -1280,7 +1281,7 @@
    * This class is a Java representation of the network priority section
    * of the OCI Runtime Specification.
    */
-  @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+  @JsonInclude(JsonInclude.Include.NON_DEFAULT)
   public static class NetworkPriority {
     final private String name;
     final private int priority;
@@ -1308,7 +1309,7 @@
    * This class is a Java representation of the pid section
    * of the OCI Runtime Specification.
    */
-  @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+  @JsonInclude(JsonInclude.Include.NON_DEFAULT)
   public static class PID {
     final private long limit;
@@ -1329,7 +1330,7 @@
    * This class is a Java representation of the rdma section
    * of the OCI Runtime Specification.
    */
-  @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+  @JsonInclude(JsonInclude.Include.NON_DEFAULT)
   public static class RDMA {
     final private int hcaHandles;
     final private int hcaObjects;
@@ -1357,7 +1358,7 @@
    * This class is a Java representation of the intelrdt section
    * of the OCI Runtime Specification.
    */
-  @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+  @JsonInclude(JsonInclude.Include.NON_DEFAULT)
   public static class IntelRdt {
     final private String closID;
     final private String l3CacheSchema;
@@ -1391,7 +1392,7 @@
    * This class is a Java representation of the sysctl section
    * of the OCI Runtime Specification.
    */
-  @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+  @JsonInclude(JsonInclude.Include.NON_DEFAULT)
   public static class Sysctl {
     // for kernel params
   }
@@ -1400,7 +1401,7 @@
    * This class is a Java representation of the seccomp section
    * of the OCI Runtime Specification.
    */
-  @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+  @JsonInclude(JsonInclude.Include.NON_DEFAULT)
   public static class Seccomp {
     final private String defaultAction;
     final private List<String> architectures;
@@ -1433,7 +1434,7 @@
    * This class is a Java representation of the syscall section
    * of the OCI Runtime Specification.
    */
-  @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+  @JsonInclude(JsonInclude.Include.NON_DEFAULT)
   public static class Syscall {
     final private List<String> names;
     final private String action;
@@ -1466,7 +1467,7 @@
    * This class is a Java representation of the seccomp arguments
    * of the OCI Runtime Specification.
    */
-  @JsonSerialize(include = JsonSerialize.Inclusion.NON_DEFAULT)
+  @JsonInclude(JsonInclude.Include.NON_DEFAULT)
   public static class SeccompArg {
     final private int index;
     final private long value;

View File

@@ -57,7 +57,6 @@ import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.Reso
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.runtime.ContainerExecutionException;
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.runtime.ContainerRuntimeConstants;
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.runtime.ContainerRuntimeContext;
-import org.codehaus.jackson.map.ObjectMapper;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;

View File

@@ -24,7 +24,8 @@ import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.runc.ImageManifest;
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.runc.ImageTagToManifestPlugin;
-import org.codehaus.jackson.map.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;

View File

@@ -58,8 +58,9 @@ import org.apache.hadoop.yarn.server.nodemanager.containermanager.runtime.Contai
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.runtime.ContainerRuntimeConstants;
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.runtime.ContainerRuntimeContext;
 import org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService;
-import org.codehaus.jackson.JsonNode;
-import org.codehaus.jackson.map.ObjectMapper;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -501,7 +502,7 @@ public class TestRuncContainerRuntime {
     JsonNode configNode = mapper.readTree(configFile);
     RuncContainerExecutorConfig runcContainerExecutorConfig =
-        mapper.readValue(configNode, RuncContainerExecutorConfig.class);
+        mapper.readValue(configNode.traverse(), RuncContainerExecutorConfig.class);
     configSize = configNode.size();
     OCIRuntimeConfig ociRuntimeConfig =

View File

@@ -109,6 +109,24 @@
     <dependency>
       <groupId>com.sun.jersey</groupId>
       <artifactId>jersey-json</artifactId>
+      <exclusions>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-core-asl</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-mapper-asl</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-jaxrs</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.codehaus.jackson</groupId>
+          <artifactId>jackson-xc</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>com.sun.jersey.contribs</groupId>

View File

@@ -19,6 +19,8 @@
 package org.apache.hadoop.yarn.server.resourcemanager.resource;
 import org.apache.hadoop.classification.VisibleForTesting;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -29,7 +31,6 @@ import org.apache.hadoop.yarn.exceptions.YARNFeatureNotEnabledException;
 import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.util.resource.ResourceUtils;
 import org.apache.hadoop.yarn.util.resource.Resources;
-import org.codehaus.jackson.map.ObjectMapper;
 import java.io.File;
 import java.io.IOException;

View File

@@ -23,11 +23,12 @@ import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
 import org.apache.hadoop.yarn.server.timelineservice.documentstore.collection.document.entity.TimelineEntityDocument;
 import org.apache.hadoop.yarn.server.timelineservice.documentstore.collection.document.flowactivity.FlowActivityDocument;
 import org.apache.hadoop.yarn.server.timelineservice.documentstore.collection.document.flowrun.FlowRunDocument;
-import org.codehaus.jackson.type.TypeReference;
 import java.io.IOException;
 import java.util.List;
+import com.fasterxml.jackson.core.type.TypeReference;
 /**
  * This is util class for baking sample TimelineEntities data for test.
  */

View File

@@ -18,12 +18,12 @@
 package org.apache.hadoop.yarn.server.timelineservice.documentstore;
-import org.codehaus.jackson.map.DeserializationConfig;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.type.TypeReference;
 import java.io.IOException;
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.DeserializationFeature;
+import com.fasterxml.jackson.databind.ObjectMapper;
 /**
  * A simple util class for Json SerDe.
  */
@@ -34,8 +34,7 @@ public final class JsonUtils {
   private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
   static {
-    OBJECT_MAPPER.configure(
-        DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES, false);
+    OBJECT_MAPPER.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
   }
   /**
@@ -48,7 +47,7 @@ public final class JsonUtils {
    * @throws IOException if Json String is not valid or error
    *         while deserialization
    */
-  public static <T> T fromJson(final String jsonStr, final TypeReference type)
+  public static <T> T fromJson(final String jsonStr, final TypeReference<T> type)
       throws IOException {
     return OBJECT_MAPPER.readValue(jsonStr, type);
   }
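Note: a short sketch of how the now-generic JsonUtils-style helper is typically used; Jackson 2's TypeReference<T> carries the target type, so callers no longer need a raw cast (the list payload below is illustrative):

    import com.fasterxml.jackson.core.type.TypeReference;
    import com.fasterxml.jackson.databind.DeserializationFeature;
    import com.fasterxml.jackson.databind.ObjectMapper;

    import java.io.IOException;
    import java.util.List;

    public final class JsonUtilsSketch {
      private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

      static {
        OBJECT_MAPPER.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
      }

      public static <T> T fromJson(String jsonStr, TypeReference<T> type) throws IOException {
        return OBJECT_MAPPER.readValue(jsonStr, type);
      }

      public static void main(String[] args) throws IOException {
        List<String> ids = fromJson("[\"entity-1\",\"entity-2\"]",
            new TypeReference<List<String>>() { });
        System.out.println(ids);
      }
    }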

View File

@@ -273,6 +273,14 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/x
                   <bannedImport>static org.apache.hadoop.thirdparty.com.google.common.base.Preconditions.**</bannedImport>
                 </bannedImports>
               </restrictImports>
+              <restrictImports implementation="de.skuzzle.enforcer.restrictimports.rule.RestrictImports">
+                <includeTestCode>true</includeTestCode>
+                <reason>Use Fasterxml Jackson 2 dependency in place of org.codehaus Jackson 1</reason>
+                <bannedImports>
+                  <bannedImport>org.codehaus.jackson.**</bannedImport>
+                  <bannedImport>static org.codehaus.jackson.**</bannedImport>
+                </bannedImports>
+              </restrictImports>
             </rules>
           </configuration>
         </execution>