Skip to content

Commit

Permalink
HADOOP-16908. Prune Jackson 1 from the codebase and restrict its usa…
Browse files Browse the repository at this point in the history
…ge for future
  • Loading branch information
virajjasani committed Dec 16, 2021
1 parent f544ef4 commit aab4cd1
Show file tree
Hide file tree
Showing 32 changed files with 291 additions and 136 deletions.
18 changes: 18 additions & 0 deletions hadoop-common-project/hadoop-common/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -147,6 +147,24 @@
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-json</artifactId>
<scope>compile</scope>
<exclusions>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-jaxrs</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-xc</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.sun.jersey</groupId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,9 @@
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectWriter;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,8 @@
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.ipc.StandbyException;
import org.apache.hadoop.test.GenericTestUtils;
import org.codehaus.jackson.map.ObjectMapper;

import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.After;
import org.junit.Rule;
import org.junit.Test;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,11 @@
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantReadWriteLock;

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.ObjectWriter;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
Expand Down Expand Up @@ -60,10 +65,6 @@
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.Time;
import org.apache.hadoop.util.Timer;
import org.codehaus.jackson.annotate.JsonProperty;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectReader;
import org.codehaus.jackson.map.ObjectWriter;

import org.apache.hadoop.classification.VisibleForTesting;

Expand Down Expand Up @@ -371,8 +372,6 @@ public void releaseReservedSpace(long bytesToRelease) {

private static final ObjectWriter WRITER =
new ObjectMapper().writerWithDefaultPrettyPrinter();
private static final ObjectReader READER =
new ObjectMapper().reader(ProvidedBlockIteratorState.class);

private static class ProvidedBlockIteratorState {
ProvidedBlockIteratorState() {
Expand Down
16 changes: 16 additions & 0 deletions hadoop-project/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -870,6 +870,22 @@
<groupId>stax</groupId>
<artifactId>stax-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-jaxrs</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-xc</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
Expand Down
12 changes: 0 additions & 12 deletions hadoop-tools/hadoop-azure/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -178,18 +178,6 @@
<scope>compile</scope>
</dependency>

<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
<scope>compile</scope>
</dependency>

<dependency>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
<scope>compile</scope>
</dependency>

<dependency>
<groupId>org.wildfly.openssl</groupId>
<artifactId>wildfly-openssl</artifactId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,8 @@

package org.apache.hadoop.fs.azurebfs.contracts.services;

import org.codehaus.jackson.annotate.JsonIgnoreProperties;
import org.codehaus.jackson.annotate.JsonProperty;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;

import org.apache.hadoop.classification.InterfaceStability;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,8 @@

import java.util.List;

import org.codehaus.jackson.annotate.JsonIgnoreProperties;
import org.codehaus.jackson.annotate.JsonProperty;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;

import org.apache.hadoop.classification.InterfaceStability;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,9 +30,10 @@
import java.util.Map;

import org.apache.hadoop.util.Preconditions;
import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.JsonParser;
import org.codehaus.jackson.JsonToken;

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

Expand Down Expand Up @@ -451,7 +452,7 @@ private static AzureADToken parseTokenFromStream(
long expiresOnInSecs = -1;

JsonFactory jf = new JsonFactory();
JsonParser jp = jf.createJsonParser(httpResponseStream);
JsonParser jp = jf.createParser(httpResponseStream);
String fieldName, fieldValue;
jp.nextToken();
while (jp.hasCurrentToken()) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,11 +30,11 @@

import org.apache.hadoop.fs.azurebfs.utils.UriUtils;
import org.apache.hadoop.security.ssl.DelegatingSSLSocketFactory;
import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.JsonParser;
import org.codehaus.jackson.JsonToken;
import org.codehaus.jackson.map.ObjectMapper;

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

Expand Down Expand Up @@ -467,7 +467,7 @@ private void processStorageErrorResponse() {
return;
}
JsonFactory jf = new JsonFactory();
try (JsonParser jp = jf.createJsonParser(stream)) {
try (JsonParser jp = jf.createParser(stream)) {
String fieldName, fieldValue;
jp.nextToken(); // START_OBJECT - {
jp.nextToken(); // FIELD_NAME - "error":
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@

import java.io.IOException;

import org.codehaus.jackson.map.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Test;

import org.apache.hadoop.fs.azurebfs.contracts.services.ListResultEntrySchema;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -54,9 +54,10 @@
import org.apache.hadoop.util.Time;
import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler;
import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;
import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.JsonParser;
import org.codehaus.jackson.JsonToken;

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import org.slf4j.Logger;


Expand Down Expand Up @@ -484,7 +485,7 @@ static Set<String> parseStaleDataNodeList(String liveNodeJsonString,
final Set<String> dataNodesToReport = new HashSet<>();

JsonFactory fac = new JsonFactory();
JsonParser parser = fac.createJsonParser(IOUtils
JsonParser parser = fac.createParser(IOUtils
.toInputStream(liveNodeJsonString, StandardCharsets.UTF_8.name()));

int objectDepth = 0;
Expand Down Expand Up @@ -554,7 +555,7 @@ static String fetchNameNodeJMXValue(Properties nameNodeProperties,
}
InputStream in = conn.getInputStream();
JsonFactory fac = new JsonFactory();
JsonParser parser = fac.createJsonParser(in);
JsonParser parser = fac.createParser(in);
if (parser.nextToken() != JsonToken.START_OBJECT
|| parser.nextToken() != JsonToken.FIELD_NAME
|| !parser.getCurrentName().equals("beans")
Expand Down
18 changes: 18 additions & 0 deletions hadoop-tools/hadoop-resourceestimator/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -81,6 +81,24 @@
<dependency>
<groupId>com.sun.jersey</groupId>
<artifactId>jersey-json</artifactId>
<exclusions>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-core-asl</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-mapper-asl</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-jaxrs</artifactId>
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
<artifactId>jackson-xc</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>junit</groupId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,13 @@
*/
package org.apache.hadoop.yarn.sls.synthetic;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonFactoryBuilder;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.commons.math3.distribution.AbstractRealDistribution;
Expand All @@ -30,18 +37,13 @@
import org.apache.hadoop.yarn.api.records.ExecutionType;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.sls.appmaster.MRAMSimulator;
import org.codehaus.jackson.annotate.JsonCreator;
import org.codehaus.jackson.annotate.JsonProperty;
import org.codehaus.jackson.map.JsonMappingException;
import org.codehaus.jackson.map.ObjectMapper;

import javax.xml.bind.annotation.XmlRootElement;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;

import static org.codehaus.jackson.JsonParser.Feature.INTERN_FIELD_NAMES;
import static org.codehaus.jackson.map.DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES;
import static com.fasterxml.jackson.databind.DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES;

/**
* This is a JobStoryProducer that operates from distribution of different
Expand Down Expand Up @@ -84,15 +86,16 @@ public SynthTraceJobProducer(Configuration conf, Path path)
this.conf = conf;
this.rand = new JDKRandomGenerator();

ObjectMapper mapper = new ObjectMapper();
mapper.configure(INTERN_FIELD_NAMES, true);
JsonFactoryBuilder jsonFactoryBuilder = new JsonFactoryBuilder();
jsonFactoryBuilder.configure(JsonFactory.Feature.INTERN_FIELD_NAMES, true);
ObjectMapper mapper = new ObjectMapper(jsonFactoryBuilder.build());
mapper.configure(FAIL_ON_UNKNOWN_PROPERTIES, false);

FileSystem ifs = path.getFileSystem(conf);
FSDataInputStream fileIn = ifs.open(path);

// Initialize the random generator and the seed
this.trace = mapper.readValue(fileIn, Trace.class);
this.trace = mapper.readValue(fileIn.getWrappedStream(), Trace.class);
this.seed = trace.rand_seed;
this.rand.setSeed(seed);
// Initialize the trace
Expand Down Expand Up @@ -538,9 +541,9 @@ public Sample(@JsonProperty("val") Double val,
if(val!=null){
if(std==null){
// Constant
if(dist!=null || discrete!=null || weights!=null){
throw new JsonMappingException("Instantiation of " + Sample.class
+ " failed");
if (dist != null || discrete != null || weights != null) {
throw JsonMappingException
.from((JsonParser) null, "Instantiation of " + Sample.class + " failed");
}
mode = Mode.CONST;
this.val = val;
Expand All @@ -550,9 +553,9 @@ public Sample(@JsonProperty("val") Double val,
this.weights = null;
} else {
// Distribution
if(discrete!=null || weights != null){
throw new JsonMappingException("Instantiation of " + Sample.class
+ " failed");
if (discrete != null || weights != null) {
throw JsonMappingException
.from((JsonParser) null, "Instantiation of " + Sample.class + " failed");
}
mode = Mode.DIST;
this.val = val;
Expand All @@ -563,9 +566,9 @@ public Sample(@JsonProperty("val") Double val,
}
} else {
// Discrete
if(discrete==null){
throw new JsonMappingException("Instantiation of " + Sample.class
+ " failed");
if (discrete == null) {
throw JsonMappingException
.from((JsonParser) null, "Instantiation of " + Sample.class + " failed");
}
mode = Mode.DISC;
this.val = 0;
Expand All @@ -576,9 +579,9 @@ public Sample(@JsonProperty("val") Double val,
weights = new ArrayList<>(Collections.nCopies(
discrete.size(), 1.0));
}
if(weights.size() != discrete.size()){
throw new JsonMappingException("Instantiation of " + Sample.class
+ " failed");
if (weights.size() != discrete.size()) {
throw JsonMappingException
.from((JsonParser) null, "Instantiation of " + Sample.class + " failed");
}
this.weights = weights;
}
Expand Down
Loading

0 comments on commit aab4cd1

Please sign in to comment.