diff --git a/CHANGES.txt b/CHANGES.txt
index eb582b0ddf1..df66086904e 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -32,9 +32,6 @@ Trunk (unreleased changes)
HADOOP-6392. Run namenode and jobtracker on separate EC2 instances.
(tomwhite)
- HADOOP-6323. Add comparators to the serialization API.
- (Aaron Kimball via cutting)
-
HADOOP-6433. Introduce asychronous deletion of files via a pool of
threads. This can be used to delete files in the Distributed
Cache. (Zheng Shao via dhruba)
@@ -129,18 +126,12 @@ Trunk (unreleased changes)
HADOOP-6472. add tokenCache option to GenericOptionsParser for passing
file with secret keys to a map reduce job. (boryas)
- HADOOP-6443. Serialization classes accept invalid metadata.
- (Aaron Kimball via tomwhite)
-
HADOOP-3205. Read multiple chunks directly from FSInputChecker subclass
into user buffers. (Todd Lipcon via tomwhite)
HADOOP-6479. TestUTF8 assertions could fail with better text.
(Steve Loughran via tomwhite)
- HADOOP-6420. Add functionality permitting subsets of Configuration to be
- interpreted as Map. (Aaron Kimball via cdouglas)
-
HADOOP-6155. Deprecate RecordIO anticipating Avro. (Tom White via cdouglas)
HADOOP-6492. Make some Avro serialization APIs public.
@@ -564,8 +555,6 @@ Release 0.21.0 - Unreleased
the io package and makes it available to other users (MAPREDUCE-318).
(Jothi Padmanabhan via ddas)
- HADOOP-6165. Add metadata to Serializations. (tomwhite)
-
HADOOP-6105. Adds support for automatically handling deprecation of
configuration keys. (V.V.Chaitanya Krishna via yhemanth)
diff --git a/src/java/core-default.xml b/src/java/core-default.xml
index 8baa34b4af8..d5c8fee0ee4 100644
--- a/src/java/core-default.xml
+++ b/src/java/core-default.xml
@@ -124,7 +124,7 @@
io.serializations
- org.apache.hadoop.io.serializer.WritableSerialization,org.apache.hadoop.io.serializer.avro.AvroSpecificSerialization,org.apache.hadoop.io.serializer.avro.AvroReflectSerialization,org.apache.hadoop.io.serializer.avro.AvroGenericSerialization
+ org.apache.hadoop.io.serializer.WritableSerialization,org.apache.hadoop.io.serializer.avro.AvroSpecificSerialization,org.apache.hadoop.io.serializer.avro.AvroReflectSerializationA list of serialization classes that can be used for
obtaining serializers and deserializers.
diff --git a/src/java/org/apache/hadoop/conf/Configuration.java b/src/java/org/apache/hadoop/conf/Configuration.java
index d49fd1c3825..8fbba639302 100644
--- a/src/java/org/apache/hadoop/conf/Configuration.java
+++ b/src/java/org/apache/hadoop/conf/Configuration.java
@@ -31,7 +31,6 @@ import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.Writer;
import java.net.URL;
-import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
@@ -1047,138 +1046,6 @@ public class Configuration implements Iterable>,
set(name, StringUtils.arrayToString(values));
}
- /**
- * Instantiates a map view over a subset of the entries in
- * the Configuration. This is instantiated by getMap(), which
- * binds a prefix of the namespace to the ConfigItemMap. This
- * mapping reflects changes to the underlying Configuration.
- *
- * This map does not support iteration.
- */
- protected class ConfigItemMap extends AbstractMap
- implements Map {
-
- private final String prefix;
-
- public ConfigItemMap(String prefix) {
- this.prefix = prefix;
- }
-
- @Override
- public boolean containsKey(Object key) {
- return lookup(key.toString()) != null;
- }
-
- @Override
- public Set> entrySet() {
- throw new UnsupportedOperationException("unsupported");
- }
-
- @Override
- public boolean equals(Object o) {
- return o != null && o instanceof ConfigItemMap
- && prefix.equals(((ConfigItemMap) o).prefix)
- && Configuration.this == ((ConfigItemMap) o).getConfiguration();
- }
-
- private Configuration getConfiguration() {
- return Configuration.this;
- }
-
- @Override
- public String get(Object key) {
- if (null == key) {
- return null;
- }
-
- return lookup(key.toString());
- }
-
- @Override
- public int hashCode() {
- return prefix.hashCode();
- }
-
- @Override
- public String put(String key, String val) {
- if (null == key) {
- return null;
- }
-
- String ret = get(key);
- Configuration.this.set(prefix + key, val);
- return ret;
- }
-
- @Override
- public void putAll(Map extends String, ? extends String> m) {
- for (Map.Entry extends String, ? extends String> entry : m.entrySet()) {
- put(entry.getKey(), entry.getValue());
- }
- }
-
- private String lookup(String subKey) {
- String configKey = prefix + subKey;
- Properties props = Configuration.this.getProps();
- Object val = props.get(configKey);
- String str = null;
- if (null != val) {
- str = substituteVars(val.toString());
- }
-
- return str;
- }
- }
-
- /**
- * Given a string -> string map as a value, embed this in the
- * Configuration by prepending 'name' to all the keys in the valueMap,
- * and storing it inside the current Configuration.
- *
- * e.g., setMap("foo", { "bar" -> "a", "baz" -> "b" }) would
- * insert "foo.bar" -> "a" and "foo.baz" -> "b" in this
- * Configuration.
- *
- * @param name the prefix to attach to all keys in the valueMap. This
- * should not have a trailing "." character.
- * @param valueMap the map to embed in the Configuration.
- */
- public void setMap(String name, Map valueMap) {
- // Store all elements of the map proper.
- for (Map.Entry entry : valueMap.entrySet()) {
- set(name + "." + entry.getKey(), entry.getValue());
- }
- }
-
- /**
- * Returns a map containing a view of all configuration properties
- * whose names begin with "name.*", with the "name." prefix removed.
- * e.g., if "foo.bar" -> "a" and "foo.baz" -> "b" are in the
- * Configuration, getMap("foo") would return { "bar" -> "a",
- * "baz" -> "b" }.
- *
- * Map name deprecation is handled via "prefix deprecation"; the individual
- * keys created in a configuration by inserting a map do not need to be
- * individually deprecated -- it is sufficient to deprecate the 'name'
- * associated with the map and bind that to a new name. e.g., if "foo"
- * is deprecated for "newfoo," and the configuration contains entries for
- * "newfoo.a" and "newfoo.b", getMap("foo") will return a map containing
- * the keys "a" and "b".
- *
- * The returned map does not support iteration; it is a lazy view over
- * the slice of the configuration whose keys begin with 'name'. Updates
- * to the underlying configuration are reflected in the returned map,
- * and updates to the map will modify the underlying configuration.
- *
- * @param name The prefix of the key names to extract into the output map.
- * @return a String->String map that contains all (k, v) pairs
- * where 'k' begins with 'name.'; the 'name.' prefix is removed in the output.
- */
- public Map getMap(String name) {
- String prefix = handleDeprecation(name) + ".";
- return new ConfigItemMap(prefix);
- }
-
/**
* Load a class by name.
*
diff --git a/src/java/org/apache/hadoop/io/DefaultStringifier.java b/src/java/org/apache/hadoop/io/DefaultStringifier.java
index ff606ff3eb3..124a550942d 100644
--- a/src/java/org/apache/hadoop/io/DefaultStringifier.java
+++ b/src/java/org/apache/hadoop/io/DefaultStringifier.java
@@ -21,21 +21,20 @@ package org.apache.hadoop.io;
import java.io.IOException;
import java.nio.charset.UnsupportedCharsetException;
import java.util.ArrayList;
-import java.util.Map;
import org.apache.commons.codec.binary.Base64;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.serializer.DeserializerBase;
-import org.apache.hadoop.io.serializer.SerializationBase;
+import org.apache.hadoop.io.serializer.Deserializer;
+import org.apache.hadoop.io.serializer.Serialization;
import org.apache.hadoop.io.serializer.SerializationFactory;
-import org.apache.hadoop.io.serializer.SerializerBase;
+import org.apache.hadoop.io.serializer.Serializer;
import org.apache.hadoop.util.GenericsUtil;
/**
* DefaultStringifier is the default implementation of the {@link Stringifier}
* interface which stringifies the objects using base64 encoding of the
- * serialized version of the objects. The {@link SerializerBase} and
- * {@link DeserializerBase} are obtained from the {@link SerializationFactory}.
+ * serialized version of the objects. The {@link Serializer} and
+ * {@link Deserializer} are obtained from the {@link SerializationFactory}.
*
* DefaultStringifier offers convenience methods to store/load objects to/from
* the configuration.
@@ -46,9 +45,9 @@ public class DefaultStringifier implements Stringifier {
private static final String SEPARATOR = ",";
- private SerializerBase serializer;
+ private Serializer serializer;
- private DeserializerBase deserializer;
+ private Deserializer deserializer;
private DataInputBuffer inBuf;
@@ -57,9 +56,8 @@ public class DefaultStringifier implements Stringifier {
public DefaultStringifier(Configuration conf, Class c) {
SerializationFactory factory = new SerializationFactory(conf);
- Map metadata = SerializationBase.getMetadataFromClass(c);
- this.serializer = factory.getSerializer(metadata);
- this.deserializer = factory.getDeserializer(metadata);
+ this.serializer = factory.getSerializer(c);
+ this.deserializer = factory.getDeserializer(c);
this.inBuf = new DataInputBuffer();
this.outBuf = new DataOutputBuffer();
try {
@@ -104,7 +102,7 @@ public class DefaultStringifier implements Stringifier {
* @param item the object to be stored
* @param keyName the name of the key to use
* @throws IOException : forwards Exceptions from the underlying
- * {@link SerializationBase} classes.
+ * {@link Serialization} classes.
*/
public static void store(Configuration conf, K item, String keyName)
throws IOException {
@@ -124,7 +122,7 @@ public class DefaultStringifier implements Stringifier {
* @param itemClass the class of the item
* @return restored object
* @throws IOException : forwards Exceptions from the underlying
- * {@link SerializationBase} classes.
+ * {@link Serialization} classes.
*/
public static K load(Configuration conf, String keyName,
Class itemClass) throws IOException {
@@ -147,7 +145,7 @@ public class DefaultStringifier implements Stringifier {
* @param keyName the name of the key to use
* @throws IndexOutOfBoundsException if the items array is empty
* @throws IOException : forwards Exceptions from the underlying
- * {@link SerializationBase} classes.
+ * {@link Serialization} classes.
*/
public static void storeArray(Configuration conf, K[] items,
String keyName) throws IOException {
@@ -175,7 +173,7 @@ public class DefaultStringifier implements Stringifier {
* @param itemClass the class of the item
* @return restored object
* @throws IOException : forwards Exceptions from the underlying
- * {@link SerializationBase} classes.
+ * {@link Serialization} classes.
*/
public static K[] loadArray(Configuration conf, String keyName,
Class itemClass) throws IOException {
diff --git a/src/java/org/apache/hadoop/io/SequenceFile.java b/src/java/org/apache/hadoop/io/SequenceFile.java
index 5903884f28b..d3e441cccb7 100644
--- a/src/java/org/apache/hadoop/io/SequenceFile.java
+++ b/src/java/org/apache/hadoop/io/SequenceFile.java
@@ -33,9 +33,8 @@ import org.apache.hadoop.io.compress.Decompressor;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.io.compress.zlib.ZlibFactory;
-import org.apache.hadoop.io.serializer.DeserializerBase;
-import org.apache.hadoop.io.serializer.SerializationBase;
-import org.apache.hadoop.io.serializer.SerializerBase;
+import org.apache.hadoop.io.serializer.Deserializer;
+import org.apache.hadoop.io.serializer.Serializer;
import org.apache.hadoop.io.serializer.SerializationFactory;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.util.Progressable;
@@ -706,14 +705,6 @@ public class SequenceFile {
return new TreeMap(this.theMetadata);
}
- public Map getMetadataAsStringMap() {
- Map map = new HashMap();
- for (Map.Entry entry : theMetadata.entrySet()) {
- map.put(entry.getKey().toString(), entry.getValue().toString());
- }
- return map;
- }
-
public void write(DataOutput out) throws IOException {
out.writeInt(this.theMetadata.size());
Iterator> iter =
@@ -810,9 +801,9 @@ public class SequenceFile {
Metadata metadata = null;
Compressor compressor = null;
- protected SerializerBase keySerializer;
- protected SerializerBase uncompressedValSerializer;
- protected SerializerBase compressedValSerializer;
+ protected Serializer keySerializer;
+ protected Serializer uncompressedValSerializer;
+ protected Serializer compressedValSerializer;
// Insert a globally unique 16-byte value every few entries, so that one
// can seek into the middle of a file and then synchronize with record
@@ -923,10 +914,9 @@ public class SequenceFile {
this.codec = codec;
this.metadata = metadata;
SerializationFactory serializationFactory = new SerializationFactory(conf);
- this.keySerializer = getSerializer(serializationFactory, keyClass, metadata);
+ this.keySerializer = serializationFactory.getSerializer(keyClass);
this.keySerializer.open(buffer);
- this.uncompressedValSerializer = getSerializer(serializationFactory,
- valClass, metadata);
+ this.uncompressedValSerializer = serializationFactory.getSerializer(valClass);
this.uncompressedValSerializer.open(buffer);
if (this.codec != null) {
ReflectionUtils.setConf(this.codec, this.conf);
@@ -934,20 +924,11 @@ public class SequenceFile {
this.deflateFilter = this.codec.createOutputStream(buffer, compressor);
this.deflateOut =
new DataOutputStream(new BufferedOutputStream(deflateFilter));
- this.compressedValSerializer = getSerializer(serializationFactory,
- valClass, metadata);
+ this.compressedValSerializer = serializationFactory.getSerializer(valClass);
this.compressedValSerializer.open(deflateOut);
}
}
- @SuppressWarnings("unchecked")
- private SerializerBase getSerializer(SerializationFactory sf, Class c,
- Metadata metadata) {
- Map stringMetadata = metadata.getMetadataAsStringMap();
- stringMetadata.put(SerializationBase.CLASS_KEY, c.getName());
- return sf.getSerializer(stringMetadata);
- }
-
/** Returns the class of keys in this file. */
public Class getKeyClass() { return keyClass; }
@@ -1432,8 +1413,8 @@ public class SequenceFile {
private DataInputStream valIn = null;
private Decompressor valDecompressor = null;
- private DeserializerBase keyDeserializer;
- private DeserializerBase valDeserializer;
+ private Deserializer keyDeserializer;
+ private Deserializer valDeserializer;
/**
* Construct a reader by opening a file from the given file system.
@@ -1630,24 +1611,21 @@ public class SequenceFile {
SerializationFactory serializationFactory =
new SerializationFactory(conf);
this.keyDeserializer =
- getDeserializer(serializationFactory, getKeyClass(), metadata);
+ getDeserializer(serializationFactory, getKeyClass());
if (!blockCompressed) {
this.keyDeserializer.open(valBuffer);
} else {
this.keyDeserializer.open(keyIn);
}
this.valDeserializer =
- getDeserializer(serializationFactory, getValueClass(), metadata);
+ getDeserializer(serializationFactory, getValueClass());
this.valDeserializer.open(valIn);
}
}
@SuppressWarnings("unchecked")
- private DeserializerBase getDeserializer(SerializationFactory sf, Class c,
- Metadata metadata) {
- Map stringMetadata = metadata.getMetadataAsStringMap();
- stringMetadata.put(SerializationBase.CLASS_KEY, c.getName());
- return sf.getDeserializer(stringMetadata);
+ private Deserializer getDeserializer(SerializationFactory sf, Class c) {
+ return sf.getDeserializer(c);
}
/** Close the file. */
diff --git a/src/java/org/apache/hadoop/io/serializer/Deserializer.java b/src/java/org/apache/hadoop/io/serializer/Deserializer.java
index c2389b4c9e9..1234a57b2b4 100644
--- a/src/java/org/apache/hadoop/io/serializer/Deserializer.java
+++ b/src/java/org/apache/hadoop/io/serializer/Deserializer.java
@@ -34,7 +34,6 @@ import java.io.InputStream;
*
* @param
*/
-@Deprecated
public interface Deserializer {
/**
*
Prepare the deserializer for reading.
diff --git a/src/java/org/apache/hadoop/io/serializer/DeserializerBase.java b/src/java/org/apache/hadoop/io/serializer/DeserializerBase.java
deleted file mode 100644
index 90bbc0e1aa3..00000000000
--- a/src/java/org/apache/hadoop/io/serializer/DeserializerBase.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.io.serializer;
-
-import java.io.Closeable;
-import java.io.IOException;
-import java.io.InputStream;
-
-import org.apache.hadoop.conf.Configured;
-
-public abstract class DeserializerBase extends Configured
- implements Closeable, Deserializer {
-
- /**
- *
- * Deserialize the next object from the underlying input stream.
- * If the object t is non-null then this deserializer
- * may set its internal state to the next object read from the input
- * stream. Otherwise, if the object t is null a new
- * deserialized object will be created.
- *
- * @return the deserialized object
- */
- public abstract T deserialize(T t) throws IOException;
-
-}
diff --git a/src/java/org/apache/hadoop/io/serializer/DeserializerComparator.java b/src/java/org/apache/hadoop/io/serializer/DeserializerComparator.java
index 2b1980e995e..70e8b689e9c 100644
--- a/src/java/org/apache/hadoop/io/serializer/DeserializerComparator.java
+++ b/src/java/org/apache/hadoop/io/serializer/DeserializerComparator.java
@@ -52,13 +52,6 @@ public abstract class DeserializerComparator implements RawComparator {
this.deserializer.open(buffer);
}
- protected DeserializerComparator(DeserializerBase deserializer)
- throws IOException {
-
- this.deserializer = deserializer;
- this.deserializer.open(buffer);
- }
-
public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
try {
diff --git a/src/java/org/apache/hadoop/io/serializer/JavaSerialization.java b/src/java/org/apache/hadoop/io/serializer/JavaSerialization.java
index 2d743574705..57fc0e3b469 100644
--- a/src/java/org/apache/hadoop/io/serializer/JavaSerialization.java
+++ b/src/java/org/apache/hadoop/io/serializer/JavaSerialization.java
@@ -34,10 +34,10 @@ import org.apache.hadoop.io.RawComparator;
*
* @see JavaSerializationComparator
*/
-public class JavaSerialization extends SerializationBase {
+public class JavaSerialization implements Serialization {
static class JavaSerializationDeserializer
- extends DeserializerBase {
+ implements Deserializer {
private ObjectInputStream ois;
@@ -65,15 +65,10 @@ public class JavaSerialization extends SerializationBase {
}
- static class JavaSerializationSerializer
- extends SerializerBase {
+ static class JavaSerializationSerializer
+ implements Serializer {
private ObjectOutputStream oos;
- private Map metadata;
-
- public JavaSerializationSerializer(Map metadata) {
- this.metadata = metadata;
- }
public void open(OutputStream out) throws IOException {
oos = new ObjectOutputStream(out) {
@@ -83,7 +78,7 @@ public class JavaSerialization extends SerializationBase {
};
}
- public void serialize(T object) throws IOException {
+ public void serialize(Serializable object) throws IOException {
oos.reset(); // clear (class) back-references
oos.writeObject(object);
}
@@ -92,53 +87,18 @@ public class JavaSerialization extends SerializationBase {
oos.close();
}
- @Override
- public Map getMetadata() throws IOException {
- return metadata;
- }
}
- public boolean accept(Map metadata) {
- if (!checkSerializationKey(metadata)) {
- return false;
- }
-
- Class> c = getClassFromMetadata(metadata);
+ public boolean accept(Class> c) {
return Serializable.class.isAssignableFrom(c);
}
- public DeserializerBase getDeserializer(
- Map metadata) {
+ public Deserializer getDeserializer(Class c) {
return new JavaSerializationDeserializer();
}
- public SerializerBase getSerializer(
- Map metadata) {
- return new JavaSerializationSerializer(metadata);
+ public Serializer getSerializer(Class c) {
+ return new JavaSerializationSerializer();
}
- @SuppressWarnings("unchecked")
- @Override
- public RawComparator getRawComparator(
- Map metadata) {
- Class> klazz = getClassFromMetadata(metadata);
- if (null == klazz) {
- throw new IllegalArgumentException(
- "Cannot get comparator without " + SerializationBase.CLASS_KEY
- + " set in metadata");
- }
-
- if (Serializable.class.isAssignableFrom(klazz)) {
- try {
- return (RawComparator) new JavaSerializationComparator();
- } catch (IOException ioe) {
- throw new IllegalArgumentException(
- "Could not instantiate JavaSerializationComparator for type "
- + klazz.getName(), ioe);
- }
- } else {
- throw new IllegalArgumentException("Class " + klazz.getName()
- + " is incompatible with JavaSerialization");
- }
- }
}
diff --git a/src/java/org/apache/hadoop/io/serializer/LegacyDeserializer.java b/src/java/org/apache/hadoop/io/serializer/LegacyDeserializer.java
deleted file mode 100644
index 8e5036cc7b7..00000000000
--- a/src/java/org/apache/hadoop/io/serializer/LegacyDeserializer.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.io.serializer;
-
-import java.io.IOException;
-import java.io.InputStream;
-
-@SuppressWarnings("deprecation")
-class LegacyDeserializer extends DeserializerBase {
-
- private Deserializer deserializer;
-
- public LegacyDeserializer(Deserializer deserializer) {
- this.deserializer = deserializer;
- }
-
- @Override
- public void open(InputStream in) throws IOException {
- deserializer.open(in);
- }
-
- @Override
- public T deserialize(T t) throws IOException {
- return deserializer.deserialize(t);
- }
-
- @Override
- public void close() throws IOException {
- deserializer.close();
- }
-
-}
diff --git a/src/java/org/apache/hadoop/io/serializer/LegacySerialization.java b/src/java/org/apache/hadoop/io/serializer/LegacySerialization.java
deleted file mode 100644
index e97a673a32d..00000000000
--- a/src/java/org/apache/hadoop/io/serializer/LegacySerialization.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.io.serializer;
-
-import java.util.Map;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.RawComparator;
-
-/**
- *
- * Wraps a legacy {@link Serialization} as a {@link SerializationBase}.
- *
- *
- * @param
- */
-@SuppressWarnings("deprecation")
-class LegacySerialization extends SerializationBase {
-
- private Serialization serialization;
-
- public LegacySerialization(Serialization serialization,
- Configuration conf) {
- this.serialization = serialization;
- setConf(conf);
- }
-
- Serialization getUnderlyingSerialization() {
- return serialization;
- }
-
- @Deprecated
- @Override
- public boolean accept(Class> c) {
- return serialization.accept(c);
- }
-
- @Deprecated
- @Override
- public Deserializer getDeserializer(Class c) {
- return serialization.getDeserializer(c);
- }
-
- @Deprecated
- @Override
- public Serializer getSerializer(Class c) {
- return serialization.getSerializer(c);
- }
-
- @Override
- public boolean accept(Map metadata) {
- Class> c = getClassFromMetadata(metadata);
- return accept(c);
- }
-
- @SuppressWarnings("unchecked")
- @Override
- public SerializerBase getSerializer(Map metadata) {
- Class c = (Class) getClassFromMetadata(metadata);
- return new LegacySerializer(getSerializer(c));
- }
-
- @SuppressWarnings("unchecked")
- @Override
- public DeserializerBase getDeserializer(Map metadata) {
- Class c = (Class) getClassFromMetadata(metadata);
- return new LegacyDeserializer(getDeserializer(c));
- }
-
- @Override
- public RawComparator getRawComparator(Map metadata) {
- // Since this method is being added to an API meant to provide legacy
- // compatability with deprecated serializers, leaving this as an incomplete
- // stub.
-
- throw new UnsupportedOperationException(
- "LegacySerialization does not provide raw comparators");
- }
-
-}
diff --git a/src/java/org/apache/hadoop/io/serializer/LegacySerializer.java b/src/java/org/apache/hadoop/io/serializer/LegacySerializer.java
deleted file mode 100644
index f623cab1714..00000000000
--- a/src/java/org/apache/hadoop/io/serializer/LegacySerializer.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.io.serializer;
-
-import java.io.IOException;
-import java.io.OutputStream;
-import java.util.Collections;
-import java.util.Map;
-
-@SuppressWarnings("deprecation")
-class LegacySerializer extends SerializerBase {
-
- private Serializer serializer;
-
- public LegacySerializer(Serializer serializer) {
- this.serializer = serializer;
- }
-
- @Override
- public void open(OutputStream out) throws IOException {
- serializer.open(out);
- }
-
- @Override
- public void serialize(T t) throws IOException {
- serializer.serialize(t);
- }
-
- @Override
- public void close() throws IOException {
- serializer.close();
- }
-
- @Override
- public Map getMetadata() throws IOException {
- return Collections.emptyMap();
- }
-
-}
diff --git a/src/java/org/apache/hadoop/io/serializer/Serialization.java b/src/java/org/apache/hadoop/io/serializer/Serialization.java
index ec9681e3c84..6e724bd78b1 100644
--- a/src/java/org/apache/hadoop/io/serializer/Serialization.java
+++ b/src/java/org/apache/hadoop/io/serializer/Serialization.java
@@ -24,7 +24,6 @@ package org.apache.hadoop.io.serializer;
*
* @param
*/
-@Deprecated
public interface Serialization {
/**
diff --git a/src/java/org/apache/hadoop/io/serializer/SerializationBase.java b/src/java/org/apache/hadoop/io/serializer/SerializationBase.java
deleted file mode 100644
index 08e9d5bab02..00000000000
--- a/src/java/org/apache/hadoop/io/serializer/SerializationBase.java
+++ /dev/null
@@ -1,117 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.io.serializer;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.hadoop.conf.Configured;
-import org.apache.hadoop.io.RawComparator;
-
-/**
- *
- * Encapsulates a {@link SerializerBase}/{@link DeserializerBase} pair.
- *
- *
- * @param
- */
-public abstract class SerializationBase extends Configured
- implements Serialization {
-
- public static final String SERIALIZATION_KEY = "Serialization-Class";
- public static final String CLASS_KEY = "Serialized-Class";
-
- public static Map getMetadataFromClass(Class> c) {
- Map metadata = new HashMap();
- metadata.put(CLASS_KEY, c.getName());
- return metadata;
- }
-
- @Deprecated
- @Override
- public boolean accept(Class> c) {
- return accept(getMetadataFromClass(c));
- }
-
- @Deprecated
- @Override
- public Deserializer getDeserializer(Class c) {
- return getDeserializer(getMetadataFromClass(c));
- }
-
- @Deprecated
- @Override
- public Serializer getSerializer(Class c) {
- return getSerializer(getMetadataFromClass(c));
- }
-
- /**
- * Allows clients to test whether this {@link SerializationBase} supports the
- * given metadata.
- */
- public abstract boolean accept(Map metadata);
-
- /**
- * @return a {@link SerializerBase} for the given metadata.
- */
- public abstract SerializerBase getSerializer(Map metadata);
-
- /**
- * @return a {@link DeserializerBase} for the given metadata.
- */
- public abstract DeserializerBase getDeserializer(
- Map metadata);
-
- public Class> getClassFromMetadata(Map metadata) {
- String classname = metadata.get(CLASS_KEY);
- if (classname == null) {
- return null;
- }
- try {
- return getConf().getClassByName(classname);
- } catch (ClassNotFoundException e) {
- throw new IllegalArgumentException(e);
- }
- }
-
- /** Provide a raw comparator for the specified serializable class.
- * Requires a serialization-specific metadata entry to name the class
- * to compare (e.g., "Serialized-Class" for JavaSerialization and
- * WritableSerialization).
- * @param metadata a set of string mappings providing serialization-specific
- * arguments that parameterize the data being serialized/compared.
- * @return a {@link RawComparator} for the given metadata.
- * @throws UnsupportedOperationException if it cannot instantiate a RawComparator
- * for this given metadata.
- */
- public abstract RawComparator getRawComparator(Map metadata);
-
- /**
- * Check that the SERIALIZATION_KEY, if set, matches the current class.
- * @param metadata the serialization metadata to check.
- * @return true if SERIALIZATION_KEY is unset, or if it matches the current class
- * (meaning that accept() should continue processing), or false if it is a mismatch,
- * meaning that accept() should return false.
- */
- protected boolean checkSerializationKey(Map metadata) {
- String intendedSerializer = metadata.get(SERIALIZATION_KEY);
- return intendedSerializer == null ||
- getClass().getName().equals(intendedSerializer);
- }
-}
diff --git a/src/java/org/apache/hadoop/io/serializer/SerializationFactory.java b/src/java/org/apache/hadoop/io/serializer/SerializationFactory.java
index 9dec1537cf0..73c98456942 100644
--- a/src/java/org/apache/hadoop/io/serializer/SerializationFactory.java
+++ b/src/java/org/apache/hadoop/io/serializer/SerializationFactory.java
@@ -20,13 +20,11 @@ package org.apache.hadoop.io.serializer;
import java.util.ArrayList;
import java.util.List;
-import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
-import org.apache.hadoop.io.serializer.avro.AvroGenericSerialization;
import org.apache.hadoop.io.serializer.avro.AvroReflectSerialization;
import org.apache.hadoop.io.serializer.avro.AvroSpecificSerialization;
import org.apache.hadoop.util.ReflectionUtils;
@@ -34,7 +32,7 @@ import org.apache.hadoop.util.StringUtils;
/**
*
- * A factory for {@link SerializationBase}s.
+ * A factory for {@link Serialization}s.
*
*/
public class SerializationFactory extends Configured {
@@ -42,10 +40,7 @@ public class SerializationFactory extends Configured {
private static final Log LOG =
LogFactory.getLog(SerializationFactory.class.getName());
- private List<SerializationBase<?>> serializations =
- new ArrayList<SerializationBase<?>>();
- private List<SerializationBase<?>> legacySerializations =
- new ArrayList<SerializationBase<?>>();
+ private List<Serialization<?>> serializations = new ArrayList<Serialization<?>>();
/**
*
@@ -59,8 +54,7 @@ public class SerializationFactory extends Configured {
for (String serializerName : conf.getStrings("io.serializations",
new String[]{WritableSerialization.class.getName(),
AvroSpecificSerialization.class.getName(),
- AvroReflectSerialization.class.getName(),
- AvroGenericSerialization.class.getName()})) {
+ AvroReflectSerialization.class.getName()})) {
add(conf, serializerName);
}
}
@@ -68,64 +62,32 @@ public class SerializationFactory extends Configured {
@SuppressWarnings("unchecked")
private void add(Configuration conf, String serializationName) {
try {
- Class<?> serializationClass = conf.getClassByName(serializationName);
- if (SerializationBase.class.isAssignableFrom(serializationClass)) {
- serializations.add((SerializationBase)
- ReflectionUtils.newInstance(serializationClass, getConf()));
- } else if (Serialization.class.isAssignableFrom(serializationClass)) {
- Serialization serialization = (Serialization)
- ReflectionUtils.newInstance(serializationClass, getConf());
- legacySerializations.add(new LegacySerialization(serialization,
- getConf()));
- } else {
- LOG.warn("Serialization class " + serializationName + " is not an " +
- "instance of Serialization or BaseSerialization.");
- }
+ Class<? extends Serialization> serializionClass =
+ (Class<? extends Serialization>) conf.getClassByName(serializationName);
+ serializations.add((Serialization)
+ ReflectionUtils.newInstance(serializionClass, getConf()));
} catch (ClassNotFoundException e) {
LOG.warn("Serialization class not found: " +
StringUtils.stringifyException(e));
}
}
- @Deprecated
public <T> Serializer<T> getSerializer(Class<T> c) {
return getSerialization(c).getSerializer(c);
}
- @Deprecated
public <T> Deserializer<T> getDeserializer(Class<T> c) {
return getSerialization(c).getDeserializer(c);
}
- @Deprecated
- public <T> Serialization<T> getSerialization(Class<T> c) {
- return getSerialization(SerializationBase.getMetadataFromClass(c));
- }
-
- public <T> SerializerBase<T> getSerializer(Map<String, String> metadata) {
- SerializationBase<T> serialization = getSerialization(metadata);
- return serialization.getSerializer(metadata);
- }
-
- public <T> DeserializerBase<T> getDeserializer(Map<String, String> metadata) {
- SerializationBase<T> serialization = getSerialization(metadata);
- return serialization.getDeserializer(metadata);
- }
-
@SuppressWarnings("unchecked")
- public <T> SerializationBase<T> getSerialization(Map<String, String> metadata) {
- for (SerializationBase<?> serialization : serializations) {
- if (serialization.accept(metadata)) {
- return (SerializationBase<T>) serialization;
- }
- }
- // Look in the legacy serializations last, since they ignore
- // non-class metadata
- for (SerializationBase<?> serialization : legacySerializations) {
- if (serialization.accept(metadata)) {
- return (SerializationBase<T>) serialization;
+ public <T> Serialization<T> getSerialization(Class<T> c) {
+ for (Serialization serialization : serializations) {
+ if (serialization.accept(c)) {
+ return (Serialization<T>) serialization;
}
}
return null;
}
+
}
diff --git a/src/java/org/apache/hadoop/io/serializer/SerializerBase.java b/src/java/org/apache/hadoop/io/serializer/SerializerBase.java
deleted file mode 100644
index 7c2cb347d21..00000000000
--- a/src/java/org/apache/hadoop/io/serializer/SerializerBase.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.io.serializer;
-
-import java.io.Closeable;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.util.Map;
-
-import org.apache.hadoop.conf.Configured;
-
-public abstract class SerializerBase<T> extends Configured
- implements Closeable, Serializer<T> {
-
- /**
- *
- */
- public abstract void serialize(T t) throws IOException;
-
- public abstract Map<String, String> getMetadata() throws IOException;
-
-}
diff --git a/src/java/org/apache/hadoop/io/serializer/WritableSerialization.java b/src/java/org/apache/hadoop/io/serializer/WritableSerialization.java
index 987888a8b57..2e08a860dc8 100644
--- a/src/java/org/apache/hadoop/io/serializer/WritableSerialization.java
+++ b/src/java/org/apache/hadoop/io/serializer/WritableSerialization.java
@@ -26,20 +26,19 @@ import java.io.OutputStream;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.RawComparator;
+import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableComparable;
-import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.util.ReflectionUtils;
-import org.apache.hadoop.util.StringUtils;
/**
- * A {@link SerializationBase} for {@link Writable}s that delegates to
+ * A {@link Serialization} for {@link Writable}s that delegates to
* {@link Writable#write(java.io.DataOutput)} and
* {@link Writable#readFields(java.io.DataInput)}.
*/
-public class WritableSerialization extends SerializationBase<Writable> {
- static class WritableDeserializer extends DeserializerBase<Writable> {
+public class WritableSerialization extends Configured
+ implements Serialization<Writable> {
+ static class WritableDeserializer extends Configured
+ implements Deserializer<Writable> {
private Class<?> writableClass;
private DataInputStream dataIn;
@@ -78,30 +77,10 @@ public class WritableSerialization extends SerializationBase {
}
- static class WritableSerializer extends SerializerBase<Writable> {
+ static class WritableSerializer extends Configured implements
+ Serializer<Writable> {
- private Map<String, String> metadata;
private DataOutputStream dataOut;
- private Class<?> serializedClass;
-
- public WritableSerializer(Configuration conf,
- Map<String, String> metadata) {
- this.metadata = metadata;
-
- // If this metadata specifies a serialized class, memoize the
- // class object for this.
- String className = this.metadata.get(CLASS_KEY);
- if (null != className) {
- try {
- this.serializedClass = conf.getClassByName(className);
- } catch (ClassNotFoundException cnfe) {
- throw new RuntimeException(cnfe);
- }
- } else {
- throw new UnsupportedOperationException("the "
- + CLASS_KEY + " metadata is missing, but is required.");
- }
- }
@Override
public void open(OutputStream out) {
@@ -114,10 +93,6 @@ public class WritableSerialization extends SerializationBase {
@Override
public void serialize(Writable w) throws IOException {
- if (serializedClass != w.getClass()) {
- throw new IOException("Type mismatch in serialization: expected "
- + serializedClass + "; received " + w.getClass());
- }
w.write(dataOut);
}
@@ -126,45 +101,21 @@ public class WritableSerialization extends SerializationBase {
dataOut.close();
}
- @Override
- public Map<String, String> getMetadata() throws IOException {
- return metadata;
- }
-
}
@Override
- public boolean accept(Map<String, String> metadata) {
- if (!checkSerializationKey(metadata)) {
- return false;
- }
-
- Class<?> c = getClassFromMetadata(metadata);
- return c == null ? false : Writable.class.isAssignableFrom(c);
+ public boolean accept(Class<?> c) {
+ return Writable.class.isAssignableFrom(c);
}
@Override
- public SerializerBase<Writable> getSerializer(Map<String, String> metadata) {
- return new WritableSerializer(getConf(), metadata);
+ public Serializer<Writable> getSerializer(Class<Writable> c) {
+ return new WritableSerializer();
}
@Override
- public DeserializerBase<Writable> getDeserializer(Map<String, String> metadata) {
- Class<?> c = getClassFromMetadata(metadata);
+ public Deserializer<Writable> getDeserializer(Class<Writable> c) {
return new WritableDeserializer(getConf(), c);
}
- @Override
- @SuppressWarnings("unchecked")
- public RawComparator<Writable> getRawComparator(Map<String, String> metadata) {
- Class<?> klazz = getClassFromMetadata(metadata);
- if (null == klazz) {
- throw new IllegalArgumentException(
- "Cannot get comparator without " + SerializationBase.CLASS_KEY
- + " set in metadata");
- }
-
- return (RawComparator) WritableComparator.get(
- (Class<? extends WritableComparable>)klazz);
- }
}
diff --git a/src/java/org/apache/hadoop/io/serializer/avro/AvroComparator.java b/src/java/org/apache/hadoop/io/serializer/avro/AvroComparator.java
deleted file mode 100644
index 2f499d067f9..00000000000
--- a/src/java/org/apache/hadoop/io/serializer/avro/AvroComparator.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.io.serializer.avro;
-
-import org.apache.avro.Schema;
-import org.apache.avro.io.BinaryData;
-import org.apache.hadoop.io.RawComparator;
-
-/**
- *
- * A {@link RawComparator} that uses Avro to extract data from the
- * source stream and compare their contents without explicit
- * deserialization.
- */
-public class AvroComparator<T extends Comparable<T>>
- implements RawComparator<T> {
-
- private final Schema schema;
-
- public AvroComparator(final Schema s) {
- this.schema = s;
- }
-
- public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
- return BinaryData.compare(b1, s1, b2, s2, schema);
- }
-
- public int compare(T t1, T t2) {
- return t1.compareTo(t2);
- }
-
-}
diff --git a/src/java/org/apache/hadoop/io/serializer/avro/AvroGenericSerialization.java b/src/java/org/apache/hadoop/io/serializer/avro/AvroGenericSerialization.java
deleted file mode 100644
index ee6f0ace3e2..00000000000
--- a/src/java/org/apache/hadoop/io/serializer/avro/AvroGenericSerialization.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.io.serializer.avro;
-
-import java.util.Map;
-
-import org.apache.avro.Schema;
-import org.apache.avro.generic.GenericData;
-import org.apache.avro.generic.GenericDatumReader;
-import org.apache.avro.generic.GenericDatumWriter;
-import org.apache.avro.io.DatumReader;
-import org.apache.avro.io.DatumWriter;
-import org.apache.hadoop.io.serializer.SerializationBase;
-
-/**
- * Serialization for Avro Generic classes. For a class to be accepted by this
- * serialization it must have a schema specified.
- * The schema used is the one set by {@link AvroSerialization#AVRO_SCHEMA_KEY}.
- */
-@SuppressWarnings("unchecked")
-public class AvroGenericSerialization extends AvroSerialization