From 74d409c4bb9638cabdf6141841ead719af20a423 Mon Sep 17 00:00:00 2001
From: Thomas White
Date: Wed, 10 Mar 2010 21:24:13 +0000
Subject: [PATCH] HADOOP-6486. fix common classes to work with Avro 1.3
 reflection. Contributed by cutting.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@921577 13f79535-47bb-0310-9956-ffa450edef68
---
 CHANGES.txt                                   | 12 ++--
 ivy/ivysettings.xml                           | 21 ++++++-
 ivy/libraries.properties                      |  6 +-
 src/java/org/apache/hadoop/fs/Path.java       |  2 +
 .../org/apache/hadoop/io/EnumSetWritable.java | 22 +++++--
 src/java/org/apache/hadoop/io/Text.java       |  3 +
 .../io/serializer/avro/AvroSerialization.java |  3 +-
 .../core/org/apache/hadoop/fs/TestPath.java   |  7 +++
 .../org/apache/hadoop/io/AvroTestUtil.java    | 58 +++++++++++++++++++
 .../apache/hadoop/io/TestEnumSetWritable.java | 19 ++++++
 .../core/org/apache/hadoop/io/TestText.java   |  6 ++
 11 files changed, 140 insertions(+), 19 deletions(-)
 create mode 100644 src/test/core/org/apache/hadoop/io/AvroTestUtil.java

diff --git a/CHANGES.txt b/CHANGES.txt
index d4ffaa7ca1a..178b3ddf21a 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -1,5 +1,4 @@
 Hadoop Change Log
-# Add directory level at the storage directory
 
 Trunk (unreleased changes)
 
@@ -182,11 +181,14 @@ Trunk (unreleased changes)
     HADOOP-6589. Provide better error messages when RPC authentication fails.
     (Kan Zhang via omalley)
 
-    HADOOP-6599 Split existing RpcMetrics into RpcMetrics & RpcDetailedMetrics.
-    (Suresh Srinivas via Sanjay Radia)
+    HADOOP-6599 Split existing RpcMetrics into RpcMetrics & RpcDetailedMetrics.
+    (Suresh Srinivas via Sanjay Radia)
 
-    HADOOP-6537 Declare more detailed exceptions in FileContext and
-    AbstractFileSystem (Suresh Srinivas via Sanjay Radia)
+    HADOOP-6537 Declare more detailed exceptions in FileContext and
+    AbstractFileSystem (Suresh Srinivas via Sanjay Radia)
+
+    HADOOP-6486. fix common classes to work with Avro 1.3 reflection.
+    (cutting via tomwhite)
 
 OPTIMIZATIONS
 
diff --git a/ivy/ivysettings.xml b/ivy/ivysettings.xml
index 739d545970c..21ce1cdfc93 100644
--- a/ivy/ivysettings.xml
+++ b/ivy/ivysettings.xml
@@ -19,17 +19,32 @@
+
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/ivy/libraries.properties b/ivy/libraries.properties
index 5f6743740a7..871859be07a 100644
--- a/ivy/libraries.properties
+++ b/ivy/libraries.properties
@@ -17,7 +17,7 @@ apacheant.version=1.7.1
 
 ant-task.version=2.0.10
 
-avro.version=1.2.0
+avro.version=1.3.0
 
 checkstyle.version=4.2
 
@@ -44,8 +44,6 @@ hsqldb.version=1.8.0.10
 
 ivy.version=2.1.0-rc1
 
-jackson.version=1.0.1
-
 jasper.version=5.5.12
 jsp.version=2.1
 jsp-api.version=5.5.12
@@ -65,8 +63,6 @@ mina-core.version=2.0.0-M5
 
 oro.version=2.0.8
 
-paranamer.version=1.5
-
 rats-lib.version=0.6
 
 servlet.version=4.0.6
diff --git a/src/java/org/apache/hadoop/fs/Path.java b/src/java/org/apache/hadoop/fs/Path.java
index 00ceab49ec5..e6702aa8e5f 100644
--- a/src/java/org/apache/hadoop/fs/Path.java
+++ b/src/java/org/apache/hadoop/fs/Path.java
@@ -20,6 +20,7 @@
 import java.net.*;
 import java.io.*;
 
+import org.apache.avro.reflect.Stringable;
 import org.apache.hadoop.conf.Configuration;
 
@@ -27,6 +28,7 @@
  * Path strings use slash as the directory separator. A path string is
  * absolute if it begins with a slash.
  */
+@Stringable
 public class Path implements Comparable {
 
   /** The directory separator, a slash. */
diff --git a/src/java/org/apache/hadoop/io/EnumSetWritable.java b/src/java/org/apache/hadoop/io/EnumSetWritable.java
index 7549dca2b6e..3af36b7df3f 100644
--- a/src/java/org/apache/hadoop/io/EnumSetWritable.java
+++ b/src/java/org/apache/hadoop/io/EnumSetWritable.java
@@ -23,23 +23,35 @@
 import java.io.IOException;
 import java.util.EnumSet;
 import java.util.Iterator;
+import java.util.Collection;
+import java.util.AbstractCollection;
 
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 
 /** A Writable wrapper for EnumSet. */
-public class EnumSetWritable<E extends Enum<E>> implements Writable,
-    Configurable {
+public class EnumSetWritable<E extends Enum<E>> extends AbstractCollection<E>
+  implements Writable, Configurable {
 
   private EnumSet<E> value;
 
-  private Class<E> elementType;
-
-  private Configuration conf;
+  private transient Class<E> elementType;
+  private transient Configuration conf;
 
+  EnumSetWritable() { }
+  public Iterator<E> iterator() { return value.iterator(); }
+  public int size() { return value.size(); }
+  public boolean add(E e) {
+    if (value == null) {
+      value = EnumSet.of(e);
+      set(value, null);
+    }
+    return value.add(e);
+  }
+
   /**
   * Construct a new EnumSetWritable. If the value argument is null or
   * its size is zero, the elementType argument must not be null. If
diff --git a/src/java/org/apache/hadoop/io/Text.java b/src/java/org/apache/hadoop/io/Text.java
index 2110a34f490..f7af6c5de57 100644
--- a/src/java/org/apache/hadoop/io/Text.java
+++ b/src/java/org/apache/hadoop/io/Text.java
@@ -33,6 +33,8 @@
 import java.text.StringCharacterIterator;
 import java.util.Arrays;
 
+import org.apache.avro.reflect.Stringable;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
@@ -45,6 +47,7 @@
  * byte array contains valid UTF8 code, calculating the length of an encoded
  * string.
  */
+@Stringable
 public class Text extends BinaryComparable implements WritableComparable<BinaryComparable> {
   private static final Log LOG= LogFactory.getLog(Text.class);
 
diff --git a/src/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java b/src/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java
index b92e8ed8e79..5adaf3debc0 100644
--- a/src/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java
+++ b/src/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java
@@ -24,6 +24,7 @@
 import java.util.Map;
 
 import org.apache.avro.Schema;
+import org.apache.avro.io.DecoderFactory;
 import org.apache.avro.io.BinaryDecoder;
 import org.apache.avro.io.BinaryEncoder;
 import org.apache.avro.io.DatumReader;
@@ -125,7 +126,7 @@ public T deserialize(T t) throws IOException {
     @Override
     public void open(InputStream in) throws IOException {
       inStream = in;
-      decoder = new BinaryDecoder(in);
+      decoder = DecoderFactory.defaultFactory().createBinaryDecoder(in, null);
     }
 
   }
diff --git a/src/test/core/org/apache/hadoop/fs/TestPath.java b/src/test/core/org/apache/hadoop/fs/TestPath.java
index f11c4671d6f..24c2edbe8ce 100644
--- a/src/test/core/org/apache/hadoop/fs/TestPath.java
+++ b/src/test/core/org/apache/hadoop/fs/TestPath.java
@@ -23,6 +23,7 @@
 import java.net.URISyntaxException;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.AvroTestUtil;
 
 import junit.framework.TestCase;
 
@@ -195,4 +196,10 @@ public void testMakeQualified() throws URISyntaxException {
         new Path("file").makeQualified(defaultUri, new Path(wd)));
   }
 
+  public void testAvroReflect() throws Exception {
+    AvroTestUtil.testReflect
+      (new Path("foo"),
+       "{\"type\":\"string\",\"java-class\":\"org.apache.hadoop.fs.Path\"}");
+  }
+
 }
diff --git a/src/test/core/org/apache/hadoop/io/AvroTestUtil.java b/src/test/core/org/apache/hadoop/io/AvroTestUtil.java
new file mode 100644
index 00000000000..8fad425052f
--- /dev/null
+++ b/src/test/core/org/apache/hadoop/io/AvroTestUtil.java
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.io;
+
+import java.io.IOException;
+import java.io.ByteArrayOutputStream;
+import java.lang.reflect.Type;
+
+import org.apache.avro.Schema;
+import org.apache.avro.reflect.ReflectData;
+import org.apache.avro.reflect.ReflectDatumWriter;
+import org.apache.avro.reflect.ReflectDatumReader;
+import org.apache.avro.io.BinaryEncoder;
+import org.apache.avro.io.DecoderFactory;
+
+import static junit.framework.TestCase.assertEquals;
+
+public class AvroTestUtil {
+
+  public static void testReflect(Object value, String schema) throws Exception {
+    testReflect(value, value.getClass(), schema);
+  }
+
+  public static void testReflect(Object value, Type type, String schema)
+    throws Exception {
+
+    // check that schema matches expected
+    Schema s = ReflectData.get().getSchema(type);
+    assertEquals(Schema.parse(schema), s);
+
+    // check that value is serialized correctly
+    ReflectDatumWriter writer = new ReflectDatumWriter(s);
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    writer.write(value, new BinaryEncoder(out));
+    ReflectDatumReader reader = new ReflectDatumReader(s);
+    Object after =
+      reader.read(null, DecoderFactory.defaultFactory().createBinaryDecoder(
+        out.toByteArray(), null));
+    assertEquals(value, after);
+  }
+
+}
diff --git a/src/test/core/org/apache/hadoop/io/TestEnumSetWritable.java b/src/test/core/org/apache/hadoop/io/TestEnumSetWritable.java
index a512bb1bc2d..2ca6c87f8ed 100644
--- a/src/test/core/org/apache/hadoop/io/TestEnumSetWritable.java
+++ b/src/test/core/org/apache/hadoop/io/TestEnumSetWritable.java
@@ -19,7 +19,16 @@
 package org.apache.hadoop.io;
 
 import java.io.IOException;
+import java.io.ByteArrayOutputStream;
 import java.util.EnumSet;
+import java.lang.reflect.Type;
+
+import org.apache.avro.Schema;
+import org.apache.avro.reflect.ReflectData;
+import org.apache.avro.reflect.ReflectDatumWriter;
+import org.apache.avro.reflect.ReflectDatumReader;
+import org.apache.avro.io.BinaryEncoder;
+import org.apache.avro.io.DecoderFactory;
 
 import junit.framework.TestCase;
 
@@ -100,4 +109,14 @@ public void testSerializeAndDeserializeNull() throws IOException {
         .readObject(in, null)).get();
     assertEquals(read, null);
   }
+
+  public EnumSetWritable<TestEnumSet> testField;
+
+  public void testAvroReflect() throws Exception {
+    String schema = "{\"type\":\"array\",\"items\":{\"type\":\"enum\",\"name\":\"TestEnumSet\",\"namespace\":\"org.apache.hadoop.io.TestEnumSetWritable$\",\"symbols\":[\"CREATE\",\"OVERWRITE\",\"APPEND\"]},\"java-class\":\"org.apache.hadoop.io.EnumSetWritable\"}";
+    Type type =
+      TestEnumSetWritable.class.getField("testField").getGenericType();
+    AvroTestUtil.testReflect(nonEmptyFlagWritable, type, schema);
+  }
+
 }
diff --git a/src/test/core/org/apache/hadoop/io/TestText.java b/src/test/core/org/apache/hadoop/io/TestText.java
index 6e004860991..dc6b1bb82a4 100644
--- a/src/test/core/org/apache/hadoop/io/TestText.java
+++ b/src/test/core/org/apache/hadoop/io/TestText.java
@@ -253,6 +253,12 @@ public void testConcurrentEncodeDecode() throws Exception{
     thread2.join();
   }
 
+  public void testAvroReflect() throws Exception {
+    AvroTestUtil.testReflect
+      (new Text("foo"),
+       "{\"type\":\"string\",\"java-class\":\"org.apache.hadoop.io.Text\"}");
+  }
+
   public static void main(String[] args) throws Exception
   {
     TestText test = new TestText("main");