HADOOP-6486. fix common classes to work with Avro 1.3 reflection. Contributed by cutting.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@921577 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Thomas White 2010-03-10 21:24:13 +00:00
parent fe01f920a1
commit 74d409c4bb
11 changed files with 140 additions and 19 deletions

View File

@ -1,5 +1,4 @@
Hadoop Change Log
# Add directory level at the storage directory
Trunk (unreleased changes)
@ -188,6 +187,9 @@ Trunk (unreleased changes)
HADOOP-6537. Declare more detailed exceptions in FileContext and
AbstractFileSystem (Suresh Srinivas via Sanjay Radia)
HADOOP-6486. fix common classes to work with Avro 1.3 reflection.
(cutting via tomwhite)
OPTIMIZATIONS
HADOOP-6467. Improve the performance on HarFileSystem.listStatus(..).

View File

@ -19,17 +19,32 @@
<property name="repo.maven.org" value="http://repo1.maven.org/maven2/" override="false"/>
<property name="maven2.pattern" value="[organisation]/[module]/[revision]/[module]-[revision]"/>
<property name="repo.dir" value="${user.home}/.m2/repository"/>
<property name="maven2.pattern.ext" value="${maven2.pattern}.[ext]"/>
<!-- pull in the local repository -->
<include url="${ivy.default.conf.dir}/ivyconf-local.xml"/>
<settings defaultResolver="default"/>
<property name="resolvers" value="default" override="false"/>
<settings defaultResolver="${resolvers}"/>
<resolvers>
<!--ibiblio resolvers-->
<ibiblio name="maven2" root="${repo.maven.org}" m2compatible="true"/>
<filesystem name="fs" m2compatible="true" force="true">
<artifact pattern="${repo.dir}/[organisation]/[module]/[revision]/[module]-[revision].[ext]"/>
<ivy pattern="${repo.dir}/[organisation]/[module]/[revision]/[module]-[revision].pom"/>
</filesystem>
<chain name="default" dual="true">
<resolver ref="maven2"/>
</chain>
<chain name="internal" dual="true">
<resolver ref="fs"/>
<resolver ref="maven2"/>
</chain>
</resolvers>
</ivysettings>

View File

@ -17,7 +17,7 @@
apacheant.version=1.7.1
ant-task.version=2.0.10
avro.version=1.2.0
avro.version=1.3.0
checkstyle.version=4.2
@ -44,8 +44,6 @@ hsqldb.version=1.8.0.10
ivy.version=2.1.0-rc1
jackson.version=1.0.1
jasper.version=5.5.12
jsp.version=2.1
jsp-api.version=5.5.12
@ -65,8 +63,6 @@ mina-core.version=2.0.0-M5
oro.version=2.0.8
paranamer.version=1.5
rats-lib.version=0.6
servlet.version=4.0.6

View File

@ -20,6 +20,7 @@ package org.apache.hadoop.fs;
import java.net.*;
import java.io.*;
import org.apache.avro.reflect.Stringable;
import org.apache.hadoop.conf.Configuration;
@ -27,6 +28,7 @@ import org.apache.hadoop.conf.Configuration;
* Path strings use slash as the directory separator. A path string is
* absolute if it begins with a slash.
*/
@Stringable
public class Path implements Comparable {
/** The directory separator, a slash. */

View File

@ -23,23 +23,35 @@ import java.io.DataOutput;
import java.io.IOException;
import java.util.EnumSet;
import java.util.Iterator;
import java.util.Collection;
import java.util.AbstractCollection;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
/** A Writable wrapper for EnumSet. */
public class EnumSetWritable<E extends Enum<E>> implements Writable,
Configurable {
public class EnumSetWritable<E extends Enum<E>> extends AbstractCollection<E>
implements Writable, Configurable {
private EnumSet<E> value;
private Class<E> elementType;
private transient Class<E> elementType;
private Configuration conf;
private transient Configuration conf;
EnumSetWritable() {
}
public Iterator<E> iterator() { return value.iterator(); }
public int size() { return value.size(); }
/**
 * Adds the element to the wrapped set, lazily creating the set on first use.
 *
 * @param e the enum constant to add
 * @return true if the set changed as a result of the call, per the
 *         {@link java.util.Collection#add} contract
 */
public boolean add(E e) {
  if (value == null) {
    // First element: materialize the backing EnumSet and register it.
    value = EnumSet.of(e);
    set(value, null);
    // The set went from empty to {e}, so it changed; the original
    // "return value.add(e)" here returned false, violating the
    // Collection.add contract.
    return true;
  }
  return value.add(e);
}
/**
* Construct a new EnumSetWritable. If the <tt>value</tt> argument is null or
* its size is zero, the <tt>elementType</tt> argument must not be null. If

View File

@ -33,6 +33,8 @@ import java.text.CharacterIterator;
import java.text.StringCharacterIterator;
import java.util.Arrays;
import org.apache.avro.reflect.Stringable;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@ -45,6 +47,7 @@ import org.apache.commons.logging.LogFactory;
* byte array contains valid UTF8 code, calculating the length of an encoded
* string.
*/
@Stringable
public class Text extends BinaryComparable
implements WritableComparable<BinaryComparable> {
private static final Log LOG= LogFactory.getLog(Text.class);

View File

@ -24,6 +24,7 @@ import java.io.OutputStream;
import java.util.Map;
import org.apache.avro.Schema;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.DatumReader;
@ -125,7 +126,7 @@ public abstract class AvroSerialization<T> extends SerializationBase<T> {
@Override
public void open(InputStream in) throws IOException {
inStream = in;
decoder = new BinaryDecoder(in);
decoder = DecoderFactory.defaultFactory().createBinaryDecoder(in, null);
}
}

View File

@ -23,6 +23,7 @@ import java.net.URI;
import java.net.URISyntaxException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.AvroTestUtil;
import junit.framework.TestCase;
@ -195,4 +196,10 @@ public class TestPath extends TestCase {
new Path("file").makeQualified(defaultUri, new Path(wd)));
}
/** Verifies that Path reflects to an Avro string schema tagged with its Java class. */
public void testAvroReflect() throws Exception {
  String expectedSchema =
      "{\"type\":\"string\",\"java-class\":\"org.apache.hadoop.fs.Path\"}";
  AvroTestUtil.testReflect(new Path("foo"), expectedSchema);
}
}

View File

@ -0,0 +1,58 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.io;
import java.io.IOException;
import java.io.ByteArrayOutputStream;
import java.lang.reflect.Type;
import org.apache.avro.Schema;
import org.apache.avro.reflect.ReflectData;
import org.apache.avro.reflect.ReflectDatumWriter;
import org.apache.avro.reflect.ReflectDatumReader;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.DecoderFactory;
import static junit.framework.TestCase.assertEquals;
/** Helpers for checking Avro reflect-based schemas and round-trip serialization. */
public class AvroTestUtil {

  /**
   * Checks schema induction and round-trip serialization for a value,
   * reflecting on the value's own runtime class.
   *
   * @param value  instance to serialize and compare
   * @param schema expected Avro schema, as a JSON string
   */
  public static void testReflect(Object value, String schema) throws Exception {
    testReflect(value, value.getClass(), schema);
  }

  /**
   * Checks that reflection on {@code type} induces the expected schema, then
   * verifies that {@code value} survives a binary write/read round trip.
   *
   * @param value  instance to serialize and compare
   * @param type   reflected type used for schema induction
   * @param schema expected Avro schema, as a JSON string
   */
  public static void testReflect(Object value, Type type, String schema)
      throws Exception {

    // The induced schema must match the expected one exactly.
    Schema expected = Schema.parse(schema);
    Schema induced = ReflectData.get().getSchema(type);
    assertEquals(expected, induced);

    // Round trip: write with the reflect writer, read back with the
    // reflect reader, and require equality with the original value.
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    new ReflectDatumWriter<Object>(induced)
        .write(value, new BinaryEncoder(buffer));
    Object roundTripped = new ReflectDatumReader<Object>(induced)
        .read(null,
              DecoderFactory.defaultFactory()
                  .createBinaryDecoder(buffer.toByteArray(), null));
    assertEquals(value, roundTripped);
  }

}

View File

@ -19,7 +19,16 @@
package org.apache.hadoop.io;
import java.io.IOException;
import java.io.ByteArrayOutputStream;
import java.util.EnumSet;
import java.lang.reflect.Type;
import org.apache.avro.Schema;
import org.apache.avro.reflect.ReflectData;
import org.apache.avro.reflect.ReflectDatumWriter;
import org.apache.avro.reflect.ReflectDatumReader;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.DecoderFactory;
import junit.framework.TestCase;
@ -100,4 +109,14 @@ public class TestEnumSetWritable extends TestCase {
.readObject(in, null)).get();
assertEquals(read, null);
}
// Field whose generic type is reflected on below; must stay public so
// getField can see it.
public EnumSetWritable<TestEnumSet> testField;

/**
 * Verifies that EnumSetWritable reflects to an Avro array of the enum type,
 * tagged with its Java class.
 */
public void testAvroReflect() throws Exception {
  String expectedSchema = "{\"type\":\"array\",\"items\":{\"type\":\"enum\",\"name\":\"TestEnumSet\",\"namespace\":\"org.apache.hadoop.io.TestEnumSetWritable$\",\"symbols\":[\"CREATE\",\"OVERWRITE\",\"APPEND\"]},\"java-class\":\"org.apache.hadoop.io.EnumSetWritable\"}";
  Type fieldType =
      TestEnumSetWritable.class.getField("testField").getGenericType();
  AvroTestUtil.testReflect(nonEmptyFlagWritable, fieldType, expectedSchema);
}
}

View File

@ -253,6 +253,12 @@ public class TestText extends TestCase {
thread2.join();
}
/** Verifies that Text reflects to an Avro string schema tagged with its Java class. */
public void testAvroReflect() throws Exception {
  String expectedSchema =
      "{\"type\":\"string\",\"java-class\":\"org.apache.hadoop.io.Text\"}";
  AvroTestUtil.testReflect(new Text("foo"), expectedSchema);
}
public static void main(String[] args) throws Exception
{
TestText test = new TestText("main");