HBASE-6316 Confirm can upgrade to 0.96 from 0.94 by just stopping and restarting

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1396428 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Michael Stack 2012-10-10 00:07:59 +00:00
parent e4e704a044
commit 32abc3835b
3 changed files with 72 additions and 31 deletions

View File

@@ -20,12 +20,12 @@ package org.apache.hadoop.hbase.io;
import java.io.BufferedInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.IOException;
import java.io.InputStream;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
@@ -175,9 +175,27 @@ public class Reference implements Writable {
*/
public static Reference read(final FileSystem fs, final Path p)
throws IOException {
FSDataInputStream in = fs.open(p);
InputStream in = fs.open(p);
try {
return parseFrom(in);
// I need to be able to move back in the stream if this is not a pb serialization so I can
// do the Writable decoding instead.
in = in.markSupported()? in: new BufferedInputStream(in);
int pblen = ProtobufUtil.lengthOfPBMagic();
in.mark(pblen);
byte [] pbuf = new byte[pblen];
int read = in.read(pbuf);
if (read != pblen) throw new IOException("read=" + read + ", wanted=" + pblen);
// WATCHOUT! Return in middle of function!!!
if (ProtobufUtil.isPBMagicPrefix(pbuf)) return convert(FSProtos.Reference.parseFrom(in));
// Else presume Writables. Need to reset the stream since it didn't start w/ pb.
// We won't bother rewriting thie Reference as a pb since Reference is transitory.
in.reset();
Reference r = new Reference();
DataInputStream dis = new DataInputStream(in);
// Set in = dis so it gets the close below in the finally on our way out.
in = dis;
r.readFields(dis);
return r;
} finally {
in.close();
}
@@ -208,32 +226,4 @@ public class Reference implements Writable {
/**
 * @return This Reference serialized as a protobuf, prefixed with the pb magic
 *   bytes so readers can distinguish it from the legacy Writable form.
 * @throws IOException on serialization failure.
 */
byte [] toByteArray() throws IOException {
return ProtobufUtil.prependPBMagic(convert().toByteArray());
}
/**
 * Parses a {@link Reference} instance from the passed in stream.  Handles both
 * the pb-magic-prefixed protobuf serialization and the legacy Writable form.
 * @param in Stream positioned at the start of a serialized Reference.
 * @return An instance of Reference.
 * @throws IOException if the stream is truncated or cannot be read.
 */
static Reference parseFrom(final FSDataInputStream in) throws IOException {
// I need to be able to move back in the stream if this is not a pb serialization so I can
// do the Writable decoding instead.
InputStream is = in.markSupported()? in: new BufferedInputStream(in);
int pblen = ProtobufUtil.lengthOfPBMagic();
is.mark(pblen);
byte [] pbuf = new byte[pblen];
int read = is.read(pbuf);
if (read != pblen) throw new IOException("read=" + read + ", wanted=" + pblen);
if (ProtobufUtil.isPBMagicPrefix(pbuf)) {
return convert(FSProtos.Reference.parseFrom(is));
} else {
// Presume Writables. Need to reset the stream since it didn't start w/ pb.
// We won't bother rewriting the Reference as a pb since Reference is transitory.
// NOTE(review): mark() was called on the (possibly buffered) wrapper `is`, but
// reset() and readFields() below operate on the raw `in` -- when `is` is a
// BufferedInputStream the magic bytes already consumed are never replayed.
// This mismatch appears to be why this method is being replaced by read().
in.reset();
Reference r = new Reference();
r.readFields(in);
return r;
}
}
}

View File

@@ -0,0 +1,51 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.io;
import java.io.File;
import java.io.IOException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.SmallTests;
import org.junit.Test;
import org.junit.experimental.categories.Category;
/**
 * Reference tests that run against the local filesystem.
 */
@Category(SmallTests.class)
public class TestReference {
  private final HBaseTestingUtility testUtil = new HBaseTestingUtility();

  /**
   * Verifies that a Reference persisted before 0.96 -- i.e. as a serialized
   * Writable rather than a protobuf -- can still be parsed, exercising the
   * Writable-decoding code path.
   * @throws IOException
   */
  @Test
  public void testParsingWritableReference() throws IOException {
    // A Reference file written by 0.94, kept under the test data directory.
    final String testSrcDir = System.getProperty("project.build.testSourceDirectory", "src/test");
    final String datafile = testSrcDir + File.separator + "data" + File.separator +
        "a6a6562b777440fd9c34885428f5cb61.21e75333ada3d5bafb34bb918f29576c";
    FileSystem fs = FileSystem.get(this.testUtil.getConfiguration());
    Reference.read(fs, new Path(datafile));
  }
}