HDFS-3999. HttpFS OPEN operation expects len parameter, it should be length. (tucu)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1394169 13f79535-47bb-0310-9956-ffa450edef68
parent 50b20477fd
commit 016b86867b
HttpFSParametersProvider.java
@@ -261,7 +261,7 @@ public class HttpFSParametersProvider extends ParametersProvider {
     /**
      * Parameter name.
      */
-    public static final String NAME = "len";
+    public static final String NAME = "length";
 
     /**
      * Constructor.
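With that one-line change, HttpFS reads the requested read size for the OPEN operation from the length query parameter instead of len, matching the WebHDFS REST API. A minimal client-side sketch of such a call; the endpoint, file path, and user name below are hypothetical placeholders, not part of this commit:

import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;

public class HttpFSOpenSketch {
  public static void main(String[] args) throws Exception {
    // Hypothetical HttpFS endpoint, file, and user name; adjust for a real deployment.
    URL url = new URL("http://localhost:14000/webhdfs/v1/tmp/foo"
        + "?user.name=hdfs&op=open&offset=1&length=2");

    // With this fix, HttpFS honors 'length' (previously it only recognized 'len').
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    try (InputStream in = conn.getInputStream()) {
      int b;
      while ((b = in.read()) != -1) {
        System.out.println(b); // prints only the bytes at offsets 1 and 2
      }
    }
  }
}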
TestHttpFSServer.java
@@ -24,6 +24,7 @@ import java.io.File;
 import java.io.FileOutputStream;
 import java.io.FileWriter;
 import java.io.IOException;
+import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.OutputStream;
 import java.io.Writer;
@@ -146,6 +147,7 @@ public class TestHttpFSServer extends HFSTestCase {
     conf.set("httpfs.proxyuser." + HadoopUsersConfTestHelper.getHadoopProxyUser() + ".hosts",
              HadoopUsersConfTestHelper.getHadoopProxyUserHosts());
     conf.set("httpfs.authentication.signature.secret.file", secretFile.getAbsolutePath());
+    conf.set("httpfs.hadoop.config.dir", hadoopConfDir.toString());
     File httpfsSite = new File(new File(homeDir, "conf"), "httpfs-site.xml");
     os = new FileOutputStream(httpfsSite);
     conf.writeXml(os);
@@ -229,6 +231,31 @@ public class TestHttpFSServer extends HFSTestCase {
     reader.close();
   }
 
+  @Test
+  @TestDir
+  @TestJetty
+  @TestHdfs
+  public void testOpenOffsetLength() throws Exception {
+    createHttpFSServer(false);
+
+    byte[] array = new byte[]{0, 1, 2, 3};
+    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
+    fs.mkdirs(new Path("/tmp"));
+    OutputStream os = fs.create(new Path("/tmp/foo"));
+    os.write(array);
+    os.close();
+
+    String user = HadoopUsersConfTestHelper.getHadoopUsers()[0];
+    URL url = new URL(TestJettyHelper.getJettyURL(),
+      MessageFormat.format("/webhdfs/v1/tmp/foo?user.name={0}&op=open&offset=1&length=2", user));
+    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+    Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
+    InputStream is = conn.getInputStream();
+    Assert.assertEquals(1, is.read());
+    Assert.assertEquals(2, is.read());
+    Assert.assertEquals(-1, is.read());
+  }
+
   @Test
   @TestDir
   @TestJetty
CHANGES.txt
@@ -282,6 +282,8 @@ Release 2.0.3-alpha - Unreleased
     HDFS-4000. TestParallelLocalRead fails with "input ByteBuffers
     must be direct buffers". (Colin Patrick McCabe via eli)
 
+    HDFS-3999. HttpFS OPEN operation expects len parameter, it should be length. (tucu)
+
 Release 2.0.2-alpha - 2012-09-07
 
   INCOMPATIBLE CHANGES