HDFS-15265. HttpFS: validate content-type in HttpFSUtils. Contributed by hemanthboyina.
(cherry picked from commit 31b2f687ef)

parent 5187bd37ae
commit 41fad5056c
HttpFSUtils.java
@@ -33,6 +33,8 @@ import java.text.MessageFormat;
 import java.util.List;
 import java.util.Map;
 
+import javax.ws.rs.core.MediaType;
+
 /**
  * Utility methods used by HttpFS classes.
  */
@@ -127,8 +129,17 @@ public class HttpFSUtils {
    * @throws IOException thrown if the <code>InputStream</code> could not be
    * JSON parsed.
    */
-  static Object jsonParse(HttpURLConnection conn) throws IOException {
+  public static Object jsonParse(HttpURLConnection conn) throws IOException {
     try {
+      String contentType = conn.getContentType();
+      if (contentType != null) {
+        final MediaType parsed = MediaType.valueOf(contentType);
+        if (!MediaType.APPLICATION_JSON_TYPE.isCompatible(parsed)) {
+          throw new IOException("Content-Type \"" + contentType
+              + "\" is incompatible with \"" + MediaType.APPLICATION_JSON
+              + "\" (parsed=\"" + parsed + "\")");
+        }
+      }
       JSONParser parser = new JSONParser();
       return parser.parse(
           new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8));
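The check above leans on JAX-RS MediaType semantics: isCompatible() compares only the type/subtype pair (honoring wildcards) and ignores parameters such as charset, so an "application/json;charset=UTF-8" response still parses while "text/html" fails fast. A minimal standalone sketch of that behavior (not part of the commit; the class name MediaTypeCheckDemo is invented, and MediaType.valueOf needs a JAX-RS implementation such as jersey-common on the classpath, since it parses via RuntimeDelegate):

import javax.ws.rs.core.MediaType;

public class MediaTypeCheckDemo {
  public static void main(String[] args) {
    String[] headers = {
        "application/json",
        "application/json;charset=UTF-8",
        "text/html;charset=iso-8859-1"
    };
    for (String header : headers) {
      // Parse the raw Content-Type header, parameters included.
      MediaType parsed = MediaType.valueOf(header);
      // isCompatible() ignores parameters; only type/subtype must match.
      System.out.println(header + " -> compatible="
          + MediaType.APPLICATION_JSON_TYPE.isCompatible(parsed));
    }
    // Prints compatible=true, true, false for the three headers above.
  }
}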
TestHttpFSServer.java
@@ -71,6 +71,8 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FsServerDefaults;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.XAttrCodec;
+import org.apache.hadoop.fs.http.client.HttpFSUtils;
+import org.apache.hadoop.fs.http.client.HttpFSFileSystem.Operation;
 import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.DataParam;
 import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.NoRedirectParam;
 import org.apache.hadoop.fs.permission.AclEntry;
@@ -89,6 +91,7 @@ import org.apache.hadoop.security.authentication.util.Signer;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.test.HFSTestCase;
 import org.apache.hadoop.test.HadoopUsersConfTestHelper;
+import org.apache.hadoop.test.LambdaTestUtils;
 import org.apache.hadoop.test.TestDir;
 import org.apache.hadoop.test.TestDirHelper;
 import org.apache.hadoop.test.TestHdfs;
@@ -1917,4 +1920,32 @@ public class TestHttpFSServer extends HFSTestCase {
     Assert.assertEquals(TestJettyHelper.getJettyURL() + "/webhdfs/v1" + path,
         location);
   }
+
+  @Test
+  @TestDir
+  @TestJetty
+  @TestHdfs
+  public void testContentType() throws Exception {
+    createHttpFSServer(false, false);
+    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
+    Path dir = new Path("/tmp");
+    Path file = new Path(dir, "foo");
+    fs.mkdirs(dir);
+    fs.create(file);
+
+    String user = HadoopUsersConfTestHelper.getHadoopUsers()[0];
+    URL url = new URL(TestJettyHelper.getJettyURL(), MessageFormat.format(
+        "/webhdfs/v1/tmp/foo?user.name={0}&op=open&offset=1&length=2", user));
+
+    // test jsonParse with non-json type.
+    final HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+    conn.setRequestMethod(Operation.OPEN.getMethod());
+    conn.connect();
+
+    LambdaTestUtils.intercept(IOException.class,
+        "Content-Type \"text/html;charset=iso-8859-1\" "
+            + "is incompatible with \"application/json\"",
+        () -> HttpFSUtils.jsonParse(conn));
+    conn.disconnect();
+  }
 }
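Beyond the test, the signature change from package-private to public is what makes the validation reusable by callers outside the package. A caller-side sketch (not from the commit; JsonParseDemo is an invented name, and it assumes hadoop-hdfs-httpfs on the classpath plus a reachable HttpFS endpoint passed as args[0], e.g. an op=LISTSTATUS URL):

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;
import org.apache.hadoop.fs.http.client.HttpFSUtils;

public class JsonParseDemo {
  public static void main(String[] args) throws Exception {
    URL url = new URL(args[0]);
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("GET");
    conn.connect();
    try {
      // With this change, a non-JSON response (for example a Jetty HTML
      // error page) fails here with a descriptive IOException instead of
      // an opaque parse failure deeper in JSONParser.
      Object json = HttpFSUtils.jsonParse(conn);
      System.out.println(json);
    } catch (IOException e) {
      System.err.println("Not JSON: " + e.getMessage());
    } finally {
      conn.disconnect();
    }
  }
}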