HDFS-4491. Add/delete files missed in prior commit.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1520482 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Chris Nauroth 2013-09-06 03:14:20 +00:00
parent 5eb618ee1f
commit e10dbf41bc
2 changed files with 68 additions and 6 deletions

View File

@ -30,11 +30,19 @@ import org.apache.hadoop.classification.InterfaceStability;
*/ */
@InterfaceAudience.LimitedPrivate({"HDFS"}) @InterfaceAudience.LimitedPrivate({"HDFS"})
@InterfaceStability.Unstable @InterfaceStability.Unstable
public class URLUtils { public class URLConnectionFactory {
/** /**
* Timeout for socket connects and reads * Timeout for socket connects and reads
*/ */
public static int SOCKET_TIMEOUT = 1*60*1000; // 1 minute public final static int DEFAULT_SOCKET_TIMEOUT = 1*60*1000; // 1 minute
public static final URLConnectionFactory DEFAULT_CONNECTION_FACTORY = new URLConnectionFactory(DEFAULT_SOCKET_TIMEOUT);
private int socketTimeout;
public URLConnectionFactory(int socketTimeout) {
this.socketTimeout = socketTimeout;
}
/** /**
* Opens a url with read and connect timeouts * Opens a url with read and connect timeouts
@ -42,7 +50,7 @@ public class URLUtils {
* @return URLConnection * @return URLConnection
* @throws IOException * @throws IOException
*/ */
public static URLConnection openConnection(URL url) throws IOException { public URLConnection openConnection(URL url) throws IOException {
URLConnection connection = url.openConnection(); URLConnection connection = url.openConnection();
setTimeouts(connection); setTimeouts(connection);
return connection; return connection;
@ -53,8 +61,8 @@ public class URLUtils {
* *
* @param connection URLConnection to set * @param connection URLConnection to set
*/ */
static void setTimeouts(URLConnection connection) { public void setTimeouts(URLConnection connection) {
connection.setConnectTimeout(SOCKET_TIMEOUT); connection.setConnectTimeout(socketTimeout);
connection.setReadTimeout(SOCKET_TIMEOUT); connection.setReadTimeout(socketTimeout);
} }
} }

View File

@ -0,0 +1,54 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.test;
import java.io.File;
import org.apache.hadoop.fs.Path;
/**
 * Static utility methods for deriving per-test filesystem locations from the
 * {@code test.build.data} system property (defaulting to {@code /tmp}).
 */
public class PathUtils {

  // Utility class: all members are static, so prevent instantiation.
  private PathUtils() {
  }

  /**
   * Returns the test directory for the caller as a {@link Path},
   * creating the directory on disk.
   *
   * @param caller class whose simple name identifies the test directory
   * @return Path to the caller's test directory
   */
  public static Path getTestPath(Class<?> caller) {
    return getTestPath(caller, true);
  }

  /**
   * Returns the test directory for the caller as a {@link Path}.
   *
   * @param caller class whose simple name identifies the test directory
   * @param create whether to create the directory on disk
   * @return Path to the caller's test directory
   */
  public static Path getTestPath(Class<?> caller, boolean create) {
    // Bug fix: propagate the create flag instead of always creating.
    return new Path(getTestDirName(caller, create));
  }

  /**
   * Returns the test directory for the caller as a {@link File},
   * creating the directory on disk.
   *
   * @param caller class whose simple name identifies the test directory
   * @return File for the caller's test directory
   */
  public static File getTestDir(Class<?> caller) {
    return getTestDir(caller, true);
  }

  /**
   * Returns the test directory for the caller as a {@link File}.
   *
   * @param caller class whose simple name identifies the test directory
   * @param create whether to create the directory on disk
   * @return File for the caller's test directory
   */
  public static File getTestDir(Class<?> caller, boolean create) {
    File dir =
        new File(System.getProperty("test.build.data", "/tmp"), caller.getSimpleName());
    if (create) {
      dir.mkdirs();
    }
    return dir;
  }

  /**
   * Returns the absolute path name of the caller's test directory,
   * creating the directory on disk.
   *
   * @param caller class whose simple name identifies the test directory
   * @return absolute path of the caller's test directory
   */
  public static String getTestDirName(Class<?> caller) {
    return getTestDirName(caller, true);
  }

  /**
   * Returns the absolute path name of the caller's test directory.
   *
   * @param caller class whose simple name identifies the test directory
   * @param create whether to create the directory on disk
   * @return absolute path of the caller's test directory
   */
  public static String getTestDirName(Class<?> caller, boolean create) {
    return getTestDir(caller, create).getAbsolutePath();
  }
}