HDFS-15982. Deleted data using HTTP API should be saved to the trash (#2927)
Reviewed-by: Siyao Meng <siyao@apache.org>
This commit is contained in: parent e571025f5b · commit 041488e8f5
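In short: the DELETE operation in WebHDFS and HttpFS now honors fs.trash.interval. When trash is enabled and the new skiptrash query parameter is false (its default), the server first tries Trash.moveToAppropriateTrash() and only falls back to a hard delete if the move fails; the web UI's delete dialog gains a second prompt that lets the user choose either behavior.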
DeleteSkipTrashParam.java (new file, org.apache.hadoop.hdfs.web.resources):

@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.web.resources;
+
+/**
+ * SkipTrash param to be used by DELETE query.
+ */
+public class DeleteSkipTrashParam extends BooleanParam {
+
+  public static final String NAME = "skiptrash";
+  public static final String DEFAULT = FALSE;
+
+  private static final Domain DOMAIN = new Domain(NAME);
+
+  /**
+   * Constructor.
+   * @param value the parameter value.
+   */
+  public DeleteSkipTrashParam(final Boolean value) {
+    super(DOMAIN, value);
+  }
+
+  /**
+   * Constructor.
+   * @param str a string representation of the parameter value.
+   */
+  public DeleteSkipTrashParam(final String str) {
+    this(DOMAIN.parse(str));
+  }
+
+  @Override
+  public String getName() {
+    return NAME;
+  }
+}
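As a quick illustration (not part of the commit), here is a minimal sketch of how the new parameter class parses a query-string value, assuming the hadoop-hdfs-client classes above are on the classpath:

```java
import org.apache.hadoop.hdfs.web.resources.DeleteSkipTrashParam;

public class SkipTrashParamDemo {
  public static void main(String[] args) {
    // Parse the raw query-string value, as the WebHDFS request layer would.
    DeleteSkipTrashParam skipTrash = new DeleteSkipTrashParam("true");
    // Prints "skiptrash=true"; an omitted parameter defaults to "false".
    System.out.println(skipTrash.getName() + "=" + skipTrash.getValue());
  }
}
```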
HttpFSFileSystem.java:

@@ -122,6 +122,7 @@ public class HttpFSFileSystem extends FileSystem
   public static final String ACLSPEC_PARAM = "aclspec";
   public static final String DESTINATION_PARAM = "destination";
   public static final String RECURSIVE_PARAM = "recursive";
+  public static final String SKIP_TRASH_PARAM = "skiptrash";
   public static final String SOURCES_PARAM = "sources";
   public static final String OWNER_PARAM = "owner";
   public static final String GROUP_PARAM = "group";
FSOperations.java:

@@ -31,6 +31,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.PathFilter;
 import org.apache.hadoop.fs.QuotaUsage;
 import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.fs.Trash;
 import org.apache.hadoop.fs.XAttrCodec;
 import org.apache.hadoop.fs.XAttrSetFlag;
 import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
@@ -53,6 +54,8 @@ import org.apache.hadoop.util.StringUtils;
 import org.json.simple.JSONArray;
 import org.json.simple.JSONObject;
 import org.apache.hadoop.fs.permission.FsCreateModes;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
@@ -75,6 +78,8 @@ import static org.apache.hadoop.hdfs.DFSConfigKeys.HTTP_BUFFER_SIZE_DEFAULT;
 @InterfaceAudience.Private
 public final class FSOperations {
 
+  private static final Logger LOG = LoggerFactory.getLogger(FSOperations.class);
+
   private static int bufferSize = 4096;
 
   private FSOperations() {
@@ -717,18 +722,22 @@ public final class FSOperations {
    */
   @InterfaceAudience.Private
   public static class FSDelete implements FileSystemAccess.FileSystemExecutor<JSONObject> {
-    private Path path;
-    private boolean recursive;
+    private final Path path;
+    private final boolean recursive;
+    private final boolean skipTrash;
 
     /**
      * Creates a Delete executor.
      *
      * @param path path to delete.
      * @param recursive if the delete should be recursive or not.
+     * @param skipTrash if the file must be deleted and not kept in trash
+     *                  regardless of fs.trash.interval config value.
      */
-    public FSDelete(String path, boolean recursive) {
+    public FSDelete(String path, boolean recursive, boolean skipTrash) {
       this.path = new Path(path);
       this.recursive = recursive;
+      this.skipTrash = skipTrash;
     }
 
     /**
@@ -743,6 +752,19 @@ public final class FSOperations {
      */
     @Override
     public JSONObject execute(FileSystem fs) throws IOException {
+      if (!skipTrash) {
+        boolean movedToTrash = Trash.moveToAppropriateTrash(fs, path,
+            fs.getConf());
+        if (movedToTrash) {
+          HttpFSServerWebApp.get().getMetrics().incrOpsDelete();
+          return toJSON(
+              StringUtils.toLowerCase(HttpFSFileSystem.DELETE_JSON), true);
+        }
+        // Same is the behavior with Delete shell command.
+        // If moveToAppropriateTrash() returns false, file deletion
+        // is attempted rather than throwing Error.
+        LOG.debug("Could not move {} to Trash, attempting removal", path);
+      }
       boolean deleted = fs.delete(path, recursive);
       HttpFSServerWebApp.get().getMetrics().incrOpsDelete();
       return toJSON(
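The executor's flow generalizes to a simple pattern: try the trash first unless the caller opted out, then fall back to a hard delete. A hedged standalone sketch of that pattern (illustrative class and method names, not part of the commit):

```java
import java.io.IOException;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.Trash;

public final class TrashFirstDelete {
  /** Returns true if the path was archived to trash or hard-deleted. */
  public static boolean delete(FileSystem fs, Path path,
      boolean recursive, boolean skipTrash) throws IOException {
    if (!skipTrash && Trash.moveToAppropriateTrash(fs, path, fs.getConf())) {
      return true; // archived under the owner's .Trash directory
    }
    // Mirror the delete shell command: if the move fails, attempt removal.
    return fs.delete(path, recursive);
  }
}
```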
HttpFSParametersProvider.java:

@@ -81,7 +81,8 @@ public class HttpFSParametersProvider extends ParametersProvider {
         new Class[]{ReplicationParam.class});
     PARAMS_DEF.put(Operation.SETTIMES,
         new Class[]{ModifiedTimeParam.class, AccessTimeParam.class});
-    PARAMS_DEF.put(Operation.DELETE, new Class[]{RecursiveParam.class});
+    PARAMS_DEF.put(Operation.DELETE, new Class[]{RecursiveParam.class,
+        DeleteSkipTrashParam.class});
     PARAMS_DEF.put(Operation.SETACL, new Class[]{AclPermissionParam.class});
     PARAMS_DEF.put(Operation.REMOVEACL, new Class[]{});
     PARAMS_DEF.put(Operation.MODIFYACLENTRIES,
@@ -242,6 +243,25 @@ public class HttpFSParametersProvider extends ParametersProvider {
     }
   }
 
+  /**
+   * Class for delete's skipTrash parameter.
+   */
+  @InterfaceAudience.Private
+  public static class DeleteSkipTrashParam extends BooleanParam {
+
+    /**
+     * Parameter name.
+     */
+    public static final String NAME = HttpFSFileSystem.SKIP_TRASH_PARAM;
+
+    /**
+     * Constructor.
+     */
+    public DeleteSkipTrashParam() {
+      super(NAME, false);
+    }
+  }
+
   /**
    * Class for filter parameter.
    */
HttpFSServer.java:

@@ -30,6 +30,7 @@ import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.AccessTimeParam
 import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.AclPermissionParam;
 import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.BlockSizeParam;
 import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.DataParam;
+import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.DeleteSkipTrashParam;
 import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.DestinationParam;
 import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.ECPolicyParam;
 import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.FilterParam;
@@ -548,9 +549,13 @@ public class HttpFSServer {
     case DELETE: {
       Boolean recursive =
           params.get(RecursiveParam.NAME, RecursiveParam.class);
-      AUDIT_LOG.info("[{}] recursive [{}]", path, recursive);
+      Boolean skipTrashParam = params.get(DeleteSkipTrashParam.NAME,
+          DeleteSkipTrashParam.class);
+      boolean skipTrash = skipTrashParam != null && skipTrashParam;
+      AUDIT_LOG.info("[{}] recursive [{}] skipTrash [{}]", path, recursive,
+          skipTrash);
       FSOperations.FSDelete command =
-          new FSOperations.FSDelete(path, recursive);
+          new FSOperations.FSDelete(path, recursive, skipTrash);
       JSONObject json = fsExecute(user, command);
       response = Response.ok(json).type(MediaType.APPLICATION_JSON).build();
       break;
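Note the null guard: params.get() hands back a boxed Boolean that is null when the request omits the parameter, so unboxing it directly would throw. A tiny hedged illustration of the idiom:

```java
public class NullSafeUnboxDemo {
  public static void main(String[] args) {
    Boolean skipTrashParam = null; // query parameter absent from the request
    boolean skipTrash = skipTrashParam != null && skipTrashParam;
    System.out.println(skipTrash); // false, and no NullPointerException
  }
}
```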
TestHttpFSServer.java:

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.fs.http.server;
 
+import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_KEY;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
@@ -537,6 +538,36 @@ public class TestHttpFSServer extends HFSTestCase {
     Assert.assertEquals(HttpURLConnection.HTTP_CREATED, conn.getResponseCode());
   }
 
+  private void deleteWithHttp(String filename, String perms,
+      String unmaskedPerms, Boolean skipTrash) throws Exception {
+    String user = HadoopUsersConfTestHelper.getHadoopUsers()[0];
+    // Remove leading / from filename
+    if (filename.charAt(0) == '/') {
+      filename = filename.substring(1);
+    }
+    String pathOps;
+    if (perms == null) {
+      pathOps = MessageFormat.format("/webhdfs/v1/{0}?user.name={1}&op=DELETE",
+          filename, user);
+    } else {
+      pathOps = MessageFormat.format(
+          "/webhdfs/v1/{0}?user.name={1}&permission={2}&op=DELETE",
+          filename, user, perms);
+    }
+    if (unmaskedPerms != null) {
+      pathOps = pathOps + "&unmaskedpermission=" + unmaskedPerms;
+    }
+    if (skipTrash != null) {
+      pathOps = pathOps + "&skiptrash=" + skipTrash;
+    }
+    URL url = new URL(TestJettyHelper.getJettyURL(), pathOps);
+    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+    conn.addRequestProperty("Content-Type", "application/octet-stream");
+    conn.setRequestMethod("DELETE");
+    conn.connect();
+    Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
+  }
+
   /**
    * Talks to the http interface to create a directory.
    *
@@ -782,6 +813,37 @@ public class TestHttpFSServer extends HFSTestCase {
     Assert.assertTrue("321".equals(getPerms(statusJson)));
   }
 
+  /**
+   * Validate create and delete calls.
+   */
+  @Test
+  @TestDir
+  @TestJetty
+  @TestHdfs
+  public void testCreateDelete() throws Exception {
+    final String dir1 = "/testCreateDelete1";
+    final String path1 = dir1 + "/file1";
+    final String dir2 = "/testCreateDelete2";
+    final String path2 = dir2 + "/file2";
+
+    createHttpFSServer(false, false);
+    final Configuration conf = HttpFSServerWebApp.get()
+        .get(FileSystemAccess.class).getFileSystemConfiguration();
+    conf.setLong(FS_TRASH_INTERVAL_KEY, 5);
+    writeConf(conf, "hdfs-site.xml");
+
+    FileSystem fs = FileSystem.get(TestHdfsHelper.getHdfsConf());
+    fs.mkdirs(new Path(dir1));
+
+    createWithHttp(path1, null);
+    deleteWithHttp(path1, null, null, null);
+
+    fs.mkdirs(new Path(dir2));
+
+    createWithHttp(path2, null);
+    deleteWithHttp(path2, null, null, true);
+  }
+
   /**
    * Validate XAttr get/set/remove calls.
    */
explorer.html (web UI delete dialog):

@@ -165,11 +165,32 @@
       <div class="modal-footer">
         <button type="button" class="btn" data-dismiss="modal">Cancel</button>
         <button type="button" class="btn btn-success" id="delete-button"
-                data-complete-text="Deleting...">Delete</button>
+                data-complete-text="Deleting..." data-toggle="modal" data-target="#delete-trash-modal">Delete
+        </button>
       </div>
     </div>
   </div>
 </div>
+<div class="modal" id="delete-trash-modal" tabindex="-1" role="dialog" aria-hidden="true">
+  <div class="modal-dialog">
+    <div class="modal-content">
+      <div class="modal-header">
+        <button type="button" class="close" data-dismiss="modal"
+                aria-hidden="true">&times;</button>
+        <h4 class="modal-title" id="delete-trash-modal-title">Delete Trash</h4>
+      </div>
+      <div class="modal-body">
+        <div class="panel-body">
+          <div id="delete-trash-prompt"></div>
+        </div>
+      </div>
+      <div class="modal-footer">
+        <button type="button" class="btn" id="skip-trash-button">Yes (Skip Trash)</button>
+        <button type="button" class="btn btn-success" id="trash-button">No</button>
+      </div>
+    </div>
+  </div>
+</div>
 
 <div class="row">
   <div class="col-xs-9 col-md-9">
explorer.js (the delete dialog logic):

@@ -82,23 +82,47 @@
   function delete_path(inode_name, absolute_file_path) {
     $('#delete-modal-title').text("Delete - " + inode_name);
     $('#delete-prompt').text("Are you sure you want to delete " + inode_name
         + " ?");
+    $('#delete-trash-modal-title').text("Skip Trash - " + inode_name);
+    $('#delete-trash-prompt').text("Skipping Trash might delete file forever."
+        + " Do you want to skip-trash " + inode_name
+        + " ? (default behaviour - No)");
 
-    $('#delete-button').click(function() {
+    $('#skip-trash-button').click(function () {
+      // DELETE /webhdfs/v1/<path>?op=DELETE&recursive=<true|false>&skiptrash=true
+      var url = '/webhdfs/v1' + encode_path(absolute_file_path) +
+          '?op=DELETE' + '&recursive=true&skiptrash=true';
+      $.ajax(url,
+          {
+            type: 'DELETE'
+          }).done(function (data) {
+        browse_directory(current_directory);
+      }).fail(network_error_handler(url)
+      ).always(function () {
+        $('#delete-modal').modal('hide');
+        $('#delete-button').button('reset');
+        $('#delete-trash-modal').modal('hide');
+        $('#skip-trash-button').button('reset');
+      });
+    })
+    $('#trash-button').click(function () {
       // DELETE /webhdfs/v1/<path>?op=DELETE&recursive=<true|false>
       var url = '/webhdfs/v1' + encode_path(absolute_file_path) +
           '?op=DELETE' + '&recursive=true';
       $.ajax(url,
-        { type: 'DELETE'
-        }).done(function(data) {
-          browse_directory(current_directory);
-        }).fail(network_error_handler(url)
-        ).always(function() {
-          $('#delete-modal').modal('hide');
-          $('#delete-button').button('reset');
-        });
+          {
+            type: 'DELETE'
+          }).done(function (data) {
+        browse_directory(current_directory);
+      }).fail(network_error_handler(url)
+      ).always(function () {
+        $('#delete-modal').modal('hide');
+        $('#delete-button').button('reset');
+        $('#delete-trash-modal').modal('hide');
+        $('#trash-button').button('reset');
+      });
+    })
 
     $('#delete-modal').modal();
   }
NamenodeWebHdfsMethods.java:

@@ -55,8 +55,10 @@ import javax.ws.rs.core.StreamingOutput;
 import javax.ws.rs.core.Response.ResponseBuilder;
 import javax.ws.rs.core.Response.Status;
 
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.QuotaUsage;
 import org.apache.hadoop.fs.StorageType;
+import org.apache.hadoop.fs.Trash;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -119,6 +121,9 @@ import org.apache.hadoop.thirdparty.com.google.common.base.Charsets;
 import org.apache.hadoop.thirdparty.com.google.common.collect.Lists;
 import com.sun.jersey.spi.container.ResourceFilters;
 
+import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_DEFAULT;
+import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_KEY;
+
 /** Web-hdfs NameNode implementation. */
 @Path("")
 @ResourceFilters(ParamFilter.class)
@@ -1503,10 +1508,13 @@ public class NamenodeWebHdfsMethods {
       @QueryParam(RecursiveParam.NAME) @DefaultValue(RecursiveParam.DEFAULT)
           final RecursiveParam recursive,
       @QueryParam(SnapshotNameParam.NAME) @DefaultValue(SnapshotNameParam.DEFAULT)
-          final SnapshotNameParam snapshotName
+          final SnapshotNameParam snapshotName,
+      @QueryParam(DeleteSkipTrashParam.NAME)
+      @DefaultValue(DeleteSkipTrashParam.DEFAULT)
+          final DeleteSkipTrashParam skiptrash
       ) throws IOException, InterruptedException {
     return delete(ugi, delegation, username, doAsUser, ROOT, op, recursive,
-        snapshotName);
+        snapshotName, skiptrash);
   }
 
   /** Handle HTTP DELETE request. */
@@ -1527,34 +1535,53 @@ public class NamenodeWebHdfsMethods {
       @QueryParam(RecursiveParam.NAME) @DefaultValue(RecursiveParam.DEFAULT)
           final RecursiveParam recursive,
       @QueryParam(SnapshotNameParam.NAME) @DefaultValue(SnapshotNameParam.DEFAULT)
-          final SnapshotNameParam snapshotName
+          final SnapshotNameParam snapshotName,
+      @QueryParam(DeleteSkipTrashParam.NAME)
+      @DefaultValue(DeleteSkipTrashParam.DEFAULT)
+          final DeleteSkipTrashParam skiptrash
       ) throws IOException, InterruptedException {
 
-    init(ugi, delegation, username, doAsUser, path, op, recursive, snapshotName);
+    init(ugi, delegation, username, doAsUser, path, op, recursive,
+        snapshotName, skiptrash);
 
-    return doAs(ugi, new PrivilegedExceptionAction<Response>() {
-      @Override
-      public Response run() throws IOException {
-        return delete(ugi, delegation, username, doAsUser,
-            path.getAbsolutePath(), op, recursive, snapshotName);
-      }
-    });
+    return doAs(ugi, () -> delete(
+        path.getAbsolutePath(), op, recursive, snapshotName, skiptrash));
   }
 
   protected Response delete(
-      final UserGroupInformation ugi,
-      final DelegationParam delegation,
-      final UserParam username,
-      final DoAsParam doAsUser,
       final String fullpath,
       final DeleteOpParam op,
       final RecursiveParam recursive,
-      final SnapshotNameParam snapshotName
-      ) throws IOException {
+      final SnapshotNameParam snapshotName,
+      final DeleteSkipTrashParam skipTrash) throws IOException {
     final ClientProtocol cp = getRpcClientProtocol();
 
     switch(op.getValue()) {
     case DELETE: {
+      Configuration conf =
+          (Configuration) context.getAttribute(JspHelper.CURRENT_CONF);
+      long trashInterval =
+          conf.getLong(FS_TRASH_INTERVAL_KEY, FS_TRASH_INTERVAL_DEFAULT);
+      if (trashInterval > 0 && !skipTrash.getValue()) {
+        LOG.info("{} is {} , trying to archive {} instead of removing",
+            FS_TRASH_INTERVAL_KEY, trashInterval, fullpath);
+        org.apache.hadoop.fs.Path path =
+            new org.apache.hadoop.fs.Path(fullpath);
+        Configuration clonedConf = new Configuration(conf);
+        // To avoid caching FS objects and prevent OOM issues
+        clonedConf.set("fs.hdfs.impl.disable.cache", "true");
+        FileSystem fs = FileSystem.get(clonedConf);
+        boolean movedToTrash = Trash.moveToAppropriateTrash(fs, path,
+            clonedConf);
+        if (movedToTrash) {
+          final String js = JsonUtil.toJsonString("boolean", true);
+          return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
+        }
+        // Same is the behavior with Delete shell command.
+        // If moveToAppropriateTrash() returns false, file deletion
+        // is attempted rather than throwing Error.
+        LOG.debug("Could not move {} to Trash, attempting removal", fullpath);
+      }
       final boolean b = cp.delete(fullpath, recursive.getValue());
       final String js = JsonUtil.toJsonString("boolean", b);
       return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
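The clonedConf step above matters because FileSystem.get() normally caches instances in a JVM-wide map; creating a per-request instance with caching disabled keeps these short-lived objects collectible. A hedged sketch of the pattern in isolation (class name and path are illustrative):

```java
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.Trash;

public class CacheBypassTrashMove {
  public static boolean moveToTrash(Configuration conf, String fullpath)
      throws IOException {
    Configuration clonedConf = new Configuration(conf);
    // Bypass the JVM-wide FileSystem cache for this one-off instance.
    clonedConf.set("fs.hdfs.impl.disable.cache", "true");
    try (FileSystem fs = FileSystem.get(clonedConf)) {
      return Trash.moveToAppropriateTrash(fs, new Path(fullpath), clonedConf);
    }
  }
}
```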
The same dialog change is applied to a second copy of explorer.html:

@@ -166,11 +166,32 @@
       <div class="modal-footer">
         <button type="button" class="btn" data-dismiss="modal">Cancel</button>
         <button type="button" class="btn btn-success" id="delete-button"
-                data-complete-text="Deleting...">Delete</button>
+                data-complete-text="Deleting..." data-toggle="modal" data-target="#delete-trash-modal">Delete
+        </button>
       </div>
     </div>
   </div>
 </div>
+<div class="modal" id="delete-trash-modal" tabindex="-1" role="dialog" aria-hidden="true">
+  <div class="modal-dialog">
+    <div class="modal-content">
+      <div class="modal-header">
+        <button type="button" class="close" data-dismiss="modal"
+                aria-hidden="true">&times;</button>
+        <h4 class="modal-title" id="delete-trash-modal-title">Delete Trash</h4>
+      </div>
+      <div class="modal-body">
+        <div class="panel-body">
+          <div id="delete-trash-prompt"></div>
+        </div>
+      </div>
+      <div class="modal-footer">
+        <button type="button" class="btn" id="skip-trash-button">Yes (Skip Trash)</button>
+        <button type="button" class="btn btn-success" id="trash-button">No</button>
+      </div>
+    </div>
+  </div>
+</div>
 
 <div class="row">
   <div class="col-xs-9 col-md-9">
And the matching change in the second copy of explorer.js:

@@ -83,22 +83,46 @@
     $('#delete-modal-title').text("Delete - " + inode_name);
     $('#delete-prompt').text("Are you sure you want to delete " + inode_name
         + " ?");
+    $('#delete-trash-modal-title').text("Skip Trash - " + inode_name);
+    $('#delete-trash-prompt').text("Skipping Trash might delete file forever."
+        + " Do you want to skip-trash " + inode_name
+        + " ? (default behaviour - No)");
 
-    $('#delete-button').click(function() {
+    $('#skip-trash-button').click(function () {
+      // DELETE /webhdfs/v1/<path>?op=DELETE&recursive=<true|false>&skiptrash=true
+      var url = '/webhdfs/v1' + encode_path(absolute_file_path) +
+          '?op=DELETE' + '&recursive=true&skiptrash=true';
+      $.ajax(url,
+          {
+            type: 'DELETE'
+          }).done(function (data) {
+        browse_directory(current_directory);
+      }).fail(network_error_handler(url)
+      ).always(function () {
+        $('#delete-modal').modal('hide');
+        $('#delete-button').button('reset');
+        $('#delete-trash-modal').modal('hide');
+        $('#skip-trash-button').button('reset');
+      });
+    })
+    $('#trash-button').click(function () {
       // DELETE /webhdfs/v1/<path>?op=DELETE&recursive=<true|false>
       var url = '/webhdfs/v1' + encode_path(absolute_file_path) +
           '?op=DELETE' + '&recursive=true';
       $.ajax(url,
-        { type: 'DELETE'
-        }).done(function(data) {
-          browse_directory(current_directory);
-        }).fail(network_error_handler(url)
-        ).always(function() {
-          $('#delete-modal').modal('hide');
-          $('#delete-button').button('reset');
-        });
+          {
+            type: 'DELETE'
+          }).done(function (data) {
+        browse_directory(current_directory);
+      }).fail(network_error_handler(url)
+      ).always(function () {
+        $('#delete-modal').modal('hide');
+        $('#delete-button').button('reset');
+        $('#delete-trash-modal').modal('hide');
+        $('#trash-button').button('reset');
+      });
+    })
 
     $('#delete-modal').modal();
   }
WebHDFS.md:

@@ -462,7 +462,11 @@ See also: [`destination`](#Destination), [FileSystem](../../api/org/apache/hadoo
 * Submit a HTTP DELETE request.
 
         curl -i -X DELETE "http://<host>:<port>/webhdfs/v1/<path>?op=DELETE
-                              [&recursive=<true |false>]"
+                              [&recursive=<true|false>][&skiptrash=<true|false>]"
+
+    Default values of queryparams if not provided:
+    1. recursive: false
+    2. skiptrash: false
 
     The client receives a response with a [`boolean` JSON object](#Boolean_JSON_Schema):
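To make the new flag concrete, a hedged end-to-end example that issues the request from plain Java (host, port, path, and user name are placeholders):

```java
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

public class WebHdfsSkipTrashDeleteDemo {
  public static void main(String[] args) throws Exception {
    // Placeholder endpoint; substitute your NameNode's HTTP address.
    URL url = new URL("http://nn.example.com:9870/webhdfs/v1/tmp/file1"
        + "?op=DELETE&recursive=false&skiptrash=true&user.name=hdfs");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("DELETE");
    try (BufferedReader in = new BufferedReader(
        new InputStreamReader(conn.getInputStream()))) {
      System.out.println(conn.getResponseCode()); // 200 on success
      System.out.println(in.readLine());          // {"boolean":true}
    }
  }
}
```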
TestWebHDFS.java:

@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.hdfs.web;
 
+import static org.apache.hadoop.fs.CommonConfigurationKeys.DEFAULT_HADOOP_HTTP_STATIC_USER;
+import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_USER_GROUP_STATIC_OVERRIDES_DEFAULT;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.FS_TRASH_INTERVAL_KEY;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BLOCK_SIZE_KEY;
@@ -58,6 +60,8 @@ import java.util.Map;
 import java.util.NoSuchElementException;
 import java.util.Random;
 
+import org.apache.hadoop.hdfs.web.resources.DeleteSkipTrashParam;
+import org.apache.hadoop.hdfs.web.resources.RecursiveParam;
 import org.apache.hadoop.thirdparty.com.google.common.collect.ImmutableList;
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.fs.QuotaUsage;
@@ -1559,8 +1563,12 @@ public class TestWebHDFS {
         HttpURLConnection.HTTP_OK, conn.getResponseCode());
 
     JSONObject responseJson = new JSONObject(response);
-    Assert.assertTrue("Response didn't give us a location. " + response,
-        responseJson.has("Location"));
+    if (!TYPE.equals("DELETE")) {
+      Assert.assertTrue("Response didn't give us a location. " + response,
+          responseJson.has("Location"));
+    } else {
+      Assert.assertTrue(responseJson.getBoolean("boolean"));
+    }
 
     //Test that the DN allows CORS on Create
     if(TYPE.equals("CREATE")) {
@@ -1572,14 +1580,15 @@ public class TestWebHDFS {
     }
   }
 
-  @Test
   /**
    * Test that when "&noredirect=true" is added to operations CREATE, APPEND,
    * OPEN, and GETFILECHECKSUM the response (which is usually a 307 temporary
    * redirect) is a 200 with JSON that contains the redirected location
    */
+  @Test
   public void testWebHdfsNoRedirect() throws Exception {
     final Configuration conf = WebHdfsTestUtil.createConf();
+    conf.setLong(FS_TRASH_INTERVAL_KEY, 5);
     cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
     LOG.info("Started cluster");
     InetSocketAddress addr = cluster.getNameNode().getHttpAddress();
@@ -1618,6 +1627,26 @@ public class TestWebHDFS {
         + Param.toSortedString("&", new NoRedirectParam(true)));
     LOG.info("Sending append request " + url);
     checkResponseContainsLocation(url, "POST");
+
+    // setup some permission to allow moving file to .Trash location
+    cluster.getFileSystem().setPermission(new Path("/testWebHdfsNoRedirect"),
+        new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL));
+    Path userDir = new Path(FileSystem.USER_HOME_PREFIX);
+    Path trashDir = new Path(FileSystem.USER_HOME_PREFIX, DEFAULT_HADOOP_HTTP_STATIC_USER);
+    Path trashPath = new Path(FileSystem.USER_HOME_PREFIX,
+        new Path(DEFAULT_HADOOP_HTTP_STATIC_USER, FileSystem.TRASH_PREFIX));
+    cluster.getFileSystem().mkdirs(userDir, FsPermission.getDirDefault());
+    cluster.getFileSystem().mkdir(trashDir, FsPermission.getDirDefault());
+    cluster.getFileSystem().mkdir(trashPath, FsPermission.getDirDefault());
+    cluster.getFileSystem().setOwner(trashPath, DEFAULT_HADOOP_HTTP_STATIC_USER, HADOOP_USER_GROUP_STATIC_OVERRIDES_DEFAULT);
+    cluster.getFileSystem().setPermission(new Path("/"), new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL));
+
+    url = new URL("http", addr.getHostString(), addr.getPort(),
+        WebHdfsFileSystem.PATH_PREFIX + "/testWebHdfsNoRedirect" + "?op=DELETE"
+        + Param.toSortedString("&", new RecursiveParam(true))
+        + Param.toSortedString("&", new DeleteSkipTrashParam(false)));
+    LOG.info("Sending delete request " + url);
+    checkResponseContainsLocation(url, "DELETE");
   }
 
   @Test