improve logging of failed uploads

git-svn-id: https://svn.apache.org/repos/asf/archiva/trunk@917519 13f79535-47bb-0310-9956-ffa450edef68
Brett Porter 2010-03-01 13:35:38 +00:00
parent 31891b80e4
commit 09f8d89c57
1 changed file with 91 additions and 78 deletions
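The substance of the change is in ArchivaDavResource.addMember(): instead of throwing a bare DavException when an upload fails the content-length check, or when the target is not a DAV collection, the message is now built once, logged at debug level, and then passed to the exception. Below is a minimal standalone sketch of that pattern, not code from the commit: the class and the checkContentLength helper are hypothetical, and only the message text and the debug-then-throw sequence are taken from the diff. It assumes the same libraries the class already uses (Jackrabbit WebDAV, commons-io, SLF4J, Servlet API).

import java.io.File;

import javax.servlet.http.HttpServletResponse;

import org.apache.commons.io.FileUtils;
import org.apache.jackrabbit.webdav.DavException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Sketch only: mirrors the log-then-throw pattern this commit introduces for failed uploads.
 */
public class UploadFailureLoggingSketch
{
    private static final Logger log = LoggerFactory.getLogger( UploadFailureLoggingSketch.class );

    /**
     * Reject an upload whose declared Content-Length does not match what actually landed on disk,
     * logging the reason before surfacing it to the WebDAV client.
     */
    public static void checkContentLength( long declaredLength, File localFile )
        throws DavException
    {
        if ( declaredLength != localFile.length() )
        {
            // a truncated or corrupted deployment should not leave a partial file behind
            FileUtils.deleteQuietly( localFile );

            // build the message once, log it, then throw it, so the server log and the
            // 400 Bad Request returned to the client always carry the same text
            String msg = "Content Header length was " + declaredLength + " but was " + localFile.length();
            log.debug( "Upload failed: " + msg );
            throw new DavException( HttpServletResponse.SC_BAD_REQUEST, msg );
        }
    }
}

Logging at debug rather than error level appears deliberate: the client already receives the message in the 400 response, so the server-side entry is mainly useful when diagnosing a failing deployment.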

org/apache/maven/archiva/webdav/ArchivaDavResource.java

@@ -19,14 +19,6 @@ package org.apache.maven.archiva.webdav;
  * under the License.
  */
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import javax.servlet.http.HttpServletResponse;
 import org.apache.archiva.scheduler.ArchivaTaskScheduler;
 import org.apache.archiva.scheduler.repository.RepositoryArchivaTaskScheduler;
 import org.apache.archiva.scheduler.repository.RepositoryTask;
@@ -67,6 +59,14 @@ import org.joda.time.format.ISODateTimeFormat;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import javax.servlet.http.HttpServletResponse;
 /**
  */
 public class ArchivaDavResource
@@ -85,9 +85,9 @@ public class ArchivaDavResource
     private DavPropertySet properties = null;
     private LockManager lockManager;
     private final DavSession session;
     private String remoteAddr;
     private final ManagedRepositoryConfiguration repository;
@@ -97,11 +97,11 @@ public class ArchivaDavResource
     private List<AuditListener> auditListeners;
     private String principal;
     public static final String COMPLIANCE_CLASS = "1, 2";
     private ArchivaTaskScheduler scheduler;
     private Logger log = LoggerFactory.getLogger( ArchivaDavResource.class );
     public ArchivaDavResource( String localResource, String logicalResource, ManagedRepositoryConfiguration repository,
@@ -109,25 +109,25 @@ public class ArchivaDavResource
                                MimeTypes mimeTypes, List<AuditListener> auditListeners,
                                RepositoryArchivaTaskScheduler scheduler )
     {
         this.localResource = new File( localResource );
         this.logicalResource = logicalResource;
         this.locator = locator;
         this.factory = factory;
         this.session = session;
         // TODO: push into locator as well as moving any references out of the resource factory
         this.repository = repository;
         // TODO: these should be pushed into the repository layer, along with the physical file operations in this class
         this.mimeTypes = mimeTypes;
         this.auditListeners = auditListeners;
         this.scheduler = scheduler;
     }
     public ArchivaDavResource( String localResource, String logicalResource, ManagedRepositoryConfiguration repository,
-                               String remoteAddr, String principal, DavSession session, ArchivaDavResourceLocator locator,
-                               DavResourceFactory factory, MimeTypes mimeTypes, List<AuditListener> auditListeners,
-                               RepositoryArchivaTaskScheduler scheduler )
+                               String remoteAddr, String principal, DavSession session,
+                               ArchivaDavResourceLocator locator, DavResourceFactory factory, MimeTypes mimeTypes,
+                               List<AuditListener> auditListeners, RepositoryArchivaTaskScheduler scheduler )
     {
         this( localResource, logicalResource, repository, session, locator, factory, mimeTypes, auditListeners,
               scheduler );
@@ -190,12 +190,12 @@ public class ArchivaDavResource
     public void spool( OutputContext outputContext )
         throws IOException
     {
-        if ( !isCollection())
+        if ( !isCollection() )
         {
             outputContext.setContentLength( localResource.length() );
             outputContext.setContentType( mimeTypes.getMimeType( localResource.getName() ) );
         }
         if ( !isCollection() && outputContext.hasStream() )
         {
             FileInputStream is = null;
@@ -210,7 +210,7 @@ public class ArchivaDavResource
                 IOUtils.closeQuietly( is );
             }
         }
-        else if (outputContext.hasStream())
+        else if ( outputContext.hasStream() )
         {
             IndexWriter writer = new IndexWriter( this, localResource, logicalResource );
             writer.write( outputContext );
@@ -248,7 +248,7 @@ public class ArchivaDavResource
         return null;
     }
-    @SuppressWarnings("unchecked")
+    @SuppressWarnings( "unchecked" )
     public MultiStatusResponse alterProperties( List changeList )
         throws DavException
     {
@@ -265,7 +265,8 @@ public class ArchivaDavResource
             {
                 parentPath = "/";
             }
-            DavResourceLocator parentloc = locator.getFactory().createResourceLocator( locator.getPrefix(), parentPath );
+            DavResourceLocator parentloc = locator.getFactory().createResourceLocator( locator.getPrefix(),
+                parentPath );
             try
             {
                 parent = factory.createResource( parentloc, session );
@@ -300,34 +301,40 @@ public class ArchivaDavResource
             {
                 IOUtils.closeQuietly( stream );
             }
             // TODO: a bad deployment shouldn't delete an existing file - do we need to write to a temporary location first?
             if ( inputContext.getContentLength() != localFile.length() )
             {
                 FileUtils.deleteQuietly( localFile );
-                throw new DavException( HttpServletResponse.SC_BAD_REQUEST, "Content Header length was " +
-                    inputContext.getContentLength() + " but was " + localFile.length() );
+                String msg =
+                    "Content Header length was " + inputContext.getContentLength() + " but was " + localFile.length();
+                log.debug( "Upload failed: " + msg );
+                throw new DavException( HttpServletResponse.SC_BAD_REQUEST, msg );
             }
             queueRepositoryTask( localFile );
-            log.debug( "File '" + resource.getDisplayName() + ( exists ? "' modified " : "' created ") + "(current user '" + this.principal + "')" );
+            log.debug(
+                "File '" + resource.getDisplayName() + ( exists ? "' modified " : "' created " ) + "(current user '" +
+                this.principal + "')" );
             triggerAuditEvent( resource, exists ? AuditEvent.MODIFY_FILE : AuditEvent.CREATE_FILE );
         }
         else if ( !inputContext.hasStream() && isCollection() ) // New directory
         {
             localFile.mkdir();
             log.debug( "Directory '" + resource.getDisplayName() + "' (current user '" + this.principal + "')" );
             triggerAuditEvent( resource, AuditEvent.CREATE_DIR );
         }
         else
         {
-            throw new DavException( HttpServletResponse.SC_BAD_REQUEST, "Could not write member " +
-                resource.getResourcePath() + " at " + getResourcePath() );
+            String msg = "Could not write member " + resource.getResourcePath() + " at " + getResourcePath() +
+                " as this is not a DAV collection";
+            log.debug( msg );
+            throw new DavException( HttpServletResponse.SC_BAD_REQUEST, msg );
         }
     }
@@ -343,10 +350,10 @@ public class ArchivaDavResource
                 if ( !item.startsWith( HIDDEN_PATH_PREFIX ) )
                 {
                     String path = locator.getResourcePath() + '/' + item;
-                    DavResourceLocator resourceLocator =
-                        locator.getFactory().createResourceLocator( locator.getPrefix(), path );
+                    DavResourceLocator resourceLocator = locator.getFactory().createResourceLocator(
+                        locator.getPrefix(), path );
                     DavResource resource = factory.createResource( resourceLocator, session );
                     if ( resource != null )
                     {
                         list.add( resource );
@@ -367,7 +374,7 @@ public class ArchivaDavResource
         throws DavException
     {
         File resource = checkDavResourceIsArchivaDavResource( member ).getLocalResource();
         if ( resource.exists() )
         {
             try
@@ -387,7 +394,8 @@ public class ArchivaDavResource
                     triggerAuditEvent( member, AuditEvent.REMOVE_FILE );
                 }
-                log.debug( ( resource.isDirectory() ? "Directory '" : "File '" ) + member.getDisplayName() + "' removed (current user '" + this.principal + "')" );
+                log.debug( ( resource.isDirectory() ? "Directory '" : "File '" ) + member.getDisplayName() +
+                    "' removed (current user '" + this.principal + "')" );
             }
             catch ( IOException e )
             {
@@ -400,7 +408,8 @@ public class ArchivaDavResource
         }
     }
-    private void triggerAuditEvent( DavResource member, String event ) throws DavException
+    private void triggerAuditEvent( DavResource member, String event )
+        throws DavException
     {
         String path = logicalResource + "/" + member.getDisplayName();
@@ -437,9 +446,9 @@ public class ArchivaDavResource
                 triggerAuditEvent( remoteAddr, locator.getRepositoryId(), logicalResource, AuditEvent.MOVE_FILE );
             }
             log.debug( ( isCollection() ? "Directory '" : "File '" ) + getLocalResource().getName() + "' moved to '" +
                 destination + "' (current user '" + this.principal + "')" );
         }
         catch ( IOException e )
         {
@@ -476,7 +485,7 @@ public class ArchivaDavResource
                 triggerAuditEvent( remoteAddr, locator.getRepositoryId(), logicalResource, AuditEvent.COPY_FILE );
             }
             log.debug( ( isCollection() ? "Directory '" : "File '" ) + getLocalResource().getName() + "' copied to '" +
                 destination + "' (current user '" + this.principal + "')" );
         }
         catch ( IOException e )
         {
@@ -486,41 +495,41 @@ public class ArchivaDavResource
     public boolean isLockable( Type type, Scope scope )
     {
-        return Type.WRITE.equals(type) && Scope.EXCLUSIVE.equals(scope);
+        return Type.WRITE.equals( type ) && Scope.EXCLUSIVE.equals( scope );
     }
     public boolean hasLock( Type type, Scope scope )
     {
-        return getLock(type, scope) != null;
+        return getLock( type, scope ) != null;
     }
     public ActiveLock getLock( Type type, Scope scope )
     {
         ActiveLock lock = null;
-        if (exists() && Type.WRITE.equals(type) && Scope.EXCLUSIVE.equals(scope))
+        if ( exists() && Type.WRITE.equals( type ) && Scope.EXCLUSIVE.equals( scope ) )
         {
-            lock = lockManager.getLock(type, scope, this);
+            lock = lockManager.getLock( type, scope, this );
         }
         return lock;
     }
     public ActiveLock[] getLocks()
     {
-        ActiveLock writeLock = getLock(Type.WRITE, Scope.EXCLUSIVE);
-        return (writeLock != null) ? new ActiveLock[]{writeLock} : new ActiveLock[0];
+        ActiveLock writeLock = getLock( Type.WRITE, Scope.EXCLUSIVE );
+        return ( writeLock != null ) ? new ActiveLock[]{writeLock} : new ActiveLock[0];
     }
     public ActiveLock lock( LockInfo lockInfo )
         throws DavException
     {
         ActiveLock lock = null;
-        if (isLockable(lockInfo.getType(), lockInfo.getScope()))
+        if ( isLockable( lockInfo.getType(), lockInfo.getScope() ) )
        {
-            lock = lockManager.createLock(lockInfo, this);
+            lock = lockManager.createLock( lockInfo, this );
        }
         else
         {
-            throw new DavException(DavServletResponse.SC_PRECONDITION_FAILED, "Unsupported lock type or scope.");
+            throw new DavException( DavServletResponse.SC_PRECONDITION_FAILED, "Unsupported lock type or scope." );
         }
         return lock;
     }
@@ -528,15 +537,18 @@ public class ArchivaDavResource
     public ActiveLock refreshLock( LockInfo lockInfo, String lockToken )
         throws DavException
     {
-        if (!exists()) {
-            throw new DavException(DavServletResponse.SC_NOT_FOUND);
+        if ( !exists() )
+        {
+            throw new DavException( DavServletResponse.SC_NOT_FOUND );
         }
-        ActiveLock lock = getLock(lockInfo.getType(), lockInfo.getScope());
-        if (lock == null) {
-            throw new DavException(DavServletResponse.SC_PRECONDITION_FAILED, "No lock with the given type/scope present on resource " + getResourcePath());
+        ActiveLock lock = getLock( lockInfo.getType(), lockInfo.getScope() );
+        if ( lock == null )
+        {
+            throw new DavException( DavServletResponse.SC_PRECONDITION_FAILED,
+                "No lock with the given type/scope present on resource " + getResourcePath() );
         }
-        lock = lockManager.refreshLock(lockInfo, lockToken, this);
+        lock = lockManager.refreshLock( lockInfo, lockToken, this );
         return lock;
     }
@@ -544,18 +556,18 @@ public class ArchivaDavResource
     public void unlock( String lockToken )
         throws DavException
     {
-        ActiveLock lock = getLock(Type.WRITE, Scope.EXCLUSIVE);
-        if (lock == null)
+        ActiveLock lock = getLock( Type.WRITE, Scope.EXCLUSIVE );
+        if ( lock == null )
        {
-            throw new DavException(HttpServletResponse.SC_PRECONDITION_FAILED);
+            throw new DavException( HttpServletResponse.SC_PRECONDITION_FAILED );
        }
-        else if (lock.isLockedByToken(lockToken))
+        else if ( lock.isLockedByToken( lockToken ) )
        {
-            lockManager.releaseLock(lockToken, this);
+            lockManager.releaseLock( lockToken, this );
        }
         else
         {
-            throw new DavException(DavServletResponse.SC_LOCKED);
+            throw new DavException( DavServletResponse.SC_LOCKED );
         }
     }
@@ -583,14 +595,14 @@ public class ArchivaDavResource
         {
             properties = new DavPropertySet();
         }
         if ( properties != null )
         {
             return properties;
         }
         DavPropertySet properties = new DavPropertySet();
         // set (or reset) fundamental properties
         if ( getDisplayName() != null )
         {
@@ -620,9 +632,9 @@ public class ArchivaDavResource
         properties.add( new DefaultDavProperty( DavPropertyName.CREATIONDATE, modifiedDate ) );
         properties.add( new DefaultDavProperty( DavPropertyName.GETCONTENTLENGTH, localResource.length() ) );
         this.properties = properties;
         return properties;
     }
@@ -647,9 +659,9 @@ public class ArchivaDavResource
             listener.auditEvent( event );
         }
     }
     private void queueRepositoryTask( File localFile )
     {
         RepositoryTask task = new RepositoryTask();
         task.setRepositoryId( repository.getId() );
         task.setResourceFile( localFile );
@@ -662,8 +674,9 @@ public class ArchivaDavResource
         }
         catch ( TaskQueueException e )
         {
-            log.error( "Unable to queue repository task to execute consumers on resource file ['" +
-                localFile.getName() + "']." );
+            log.error(
+                "Unable to queue repository task to execute consumers on resource file ['" + localFile.getName() +
+                "']." );
         }
     }
 }