[MRM-1702] use the fileLockManager in the code with possible race condition

git-svn-id: https://svn.apache.org/repos/asf/archiva/trunk@1550636 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Olivier Lamy 2013-12-13 04:46:36 +00:00
parent c3ba717d4c
commit 06cb9ea4f9
11 changed files with 176 additions and 65 deletions

View File

@ -33,7 +33,7 @@ import java.util.concurrent.ConcurrentMap;
/**
* @author Olivier Lamy
*/
@Service("fileLockManager#default")
@Service( "fileLockManager#default" )
public class DefaultFileLockManager
implements FileLockManager
{
@ -45,6 +45,7 @@ public class DefaultFileLockManager
private int timeout = 0;
@Override
public Lock readFileLock( File file )
throws FileLockException, FileLockTimeoutException
@ -56,7 +57,7 @@ public class DefaultFileLockManager
}
StopWatch stopWatch = new StopWatch();
boolean acquired = false;
mkdirs( file.getParentFile() );
try
{
Lock lock = new Lock( file, false );
@ -108,6 +109,8 @@ public class DefaultFileLockManager
return new Lock( file );
}
mkdirs( file.getParentFile() );
StopWatch stopWatch = new StopWatch();
boolean acquired = false;
@ -176,6 +179,36 @@ public class DefaultFileLockManager
}
}
/**
 * Recursively creates the given directory in a way that tolerates races with
 * other threads/processes creating the same path concurrently.
 *
 * @param directory the directory to create (may be {@code null})
 * @return {@code true} if this call created the directory, {@code false} if it
 *         was {@code null}, already existed, or could not be created
 */
private boolean mkdirs( File directory )
{
    // Nothing to do for a null argument or a path that already exists.
    if ( directory == null || directory.exists() )
    {
        return false;
    }
    // Fast path: a single mkdir succeeds when the parent already exists.
    if ( directory.mkdir() )
    {
        return true;
    }
    // Resolve symlinks/relative segments before walking up to the parent.
    File canonical;
    try
    {
        canonical = directory.getCanonicalFile();
    }
    catch ( IOException e )
    {
        return false;
    }
    File parent = canonical.getParentFile();
    if ( parent == null )
    {
        return false;
    }
    // Create the parent chain first; "|| parent.exists()" absorbs the race
    // where another thread created the parent between our two checks.
    return ( mkdirs( parent ) || parent.exists() ) && canonical.mkdir();
}
public int getTimeout()
{
return timeout;

View File

@ -27,9 +27,23 @@ import java.io.FileNotFoundException;
*/
public interface FileLockManager
{
/**
*
* @param file
* @return
* @throws FileLockException
* @throws FileLockTimeoutException
*/
Lock writeFileLock( File file )
throws FileLockException, FileLockTimeoutException;
/**
*
* @param file
* @return
* @throws FileLockException
* @throws FileLockTimeoutException
*/
Lock readFileLock( File file )
throws FileLockException, FileLockTimeoutException;

View File

@ -70,7 +70,7 @@ public class DefaultFileLockManagerTest
throws IOException
{
this.fileLockManager = fileLockManager;
file.createNewFile();
//file.createNewFile();
}

View File

@ -22,9 +22,9 @@ package org.apache.archiva.policies;
/**
* Policy to apply before the download is attempted.
*
*
*/
public interface PreDownloadPolicy extends DownloadPolicy
public interface PreDownloadPolicy
extends DownloadPolicy
{
// no op
}

View File

@ -51,6 +51,10 @@
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-filelock</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-repository-scanner</artifactId>
@ -225,6 +229,7 @@
org.apache.maven.wagon.proxy,
org.apache.maven.wagon.repository,
com.google.common.io,
org.apache.archiva.common.filelock,
org.slf4j;resolution:=optional
</Import-Package>
</instructions>

View File

@ -25,6 +25,10 @@ import org.apache.archiva.admin.model.beans.NetworkProxy;
import org.apache.archiva.admin.model.beans.ProxyConnectorRuleType;
import org.apache.archiva.admin.model.beans.RemoteRepository;
import org.apache.archiva.admin.model.networkproxy.NetworkProxyAdmin;
import org.apache.archiva.common.filelock.FileLockException;
import org.apache.archiva.common.filelock.FileLockManager;
import org.apache.archiva.common.filelock.FileLockTimeoutException;
import org.apache.archiva.common.filelock.Lock;
import org.apache.archiva.configuration.ArchivaConfiguration;
import org.apache.archiva.configuration.Configuration;
import org.apache.archiva.configuration.ConfigurationNames;
@ -144,6 +148,10 @@ public class DefaultRepositoryProxyConnectors
@Inject
private NetworkProxyAdmin networkProxyAdmin;
@Inject
@Named( value = "fileLockManager#default" )
private FileLockManager fileLockManager;
@PostConstruct
public void initialize()
{
@ -431,8 +439,8 @@ public class DefaultRepositoryProxyConnectors
catch ( RepositoryAdminException e )
{
log.debug( MarkerFactory.getDetachedMarker( "transfer.error" ),
"Transfer error from repository {} for resource {}, continuing to next repository. Error message: {}",targetRepository.getRepository().getId(), path,
e.getMessage(), e );
"Transfer error from repository {} for resource {}, continuing to next repository. Error message: {}",
targetRepository.getRepository().getId(), path, e.getMessage(), e );
log.debug( MarkerFactory.getDetachedMarker( "transfer.error" ), "Full stack trace", e );
}
}
@ -490,14 +498,16 @@ public class DefaultRepositoryProxyConnectors
}
catch ( ProxyException e )
{
log.warn( "Transfer error from repository {} for versioned Metadata {}, continuing to next repository. Error message: {}",
targetRepository.getRepository().getId(), logicalPath, e.getMessage() );
log.warn(
"Transfer error from repository {} for versioned Metadata {}, continuing to next repository. Error message: {}",
targetRepository.getRepository().getId(), logicalPath, e.getMessage() );
log.debug( "Full stack trace", e );
}
catch ( RepositoryAdminException e )
{
log.warn( "Transfer error from repository {} for versioned Metadata {}, continuing to next repository. Error message: {}",
targetRepository.getRepository().getId(), logicalPath, e.getMessage() );
log.warn(
"Transfer error from repository {} for versioned Metadata {}, continuing to next repository. Error message: {}",
targetRepository.getRepository().getId(), logicalPath, e.getMessage() );
log.debug( "Full stack trace", e );
}
}
@ -898,7 +908,7 @@ public class DefaultRepositoryProxyConnectors
catch ( ProxyException e )
{
urlFailureCache.cacheFailure( url );
log.warn( "Transfer failed on checksum: {} : {}",url ,e.getMessage(), e );
log.warn( "Transfer failed on checksum: {} : {}", url, e.getMessage(), e );
// Critical issue, pass it on.
throw e;
}
@ -1059,7 +1069,7 @@ public class DefaultRepositoryProxyConnectors
log.warn(
"Transfer error from repository {} for artifact {} , continuing to next repository. Error message: {}",
content.getRepository().getId(), Keys.toKey( artifact), exception.getMessage() );
content.getRepository().getId(), Keys.toKey( artifact ), exception.getMessage() );
log.debug( "Full stack trace", exception );
}
@ -1086,40 +1096,51 @@ public class DefaultRepositoryProxyConnectors
private void moveTempToTarget( File temp, File target )
throws ProxyException
{
if ( target.exists() && !target.delete() )
{
throw new ProxyException( "Unable to overwrite existing target file: " + target.getAbsolutePath() );
}
target.getParentFile().mkdirs();
// TODO file lock library
RandomAccessFile raf;
if ( !temp.renameTo( target ) )
Lock lock = null;
try
{
log.warn( "Unable to rename tmp file to its final name... resorting to copy command." );
lock = fileLockManager.writeFileLock( target );
if ( lock.getFile().exists() && !lock.getFile().delete() )
{
throw new ProxyException( "Unable to overwrite existing target file: " + target.getAbsolutePath() );
}
try
lock.getFile().getParentFile().mkdirs();
if ( !temp.renameTo( lock.getFile() ) )
{
FileUtils.copyFile( temp, target );
}
catch ( IOException e )
{
if ( target.exists() )
log.warn( "Unable to rename tmp file to its final name... resorting to copy command." );
try
{
log.debug( "Tried to copy file {} to {} but file with this name already exists.", temp.getName(),
target.getAbsolutePath() );
FileUtils.copyFile( temp, lock.getFile() );
}
else
catch ( IOException e )
{
throw new ProxyException(
"Cannot copy tmp file " + temp.getAbsolutePath() + " to its final location", e );
if ( lock.getFile().exists() )
{
log.debug( "Tried to copy file {} to {} but file with this name already exists.",
temp.getName(), lock.getFile().getAbsolutePath() );
}
else
{
throw new ProxyException(
"Cannot copy tmp file " + temp.getAbsolutePath() + " to its final location", e );
}
}
finally
{
FileUtils.deleteQuietly( temp );
}
}
finally
{
FileUtils.deleteQuietly( temp );
}
} catch( FileLockException e)
{
throw new ProxyException( e.getMessage(), e );
} catch (FileLockTimeoutException e)
{
throw new ProxyException( e.getMessage(), e );
}
}
@ -1187,12 +1208,12 @@ public class DefaultRepositoryProxyConnectors
}
catch ( ConnectionException e )
{
log.warn( "Could not connect to {}: {}", remoteRepository.getRepository().getName(), e.getMessage() );
log.warn( "Could not connect to {}: {}", remoteRepository.getRepository().getName(), e.getMessage() );
connected = false;
}
catch ( AuthenticationException e )
{
log.warn( "Could not connect to {}: {}", remoteRepository.getRepository().getName(), e.getMessage() );
log.warn( "Could not connect to {}: {}", remoteRepository.getRepository().getName(), e.getMessage() );
connected = false;
}

View File

@ -74,6 +74,11 @@
<artifactId>archiva-indexer</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-filelock</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-repository-admin-api</artifactId>

View File

@ -90,6 +90,10 @@
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-indexer</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-filelock</artifactId>
</dependency>
<dependency>
<groupId>org.apache.jackrabbit</groupId>
<artifactId>jackrabbit-webdav</artifactId>
@ -274,6 +278,7 @@
org.apache.archiva.redback.*,
org.apache.archiva.redback.components.taskqueue,
org.codehaus.plexus.util*,
org.apache.archiva.common.filelock,
org.codehaus.redback.integration.filter.authentication,
org.slf4j;resolution:=optional
</Import-Package>

View File

@ -22,6 +22,10 @@ package org.apache.archiva.webdav;
import org.apache.archiva.admin.model.beans.ManagedRepository;
import org.apache.archiva.audit.AuditEvent;
import org.apache.archiva.audit.AuditListener;
import org.apache.archiva.common.filelock.FileLockException;
import org.apache.archiva.common.filelock.FileLockManager;
import org.apache.archiva.common.filelock.FileLockTimeoutException;
import org.apache.archiva.common.filelock.Lock;
import org.apache.archiva.redback.components.taskqueue.TaskQueueException;
import org.apache.archiva.scheduler.ArchivaTaskScheduler;
import org.apache.archiva.scheduler.repository.model.RepositoryArchivaTaskScheduler;
@ -100,14 +104,16 @@ public class ArchivaDavResource
public static final String COMPLIANCE_CLASS = "1, 2";
private ArchivaTaskScheduler scheduler;
private final ArchivaTaskScheduler scheduler;
private final FileLockManager fileLockManager;
private Logger log = LoggerFactory.getLogger( ArchivaDavResource.class );
public ArchivaDavResource( String localResource, String logicalResource, ManagedRepository repository,
DavSession session, ArchivaDavResourceLocator locator, DavResourceFactory factory,
MimeTypes mimeTypes, List<AuditListener> auditListeners,
RepositoryArchivaTaskScheduler scheduler )
RepositoryArchivaTaskScheduler scheduler, FileLockManager fileLockManager )
{
this.localResource = new File( localResource );
this.logicalResource = logicalResource;
@ -122,15 +128,16 @@ public class ArchivaDavResource
this.mimeTypes = mimeTypes;
this.auditListeners = auditListeners;
this.scheduler = scheduler;
this.fileLockManager = fileLockManager;
}
public ArchivaDavResource( String localResource, String logicalResource, ManagedRepository repository,
String remoteAddr, String principal, DavSession session,
ArchivaDavResourceLocator locator, DavResourceFactory factory, MimeTypes mimeTypes,
List<AuditListener> auditListeners, RepositoryArchivaTaskScheduler scheduler )
List<AuditListener> auditListeners, RepositoryArchivaTaskScheduler scheduler , FileLockManager fileLockManager )
{
this( localResource, logicalResource, repository, session, locator, factory, mimeTypes, auditListeners,
scheduler );
scheduler, fileLockManager );
this.remoteAddr = remoteAddr;
this.principal = principal;
@ -196,25 +203,37 @@ public class ArchivaDavResource
outputContext.setContentType( mimeTypes.getMimeType( localResource.getName() ) );
}
if ( !isCollection() && outputContext.hasStream() )
try
{
FileInputStream is = null;
try
if ( !isCollection() && outputContext.hasStream() )
{
// TODO file lock library
// Write content to stream
is = new FileInputStream( localResource );
IOUtils.copy( is, outputContext.getOutputStream() );
Lock lock = fileLockManager.readFileLock( localResource );
FileInputStream is = null;
try
{
// Write content to stream
is = new FileInputStream( lock.getFile() );
IOUtils.copy( is, outputContext.getOutputStream() );
}
finally
{
IOUtils.closeQuietly( is );
fileLockManager.release( lock );
}
}
finally
else if ( outputContext.hasStream() )
{
IOUtils.closeQuietly( is );
IndexWriter writer = new IndexWriter( this, localResource, logicalResource );
writer.write( outputContext );
}
}
else if ( outputContext.hasStream() )
catch ( FileLockException e )
{
IndexWriter writer = new IndexWriter( this, localResource, logicalResource );
writer.write( outputContext );
throw new IOException( e.getMessage(), e );
}
catch ( FileLockTimeoutException e )
{
throw new IOException( e.getMessage(), e );
}
}

View File

@ -27,6 +27,7 @@ import org.apache.archiva.admin.model.remote.RemoteRepositoryAdmin;
import org.apache.archiva.audit.AuditEvent;
import org.apache.archiva.audit.AuditListener;
import org.apache.archiva.audit.Auditable;
import org.apache.archiva.common.filelock.FileLockManager;
import org.apache.archiva.common.plexusbridge.PlexusSisuBridge;
import org.apache.archiva.common.plexusbridge.PlexusSisuBridgeException;
import org.apache.archiva.common.utils.PathUtil;
@ -178,6 +179,10 @@ public class ArchivaDavResourceFactory
@Named( value = "archivaTaskScheduler#repository" )
private RepositoryArchivaTaskScheduler scheduler;
@Inject
@Named(value= "fileLockManager#default")
private FileLockManager fileLockManager;
private ApplicationContext applicationContext;
@Inject
@ -268,7 +273,7 @@ public class ArchivaDavResourceFactory
resource = new ArchivaDavResource( resourceFile.getAbsolutePath(), locator.getResourcePath(), null,
request.getRemoteAddr(), activePrincipal,
request.getDavSession(), archivaLocator, this, mimeTypes,
auditListeners, scheduler );
auditListeners, scheduler, fileLockManager );
setHeaders( response, locator, resource );
return resource;
}
@ -345,7 +350,7 @@ public class ArchivaDavResourceFactory
resource =
new ArchivaDavResource( metadataChecksum.getAbsolutePath(), logicalResource.getPath(), null,
request.getRemoteAddr(), activePrincipal, request.getDavSession(),
archivaLocator, this, mimeTypes, auditListeners, scheduler );
archivaLocator, this, mimeTypes, auditListeners, scheduler, fileLockManager );
}
}
else
@ -384,7 +389,7 @@ public class ArchivaDavResourceFactory
new ArchivaDavResource( resourceFile.getAbsolutePath(), logicalResource.getPath(), null,
request.getRemoteAddr(), activePrincipal,
request.getDavSession(), archivaLocator, this, mimeTypes,
auditListeners, scheduler );
auditListeners, scheduler, fileLockManager );
}
catch ( RepositoryMetadataException r )
{
@ -440,7 +445,7 @@ public class ArchivaDavResourceFactory
File resourceFile = new File( temporaryIndexDirectory, requestedFileName );
resource = new ArchivaDavResource( resourceFile.getAbsolutePath(), requestedFileName, null,
request.getRemoteAddr(), activePrincipal, request.getDavSession(),
archivaLocator, this, mimeTypes, auditListeners, scheduler );
archivaLocator, this, mimeTypes, auditListeners, scheduler, fileLockManager );
}
else
@ -571,7 +576,7 @@ public class ArchivaDavResourceFactory
resource =
new ArchivaDavResource( resourceFile.getAbsolutePath(), path, managedRepositoryContent.getRepository(),
request.getRemoteAddr(), activePrincipal, request.getDavSession(),
archivaLocator, this, mimeTypes, auditListeners, scheduler );
archivaLocator, this, mimeTypes, auditListeners, scheduler, fileLockManager );
if ( WebdavMethodUtil.isReadMethod( request.getMethod() ) )
{
@ -604,7 +609,7 @@ public class ArchivaDavResourceFactory
managedRepositoryContent.getRepository(),
request.getRemoteAddr(), activePrincipal,
request.getDavSession(), archivaLocator, this, mimeTypes,
auditListeners, scheduler );
auditListeners, scheduler, fileLockManager );
}
catch ( LayoutException e )
{
@ -726,7 +731,7 @@ public class ArchivaDavResourceFactory
File resourceFile = new File( managedRepositoryContent.getRepoRoot(), logicalResource );
resource = new ArchivaDavResource( resourceFile.getAbsolutePath(), logicalResource,
managedRepositoryContent.getRepository(), davSession, archivaLocator,
this, mimeTypes, auditListeners, scheduler );
this, mimeTypes, auditListeners, scheduler, fileLockManager );
resource.addLockManager( lockManager );
}

View File

@ -22,6 +22,7 @@ package org.apache.archiva.webdav;
import junit.framework.TestCase;
import org.apache.archiva.admin.model.beans.ManagedRepository;
import org.apache.archiva.audit.AuditListener;
import org.apache.archiva.common.filelock.FileLockManager;
import org.apache.commons.io.FileUtils;
import org.apache.jackrabbit.webdav.DavException;
import org.apache.jackrabbit.webdav.DavResource;
@ -58,6 +59,9 @@ public class DavResourceTest
@Inject
private MimeTypes mimeTypes;
@Inject
private FileLockManager fileLockManager;
private ArchivaDavResourceLocator resourceLocator;
private DavResourceFactory resourceFactory;
@ -105,7 +109,7 @@ public class DavResourceTest
private DavResource getDavResource( String logicalPath, File file )
{
return new ArchivaDavResource( file.getAbsolutePath(), logicalPath, repository, session, resourceLocator,
resourceFactory, mimeTypes, Collections.<AuditListener> emptyList(), null );
resourceFactory, mimeTypes, Collections.<AuditListener> emptyList(), null, fileLockManager );
}
@Test
@ -324,7 +328,7 @@ public class DavResourceTest
{
return new ArchivaDavResource( baseDir.getAbsolutePath(), "/", repository, session, resourceLocator,
resourceFactory, mimeTypes, Collections.<AuditListener> emptyList(),
null );
null, fileLockManager );
}
}
}