[MRM-138] add expiry to failure cache

git-svn-id: https://svn.apache.org/repos/asf/maven/repository-manager/trunk@431143 13f79535-47bb-0310-9956-ffa450edef68
Brett Porter 2006-08-13 04:12:06 +00:00
parent c8232cf0b3
commit a570a12409
3 changed files with 228 additions and 100 deletions
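
In outline, the change replaces the plain set of failed paths in ProxiedArtifactRepository with a map from path to an expiry timestamp, computed from the repository's update policy (always, daily, interval:N minutes, or never). The following is a minimal standalone sketch of that mechanism, assuming Maven's standard update-policy strings; the class and method names are illustrative only, not the project's API.

import java.util.Calendar;
import java.util.HashMap;
import java.util.Map;

// Illustrative stand-in for the failure cache added in this commit (not the project's class).
public class FailureCacheSketch
{
    // path -> absolute time (millis) at which the cached failure stops being trusted
    private final Map/*<String,Long>*/ failureCache = new HashMap/*<String,Long>*/();

    public void addFailure( String path, String updatePolicy )
    {
        failureCache.put( path, new Long( calculateExpiryTime( updatePolicy ) ) );
    }

    public boolean isCachedFailure( String path )
    {
        Long expiry = (Long) failureCache.get( path );
        if ( expiry == null )
        {
            return false;
        }
        if ( System.currentTimeMillis() < expiry.longValue() )
        {
            return true;
        }
        clearFailure( path ); // expired entries are dropped on the next lookup
        return false;
    }

    public void clearFailure( String path )
    {
        failureCache.remove( path );
    }

    private long calculateExpiryTime( String updatePolicy )
    {
        if ( "always".equals( updatePolicy ) )
        {
            return 0; // expires immediately: the next request retries the repository
        }
        if ( "daily".equals( updatePolicy ) )
        {
            Calendar cal = Calendar.getInstance(); // next midnight boundary
            cal.set( Calendar.HOUR_OF_DAY, 0 );
            cal.set( Calendar.MINUTE, 0 );
            cal.set( Calendar.SECOND, 0 );
            cal.set( Calendar.MILLISECOND, 0 );
            cal.add( Calendar.DAY_OF_MONTH, 1 );
            return cal.getTime().getTime();
        }
        if ( updatePolicy.startsWith( "interval" ) )
        {
            // "interval:N" where N is a number of minutes; the "+ 1" skips the separator
            int minutes = Integer.parseInt( updatePolicy.substring( "interval".length() + 1 ) );
            Calendar cal = Calendar.getInstance();
            cal.add( Calendar.MINUTE, minutes );
            return cal.getTime().getTime();
        }
        return Long.MAX_VALUE; // "never": the failure stays cached indefinitely
    }
}

With the "always" policy the entry is already expired when it is written, so the next request retries the repository immediately; that is the behaviour the new testGetWhenInBothProxiedReposFirstHasExpiredCacheFailure test below relies on.
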

View File

@@ -110,29 +110,11 @@ public File getAlways( String path, List proxiedRepositories, ArtifactRepository
if ( repository.isCachedFailure( path ) )
{
processRepositoryFailure( repository, "Cached failure found" );
processCachedRepositoryFailure( repository, "Cached failure found for: " + path );
}
else
{
try
{
get( path, target, repository, managedRepository, wagonProxy );
if ( !target.exists() )
{
repository.addFailure( path );
}
else
{
// in case it previously failed and we've since found it
repository.clearFailure( path );
}
}
catch ( ProxyException e )
{
repository.addFailure( path );
throw e;
}
}
}
@@ -148,16 +130,20 @@ private void get( String path, File target, ProxiedArtifactRepository repository
ArtifactRepository managedRepository, ProxyInfo wagonProxy )
throws ProxyException
{
ArtifactRepositoryPolicy policy = null;
if ( path.endsWith( ".md5" ) || path.endsWith( ".sha1" ) )
{
// always read from the managed repository, no need to make remote request
}
else if ( path.endsWith( "maven-metadata.xml" ) )
{
// TODO: this is not always!
if ( !target.exists() || isOutOfDate( repository.getRepository().getReleases(), target ) )
// TODO: this is not "always" as this method expects!
// TODO: merge the metadata!
policy = repository.getRepository().getReleases();
if ( !target.exists() || isOutOfDate( policy, target ) )
{
getFileFromRepository( path, repository, managedRepository.getBasedir(), wagonProxy, target );
getFileFromRepository( path, repository, managedRepository.getBasedir(), wagonProxy, target, policy );
}
}
else
@@ -186,22 +172,46 @@ else if ( path.endsWith( "maven-metadata.xml" ) )
if ( artifact != null )
{
getArtifactFromRepository( artifact, repository, managedRepository, wagonProxy, target );
ArtifactRepository artifactRepository = repository.getRepository();
policy = artifact.isSnapshot() ? artifactRepository.getSnapshots() : artifactRepository.getReleases();
if ( !policy.isEnabled() )
{
getLogger().debug( "Skipping disabled repository " + repository.getName() );
}
else
{
// Don't use releases policy, we don't want to perform updates on them (only metadata, as used earlier)
// TODO: this is not "always" as this method expects!
if ( !target.exists() || isOutOfDate( policy, target ) )
{
getFileFromRepository( artifactRepository.pathOf( artifact ), repository,
managedRepository.getBasedir(), wagonProxy, target, policy );
}
}
}
else
{
// Some other unknown file in the repository, proxy as is
// TODO: this is not always!
// TODO: this is not "always" as this method expects!
if ( !target.exists() )
{
getFileFromRepository( path, repository, managedRepository.getBasedir(), wagonProxy, target );
}
policy = repository.getRepository().getReleases();
getFileFromRepository( path, repository, managedRepository.getBasedir(), wagonProxy, target,
policy );
}
}
}
if ( target.exists() )
{
// in case it previously failed and we've since found it
repository.clearFailure( path );
}
}
private void getFileFromRepository( String path, ProxiedArtifactRepository repository, String repositoryCachePath,
ProxyInfo httpProxy, File target )
ProxyInfo httpProxy, File target, ArtifactRepositoryPolicy policy )
throws ProxyException
{
boolean connected = false;
@@ -249,7 +259,8 @@ private void getFileFromRepository( String path, ProxiedArtifactRepository repos
if ( tries > 1 && !success )
{
processRepositoryFailure( repository, "Checksum failures occurred while downloading " + path );
processRepositoryFailure( repository, "Checksum failures occurred while downloading " + path,
path, policy );
return;
}
@@ -265,11 +276,11 @@ private void getFileFromRepository( String path, ProxiedArtifactRepository repos
}
catch ( TransferFailedException e )
{
processRepositoryFailure( repository, e );
processRepositoryFailure( repository, e, path, policy );
}
catch ( AuthorizationException e )
{
processRepositoryFailure( repository, e );
processRepositoryFailure( repository, e, path, policy );
}
catch ( ResourceDoesNotExistException e )
{
@@ -481,23 +492,34 @@ private void disconnectWagon( Wagon wagon )
}
}
private void processRepositoryFailure( ProxiedArtifactRepository repository, Throwable t )
private void processRepositoryFailure( ProxiedArtifactRepository repository, Throwable t, String path,
ArtifactRepositoryPolicy policy )
throws ProxyException
{
repository.addFailure( path, policy );
String message = t.getMessage();
if ( repository.isHardFail() )
{
repository.addFailure( path, policy );
throw new ProxyException(
"An error occurred in hardfailing repository " + repository.getName() + "...\n " + t.getMessage(),
t );
}
else
{
getLogger().warn( "Skipping repository " + repository.getName() + ": " + t.getMessage() );
getLogger().debug( "Cause", t );
}
"An error occurred in hardfailing repository " + repository.getName() + "...\n " + message, t );
}
private void processRepositoryFailure( ProxiedArtifactRepository repository, String message )
getLogger().warn( "Skipping repository " + repository.getName() + ": " + message );
getLogger().debug( "Cause", t );
}
private void processRepositoryFailure( ProxiedArtifactRepository repository, String message, String path,
ArtifactRepositoryPolicy policy )
throws ProxyException
{
repository.addFailure( path, policy );
processCachedRepositoryFailure( repository, message );
}
private void processCachedRepositoryFailure( ProxiedArtifactRepository repository, String message )
throws ProxyException
{
if ( repository.isHardFail() )
@@ -505,38 +527,7 @@ private void processRepositoryFailure( ProxiedArtifactRepository repository, Str
throw new ProxyException(
"An error occurred in hardfailing repository " + repository.getName() + "...\n " + message );
}
else
{
getLogger().warn( "Skipping repository " + repository.getName() + ": " + message );
}
}
private void getArtifactFromRepository( Artifact artifact, ProxiedArtifactRepository repository,
ArtifactRepository managedRepository, ProxyInfo httpProxy, File remoteFile )
throws ProxyException
{
ArtifactRepository artifactRepository = repository.getRepository();
ArtifactRepositoryPolicy policy =
artifact.isSnapshot() ? artifactRepository.getSnapshots() : artifactRepository.getReleases();
if ( !policy.isEnabled() )
{
getLogger().debug( "Skipping disabled repository " + repository.getName() );
}
else
{
getLogger().debug( "Trying repository " + repository.getName() );
// Don't use releases policy, we don't want to perform updates on them (only metadata, as used earlier)
// TODO: this is not always!
if ( !remoteFile.exists() || isOutOfDate( policy, remoteFile ) )
{
getFileFromRepository( artifactRepository.pathOf( artifact ), repository,
managedRepository.getBasedir(), httpProxy, remoteFile );
}
getLogger().debug( " Artifact resolved" );
artifact.setResolved( true );
}
}
}
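
Two behavioural details in the handler changes above are easy to miss: the artifact branch now selects the snapshots or releases policy and threads it through to getFileFromRepository, so the same policy governs how long a failure for that path stays cached; and a failure is recorded before the hard-fail/soft-fail decision is taken, while a successful download clears any previously cached failure. A compact, self-contained sketch of that bookkeeping, with illustrative names rather than the project's API:

import java.io.File;
import java.util.HashMap;
import java.util.Map;

// Illustrative stand-in for the handler-side bookkeeping (not the project's ProxyRequestHandler).
public class FailureBookkeepingSketch
{
    // path -> expiry time (millis) of the cached failure
    private final Map/*<String,Long>*/ failureCache = new HashMap/*<String,Long>*/();

    private final boolean hardFail;

    public FailureBookkeepingSketch( boolean hardFail )
    {
        this.hardFail = hardFail;
    }

    void recordFailure( String repositoryName, String path, long expiryMillis, Exception cause )
        throws Exception
    {
        // cache the failure first, whatever the fail mode, so later requests can skip the repository
        failureCache.put( path, new Long( expiryMillis ) );

        if ( hardFail )
        {
            // a hard-failing repository aborts the whole request
            throw new Exception( "An error occurred in hardfailing repository " + repositoryName, cause );
        }
        // soft failure: warn and let the caller fall through to the next proxied repository
        System.out.println( "Skipping repository " + repositoryName + ": " + cause.getMessage() );
    }

    void recordSuccess( String path, File target )
    {
        if ( target.exists() )
        {
            // in case it previously failed and we've since found it
            failureCache.remove( path );
        }
    }
}
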

View File

@@ -17,9 +17,11 @@
*/
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.repository.ArtifactRepositoryPolicy;
import java.util.HashSet;
import java.util.Set;
import java.util.Calendar;
import java.util.HashMap;
import java.util.Map;
/**
* A proxied artifact repository - contains the artifact repository and additional information about
@@ -50,9 +52,10 @@ public class ProxiedArtifactRepository
private final ArtifactRepository repository;
/**
* Cache of failures that have already occurred, containing paths from the repository root.
* Cache of failures that have already occurred, containing paths from the repository root. The value given
* specifies when the failure should expire.
*/
private Set/*<String>*/ failureCache = new HashSet/*<String>*/();
private Map/*<String,Long>*/ failureCache = new HashMap/*<String,Long>*/();
/**
* A user friendly name for the repository.
@@ -84,26 +87,88 @@ public ArtifactRepository getRepository()
return repository;
}
/**
* Check if there is a previously cached failure for requesting the given path.
*
* @param path the path
* @return whether there is a failure
*/
public boolean isCachedFailure( String path )
{
return cacheFailures && failureCache.contains( path );
}
public void addFailure( String path )
{
boolean failed = false;
if ( cacheFailures )
{
failureCache.add( path );
Long time = (Long) failureCache.get( path );
if ( time != null )
{
if ( System.currentTimeMillis() < time.longValue() )
{
failed = true;
}
else
{
clearFailure( path );
}
}
}
return failed;
}
/**
* Add a failure to the cache.
*
* @param path the path that failed
* @param policy the policy for when the failure should expire
*/
public void addFailure( String path, ArtifactRepositoryPolicy policy )
{
failureCache.put( path, new Long( calculateExpiryTime( policy ) ) );
}
private long calculateExpiryTime( ArtifactRepositoryPolicy policy )
{
String updatePolicy = policy.getUpdatePolicy();
long time;
if ( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS.equals( updatePolicy ) )
{
time = 0;
}
else if ( ArtifactRepositoryPolicy.UPDATE_POLICY_DAILY.equals( updatePolicy ) )
{
// Get midnight boundary
Calendar cal = Calendar.getInstance();
cal.set( Calendar.HOUR_OF_DAY, 0 );
cal.set( Calendar.MINUTE, 0 );
cal.set( Calendar.SECOND, 0 );
cal.set( Calendar.MILLISECOND, 0 );
cal.add( Calendar.DAY_OF_MONTH, 1 );
time = cal.getTime().getTime();
}
else if ( updatePolicy.startsWith( ArtifactRepositoryPolicy.UPDATE_POLICY_INTERVAL ) )
{
String s = updatePolicy.substring( ArtifactRepositoryPolicy.UPDATE_POLICY_INTERVAL.length() + 1 );
int minutes = Integer.valueOf( s ).intValue();
Calendar cal = Calendar.getInstance();
cal.add( Calendar.MINUTE, minutes );
time = cal.getTime().getTime();
}
else
{
// else assume "never"
time = Long.MAX_VALUE;
}
return time;
}
/**
* Remove a failure.
*
* @param path the path that had previously failed
*/
public void clearFailure( String path )
{
if ( cacheFailures )
{
failureCache.remove( path );
}
}
public String getName()
{

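Assuming maven-artifact and this module are on the classpath, callers exercise the new cache through addFailure( path, policy ) and isCachedFailure( path ), as the tests below do; creating the ProxiedArtifactRepository itself is elided here (the tests use a createProxiedRepository() helper). A small usage sketch:

import org.apache.maven.artifact.repository.ArtifactRepositoryPolicy;

// Usage sketch only; assumed to sit in the same package as ProxiedArtifactRepository,
// whose package declaration is not shown in the diff above.
public class FailureCacheUsageSketch
{
    /**
     * Record a failed fetch and report whether the failure is still considered live.
     * UPDATE_POLICY_ALWAYS expires immediately, UPDATE_POLICY_NEVER never expires, and the
     * daily/interval policies expire at the next midnight or after the configured minutes.
     * isCachedFailure() also returns false when failure caching is disabled for the repository.
     */
    public static boolean recordAndCheck( ProxiedArtifactRepository repository, String path,
                                          ArtifactRepositoryPolicy policy )
    {
        repository.addFailure( path, policy );
        return repository.isCachedFailure( path );
    }
}

For example, recordAndCheck( proxied, path, new ArtifactRepositoryPolicy( true, ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS, null ) ) comes back false, mirroring testGetWhenInBothProxiedReposFirstHasExpiredCacheFailure below, while a never policy keeps the failure cached for as long as failure caching is enabled.
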
View File

@@ -18,6 +18,7 @@
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
import org.apache.maven.artifact.repository.ArtifactRepositoryPolicy;
import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
import org.apache.maven.wagon.ResourceDoesNotExistException;
import org.apache.maven.wagon.TransferFailedException;
@@ -30,33 +31,34 @@
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.List;
/**
* @author Brett Porter
* @todo! tests to do vvv
* @todo test when failure should be cached but caching is disabled
* @todo test snapshots - general
* @todo test snapshots - newer version on repo2 is pulled down
* @todo test snapshots - older version on repo2 is skipped
* @todo test snapshots - update interval
* @todo test get always
* @todo test get always when resource is present locally but not in any proxied repos (should fail)
* @todo test get always ignores cached failures
* @todo test when managed repo is m1 layout (proxy is m2), including metadata
* @todo test when one proxied repo is m1 layout (managed is m2), including metadata
* @todo test when one proxied repo is m1 layout (managed is m1), including metadata
* @todo test metadata - general
* @todo test metadata - multiple repos are merged
* @todo test metadata - update interval
* @todo test metadata - looking for an update and file has been removed remotely
* @todo test when managed repo is m1 layout (proxy is m2), including metadata
* @todo test when one proxied repo is m1 layout (managed is m2), including metadata
* @todo test when one proxied repo is m1 layout (managed is m1), including metadata
* @todo test get always
* @todo test get always when resource is present locally but not in any proxied repos (should fail)
* @todo test snapshots - general
* @todo test snapshots - newer version on repo2 is pulled down
* @todo test snapshots - older version on repo2 is skipped
* @todo test snapshots - update interval
* @todo test snapshots - when failure is cached but cache period is over (and check failure is cleared)
* @todo test remote checksum only md5
* @todo test remote checksum only sha1
* @todo test remote checksum missing
* @todo test remote checksum present and correct
* @todo test remote checksum present and incorrect
* @todo test remote checksum transfer failed
* @todo test when failure is cached but cache period is over (and check failure is cleared)
*/
public class ProxyRequestHandlerTest
extends PlexusTestCase
@@ -79,6 +81,12 @@ public class ProxyRequestHandlerTest
private Wagon wagonMock;
private static final ArtifactRepositoryPolicy DEFAULT_POLICY =
new ArtifactRepositoryPolicy( true, ArtifactRepositoryPolicy.UPDATE_POLICY_NEVER, null );
private static final ArtifactRepositoryPolicy ALWAYS_UPDATE_POLICY =
new ArtifactRepositoryPolicy( true, ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS, null );
protected void setUp()
throws Exception
{
@@ -336,7 +344,7 @@ public void testGetInSecondProxiedRepoFirstFailsFromCache()
proxiedRepositories.clear();
ProxiedArtifactRepository proxiedArtifactRepository = createProxiedRepository( proxiedRepository1 );
proxiedArtifactRepository.addFailure( path );
proxiedArtifactRepository.addFailure( path, DEFAULT_POLICY );
proxiedRepositories.add( proxiedArtifactRepository );
proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
@@ -366,7 +374,7 @@ public void testGetInSecondProxiedRepoFirstHardFailsFromCache()
proxiedRepositories.clear();
ProxiedArtifactRepository proxiedArtifactRepository = createHardFailProxiedRepository( proxiedRepository1 );
proxiedArtifactRepository.addFailure( path );
proxiedArtifactRepository.addFailure( path, DEFAULT_POLICY );
proxiedRepositories.add( proxiedArtifactRepository );
proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
try
@@ -381,13 +389,77 @@ public void testGetInSecondProxiedRepoFirstHardFailsFromCache()
}
}
public void testGetInSecondProxiedRepoFirstFailsDisabledCacheFailure()
throws ResourceDoesNotExistException, ProxyException, IOException, TransferFailedException,
AuthorizationException
{
String path = "org/apache/maven/test/get-in-second-proxy/1.0/get-in-second-proxy-1.0.jar";
File expectedFile = new File( defaultManagedRepository.getBasedir(), path ).getAbsoluteFile();
assertFalse( expectedFile.exists() );
proxiedRepository1 = createRepository( "proxied1", "test://..." );
proxiedRepositories.clear();
ProxiedArtifactRepository proxiedArtifactRepository = createProxiedRepository( proxiedRepository1 );
proxiedArtifactRepository.addFailure( path, DEFAULT_POLICY );
proxiedArtifactRepository.setCacheFailures( false );
proxiedRepositories.add( proxiedArtifactRepository );
proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
wagonMock.get( path, new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" ) );
wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
wagonMockControl.replay();
File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
wagonMockControl.verify();
assertEquals( "Check file matches", expectedFile, file );
assertTrue( "Check file created", file.exists() );
File proxiedFile = new File( proxiedRepository2.getBasedir(), path );
String expectedContents = FileUtils.fileRead( proxiedFile );
assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
assertFalse( "Check failure", proxiedArtifactRepository.isCachedFailure( path ) );
}
public void testGetWhenInBothProxiedReposFirstHasExpiredCacheFailure()
throws ResourceDoesNotExistException, ProxyException, IOException, ParseException
{
String path = "org/apache/maven/test/get-in-both-proxies/1.0/get-in-both-proxies-1.0.jar";
File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
assertFalse( expectedFile.exists() );
proxiedRepositories.clear();
ProxiedArtifactRepository proxiedArtifactRepository = createProxiedRepository( proxiedRepository1 );
proxiedArtifactRepository.addFailure( path, ALWAYS_UPDATE_POLICY );
proxiedRepositories.add( proxiedArtifactRepository );
proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
assertEquals( "Check file matches", expectedFile, file );
assertTrue( "Check file created", file.exists() );
File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
String expectedContents = FileUtils.fileRead( proxiedFile );
assertEquals( "Check file contents", expectedContents, FileUtils.fileRead( file ) );
proxiedFile = new File( proxiedRepository2.getBasedir(), path );
String unexpectedContents = FileUtils.fileRead( proxiedFile );
assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.fileRead( file ) ) );
assertFalse( "Check failure", proxiedArtifactRepository.isCachedFailure( path ) );
}
/**
* A faster recursive copy that omits .svn directories.
*
* @param sourceDirectory the source directory to copy
* @param destDirectory the target location
* @throws java.io.IOException if there is a copying problem
* @todo get back into plexus-utils, share with indexing module
* @todo get back into plexus-utils, share with converter module
*/
private static void copyDirectoryStructure( File sourceDirectory, File destDirectory )
throws IOException