* Expanding Proxy / Policy tests.

git-svn-id: https://svn.apache.org/repos/asf/maven/archiva/branches/archiva-jpox-database-refactor@529106 13f79535-47bb-0310-9956-ffa450edef68
Joakim Erdfelt 2007-04-16 00:49:27 +00:00
parent 284ce85703
commit 4e7c4eff6c
28 changed files with 3138 additions and 2702 deletions
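
The diff below changes the connector contract: both fetchFromProxies variants now return the downloaded File (or null when nothing was obtained) instead of a boolean, and the per-policy connector fields are replaced by a generic policy Properties map applied before and after the transfer. A minimal caller sketch, assuming only the signatures visible in the diff (in Archiva the handler is injected by Plexus; the plain field and the fallback logic here just keep the sketch self-contained):

package org.apache.maven.archiva.proxy;

import java.io.File;

import org.apache.maven.archiva.model.ArchivaRepository;
import org.apache.maven.archiva.model.ArtifactReference;

public class ProxyFetchExample
{
    // Injected by Plexus in the real code; a plain field keeps this sketch self-contained.
    private RepositoryProxyConnectors proxyHandler;

    public File resolve( ArchivaRepository managedRepository, ArtifactReference artifact, File localFile )
        throws ProxyException
    {
        // New contract: the downloaded file is returned on success, null when no content was obtained.
        File downloaded = proxyHandler.fetchFromProxies( managedRepository, artifact );
        if ( downloaded != null )
        {
            return downloaded;
        }

        // The old contract returned false here; a caller can now fall back to whatever is already local.
        return localFile.exists() ? localFile : null;
    }
}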

View File

@@ -37,10 +37,13 @@
<groupId>org.apache.maven.archiva</groupId>
<artifactId>archiva-repository-layer</artifactId>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
</dependency>
<dependency>
<groupId>org.apache.maven.wagon</groupId>
<artifactId>wagon-file</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.maven.wagon</groupId>
@@ -57,8 +60,17 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<groupId>org.codehaus.plexus</groupId>
<artifactId>plexus-slf4j-logging</artifactId>
<version>1.1-alpha-1-SNAPSHOT</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>1.2</version>
<scope>test</scope>
</dependency>
</dependencies>
</project>

View File

@@ -19,22 +19,21 @@ package org.apache.maven.archiva.proxy;
* under the License.
*/
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.io.FileUtils;
import org.apache.maven.archiva.common.utils.VersionUtil;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.NetworkProxyConfiguration;
import org.apache.maven.archiva.configuration.ProxyConnectorConfiguration;
import org.apache.maven.archiva.configuration.RepositoryConfiguration;
import org.apache.maven.archiva.configuration.RepositoryProxyConnectorConfiguration;
import org.apache.maven.archiva.model.ArchivaRepository;
import org.apache.maven.archiva.model.ArtifactReference;
import org.apache.maven.archiva.model.ProjectReference;
import org.apache.maven.archiva.proxy.policy.PostfetchPolicy;
import org.apache.maven.archiva.proxy.policy.PrefetchPolicy;
import org.apache.maven.archiva.policies.DownloadPolicy;
import org.apache.maven.archiva.policies.urlcache.UrlFailureCache;
import org.apache.maven.archiva.repository.layout.BidirectionalRepositoryLayout;
import org.apache.maven.archiva.repository.layout.BidirectionalRepositoryLayoutFactory;
import org.apache.maven.archiva.repository.layout.LayoutException;
import org.apache.maven.wagon.ConnectionException;
import org.apache.maven.wagon.ResourceDoesNotExistException;
import org.apache.maven.wagon.Wagon;
import org.apache.maven.wagon.WagonException;
import org.apache.maven.wagon.authentication.AuthenticationException;
@@ -50,11 +49,14 @@ import org.codehaus.plexus.util.SelectorUtils;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Map.Entry;
/**
* DefaultRepositoryProxyConnectors
@@ -86,14 +88,19 @@ public class DefaultRepositoryProxyConnectors
private BidirectionalRepositoryLayoutFactory layoutFactory;
/**
* @plexus.requirement role="checksum"
* @plexus.requirement role="org.apache.maven.archiva.policies.PreDownloadPolicy"
*/
private PrefetchPolicy checksumPolicy;
private Map preDownloadPolicies;
/**
* @plexus.requirement role="artifact-update"
* @plexus.requirement role="org.apache.maven.archiva.policies.PostDownloadPolicy"
*/
private PostfetchPolicy updatePolicy;
private Map postDownloadPolicies;
/**
* @plexus.requirement role-hint="default"
*/
private UrlFailureCache urlFailureCache;
private Map proxyConnectorMap = new HashMap();
@@ -101,7 +108,7 @@ public class DefaultRepositoryProxyConnectors
private List propertyNameTriggers = new ArrayList();
public boolean fetchFromProxies( ArchivaRepository repository, ArtifactReference artifact )
public File fetchFromProxies( ArchivaRepository repository, ArtifactReference artifact )
throws ProxyException
{
if ( !repository.isManaged() )
@@ -122,36 +129,41 @@ public class DefaultRepositoryProxyConnectors
+ e.getMessage(), e );
}
boolean isSnapshot = VersionUtil.isSnapshot( artifact.getVersion() );
Properties requestProperties = new Properties();
requestProperties.setProperty( "version", artifact.getVersion() );
List connectors = getProxyConnectors( repository );
Iterator it = connectors.iterator();
while ( it.hasNext() )
{
ProxyConnector connector = (ProxyConnector) it.next();
getLogger().debug( "Attempting connector: " + connector );
ArchivaRepository targetRepository = connector.getTargetRepository();
try
{
BidirectionalRepositoryLayout targetLayout = layoutFactory.getLayout( targetRepository.getLayoutType() );
String targetPath = targetLayout.toPath( artifact );
if ( transferFile( connector, targetRepository, targetPath, localFile, isSnapshot ) )
File downloadedFile = transferFile( connector, targetRepository, targetPath, localFile,
requestProperties );
if ( fileExists( downloadedFile ) )
{
// Transfer was successful. return.
return true;
getLogger().info( "Successfully transfered: " + downloadedFile.getAbsolutePath() );
return downloadedFile;
}
}
catch ( LayoutException e )
{
getLogger().error( "Unable to proxy due to bad layout definition: " + e.getMessage(), e );
return false;
return null;
}
}
return false;
return null;
}
public boolean fetchFromProxies( ArchivaRepository repository, ProjectReference metadata )
public File fetchFromProxies( ArchivaRepository repository, ProjectReference metadata )
throws ProxyException
{
if ( !repository.isManaged() )
@@ -172,6 +184,8 @@ public class DefaultRepositoryProxyConnectors
+ e.getMessage(), e );
}
Properties requestProperties = new Properties();
List connectors = getProxyConnectors( repository );
Iterator it = connectors.iterator();
while ( it.hasNext() )
@@ -183,20 +197,43 @@ public class DefaultRepositoryProxyConnectors
BidirectionalRepositoryLayout targetLayout = layoutFactory.getLayout( targetRepository.getLayoutType() );
String targetPath = targetLayout.toPath( metadata ) + FILENAME_MAVEN_METADATA;
if ( transferFile( connector, targetRepository, targetPath, localFile, false ) )
File downloadedFile = transferFile( connector, targetRepository, targetPath, localFile,
requestProperties );
if ( fileExists( downloadedFile ) )
{
// Transfer was successful. return.
return true;
getLogger().info( "Successfully transfered: " + downloadedFile.getAbsolutePath() );
return downloadedFile;
}
}
catch ( LayoutException e )
{
getLogger().error( "Unable to proxy due to bad layout definition: " + e.getMessage(), e );
return false;
return null;
}
}
return false;
return null;
}
private boolean fileExists( File file )
{
if ( file == null )
{
return false;
}
if ( !file.exists() )
{
return false;
}
if ( !file.isFile() )
{
return false;
}
return true;
}
/**
@@ -206,39 +243,38 @@ public class DefaultRepositoryProxyConnectors
* @param targetRepository
* @param targetPath
* @param localFile
* @param isSnapshot
* @param requestProperties
* @return
* @throws ProxyException
*/
private boolean transferFile( ProxyConnector connector, ArchivaRepository targetRepository, String targetPath,
File localFile, boolean isSnapshot )
private File transferFile( ProxyConnector connector, ArchivaRepository targetRepository, String targetPath,
File localFile, Properties requestProperties )
throws ProxyException
{
if ( isSnapshot )
String url = targetRepository.getUrl().toString() + targetPath;
requestProperties.setProperty( "url", url );
// Handle pre-download policy
if ( !applyPolicies( connector.getPolicies(), this.preDownloadPolicies, requestProperties, localFile ) )
{
// Handle Snapshot Policy
if ( !updatePolicy.applyPolicy( connector.getSnapshotsPolicy(), localFile ) )
getLogger().info( "Failed pre-download policies - " + localFile.getAbsolutePath() );
if ( fileExists( localFile ) )
{
return false;
}
}
else
{
// Handle Release Policy
if ( !updatePolicy.applyPolicy( connector.getReleasesPolicy(), localFile ) )
{
return false;
return localFile;
}
return null;
}
// Is a whitelist defined?
if ( CollectionUtils.isNotEmpty( connector.getWhitelist() ) )
if ( !isEmpty( connector.getWhitelist() ) )
{
// Path must belong to whitelist.
if ( !matchesPattern( targetPath, connector.getWhitelist() ) )
{
getLogger().debug( "Path [" + targetPath + "] is not part of defined whitelist (skipping transfer)." );
return false;
return null;
}
}
@@ -246,17 +282,12 @@ public class DefaultRepositoryProxyConnectors
if ( matchesPattern( targetPath, connector.getBlacklist() ) )
{
getLogger().debug( "Path [" + targetPath + "] is part of blacklist (skipping transfer)." );
return false;
return null;
}
// Transfer the file.
Wagon wagon = null;
try
{
File temp = new File( localFile.getAbsolutePath() + ".tmp" );
temp.deleteOnExit();
String protocol = targetRepository.getUrl().getProtocol();
wagon = (Wagon) wagons.get( protocol );
if ( wagon == null )
@@ -267,43 +298,166 @@ public class DefaultRepositoryProxyConnectors
boolean connected = connectToRepository( connector, wagon, targetRepository );
if ( connected )
{
if ( localFile.exists() )
{
getLogger().debug( "Retrieving " + targetPath + " from " + targetRepository.getName() );
wagon.get( targetPath, temp );
}
else
{
getLogger().debug(
"Retrieving " + targetPath + " from " + targetRepository.getName()
+ " if updated" );
wagon.getIfNewer( targetPath, temp, localFile.lastModified() );
}
localFile = transferSimpleFile( wagon, targetRepository, targetPath, localFile );
transferChecksum( wagon, targetRepository, targetPath, localFile, ".sha1" );
transferChecksum( wagon, targetRepository, targetPath, localFile, ".md5" );
}
}
catch ( ResourceDoesNotExistException e )
{
// Do not cache url here.
return null;
}
catch ( WagonException e )
{
urlFailureCache.cacheFailure( url );
return null;
}
finally
{
if ( wagon != null )
{
try
{
wagon.disconnect();
}
catch ( ConnectionException e )
{
getLogger().warn( "Unable to disconnect wagon.", e );
}
}
}
// Handle post-download policies.
if ( !applyPolicies( connector.getPolicies(), this.postDownloadPolicies, requestProperties, localFile ) )
{
getLogger().info( "Failed post-download policies - " + localFile.getAbsolutePath() );
if ( fileExists( localFile ) )
{
return localFile;
}
return null;
}
// Everything passes.
return localFile;
}
private void transferChecksum( Wagon wagon, ArchivaRepository targetRepository, String targetPath, File localFile,
String type )
throws ProxyException
{
String url = targetRepository.getUrl().toString() + targetPath;
// Transfer checksum does not use the policy.
if ( urlFailureCache.hasFailedBefore( url + type ) )
{
return;
}
try
{
File hashFile = new File( localFile.getAbsolutePath() + type );
transferSimpleFile( wagon, targetRepository, targetPath + type, hashFile );
getLogger().debug( "Checksum" + type + " Downloaded: " + hashFile );
}
catch ( ResourceDoesNotExistException e )
{
getLogger().debug( "Checksum" + type + " Not Download: " + e.getMessage() );
}
catch ( WagonException e )
{
urlFailureCache.cacheFailure( url + type );
getLogger().warn( "Transfer failed on checksum: " + url + " : " + e.getMessage(), e );
}
}
private File transferSimpleFile( Wagon wagon, ArchivaRepository targetRepository, String targetPath, File localFile )
throws ProxyException, WagonException
{
// Transfer the file.
File temp = null;
try
{
temp = new File( localFile.getAbsolutePath() + ".tmp" );
boolean success = false;
if ( localFile.exists() )
{
getLogger().debug( "Retrieving " + targetPath + " from " + targetRepository.getName() );
wagon.get( targetPath, temp );
success = true;
// temp won't exist if we called getIfNewer and it was older, but its still a successful return
if ( temp.exists() )
{
moveTempToTarget( temp, localFile );
}
else
// You wouldn't get here on failure, a WagonException would have been thrown.
getLogger().debug( "Downloaded successfully." );
}
else
{
getLogger().debug( "Retrieving " + targetPath + " from " + targetRepository.getName() + " if updated" );
success = wagon.getIfNewer( targetPath, temp, localFile.lastModified() );
if ( !success )
{
getLogger().debug(
"Attempt to retrieving " + targetPath + " from " + targetRepository.getName()
+ " failed: local file does not exist." );
return false;
"Not downloaded, as local file is newer than remote side: "
+ localFile.getAbsolutePath() );
}
else if ( temp.exists() )
{
getLogger().debug( "Downloaded successfully." );
moveTempToTarget( temp, localFile );
}
getLogger().debug( "Successfully downloaded" );
}
return localFile;
}
catch ( ResourceDoesNotExistException e )
{
getLogger().warn( "Resource does not exist: " + e.getMessage() );
throw e;
}
catch ( WagonException e )
{
getLogger().warn( "Download failure:" + e.getMessage(), e );
return false;
throw e;
}
finally
{
if ( temp != null )
{
temp.delete();
}
}
}
// Handle checksum Policy.
return checksumPolicy.applyPolicy( connector.getChecksumPolicy(), localFile );
private boolean applyPolicies( Properties policySettings, Map downloadPolicies, Properties request, File localFile )
{
Iterator it = downloadPolicies.entrySet().iterator();
while ( it.hasNext() )
{
Map.Entry entry = (Entry) it.next();
String key = (String) entry.getKey();
DownloadPolicy policy = (DownloadPolicy) entry.getValue();
String defaultSetting = policy.getDefaultPolicySetting();
String setting = policySettings.getProperty( key, defaultSetting );
getLogger().debug( "Applying [" + key + "] policy with [" + setting + "]" );
if ( !policy.applyPolicy( setting, request, localFile ) )
{
getLogger().debug( "Didn't pass the [" + key + "] policy." );
return false;
}
}
return true;
}
/**
@@ -378,7 +532,7 @@ public class DefaultRepositoryProxyConnectors
private boolean matchesPattern( String path, List patterns )
{
if ( CollectionUtils.isEmpty( patterns ) )
if ( isEmpty( patterns ) )
{
return false;
}
@@ -442,20 +596,18 @@ public class DefaultRepositoryProxyConnectors
it = proxyConfigs.iterator();
while ( it.hasNext() )
{
RepositoryProxyConnectorConfiguration proxyConfig = (RepositoryProxyConnectorConfiguration) it.next();
ProxyConnectorConfiguration proxyConfig = (ProxyConnectorConfiguration) it.next();
String key = proxyConfig.getSourceRepoId();
// Create connector object.
ProxyConnector connector = new ProxyConnector();
connector.setSourceRepository( getRepository( proxyConfig.getSourceRepoId() ) );
connector.setTargetRepository( getRepository( proxyConfig.getTargetRepoId() ) );
connector.setSnapshotsPolicy( proxyConfig.getSnapshotsPolicy() );
connector.setReleasesPolicy( proxyConfig.getReleasesPolicy() );
connector.setChecksumPolicy( proxyConfig.getChecksumPolicy() );
connector.setPolicies( proxyConfig.getPolicies() );
// Copy any blacklist patterns.
List blacklist = new ArrayList();
if ( !CollectionUtils.isEmpty( proxyConfig.getBlackListPatterns() ) )
if ( !isEmpty( proxyConfig.getBlackListPatterns() ) )
{
blacklist.addAll( proxyConfig.getBlackListPatterns() );
}
@@ -463,7 +615,7 @@ public class DefaultRepositoryProxyConnectors
// Copy any whitelist patterns.
List whitelist = new ArrayList();
if ( !CollectionUtils.isEmpty( proxyConfig.getWhiteListPatterns() ) )
if ( !isEmpty( proxyConfig.getWhiteListPatterns() ) )
{
whitelist.addAll( proxyConfig.getWhiteListPatterns() );
}
@@ -509,6 +661,16 @@ public class DefaultRepositoryProxyConnectors
}
}
private boolean isEmpty( Collection collection )
{
if ( collection == null )
{
return true;
}
return collection.isEmpty();
}
private ArchivaRepository getRepository( String repoId )
{
RepositoryConfiguration repoConfig = archivaConfiguration.getConfiguration().findRepositoryById( repoId );
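
Worth noting from the transfer code above: downloads now land in a sibling ".tmp" file that is only moved over the real file on success, and the finally block removes any leftover temp file (the new assertNoTempFiles helper in the test base class below checks exactly that). A standalone sketch of the pattern using plain java.io; moveTempToTarget is the method name used above, but its body is not shown in the diff, so the rename logic here is illustrative only:

import java.io.File;
import java.io.IOException;

public class TempThenMoveExample
{
    public File fetchInto( File localFile )
        throws IOException
    {
        File temp = new File( localFile.getAbsolutePath() + ".tmp" );
        try
        {
            // ... the wagon would write the remote resource into temp here ...
            if ( temp.exists() )
            {
                moveTempToTarget( temp, localFile );
            }
            return localFile;
        }
        finally
        {
            // Leftover *.tmp files would otherwise trip assertNoTempFiles() in the tests.
            temp.delete();
        }
    }

    private void moveTempToTarget( File temp, File target )
        throws IOException
    {
        // Illustrative only: replace the target with the freshly downloaded temp file.
        if ( target.exists() && !target.delete() )
        {
            throw new IOException( "Unable to overwrite " + target.getAbsolutePath() );
        }
        if ( !temp.renameTo( target ) )
        {
            throw new IOException( "Unable to move " + temp.getAbsolutePath() + " to " + target.getAbsolutePath() );
        }
    }
}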

View File

@@ -22,7 +22,9 @@ package org.apache.maven.archiva.proxy;
import org.apache.maven.archiva.model.ArchivaRepository;
import org.apache.maven.archiva.repository.connector.RepositoryConnector;
import java.util.Enumeration;
import java.util.List;
import java.util.Properties;
/**
* This represents a connector for a repository to repository proxy.
@@ -41,14 +43,10 @@ public class ProxyConnector
private List whitelist;
private String snapshotsPolicy;
private String releasesPolicy;
private String checksumPolicy;
private String proxyId;
private Properties policies;
public List getBlacklist()
{
return blacklist;
@@ -89,34 +87,14 @@ public class ProxyConnector
this.whitelist = whitelist;
}
public String getChecksumPolicy()
public Properties getPolicies()
{
return checksumPolicy;
return policies;
}
public void setChecksumPolicy( String failurePolicy )
public void setPolicies( Properties policies )
{
this.checksumPolicy = failurePolicy;
}
public String getReleasesPolicy()
{
return releasesPolicy;
}
public void setReleasesPolicy( String releasesPolicy )
{
this.releasesPolicy = releasesPolicy;
}
public String getSnapshotsPolicy()
{
return snapshotsPolicy;
}
public void setSnapshotsPolicy( String snapshotsPolicy )
{
this.snapshotsPolicy = snapshotsPolicy;
this.policies = policies;
}
public String getProxyId()
@@ -128,4 +106,31 @@ public class ProxyConnector
{
this.proxyId = proxyId;
}
public String toString()
{
StringBuffer sb = new StringBuffer();
sb.append( "ProxyConnector[\n" );
sb.append( " source:" ).append( this.sourceRepository ).append( "\n" );
sb.append( " target:" ).append( this.targetRepository ).append( "\n" );
sb.append( " proxyId:" ).append( this.proxyId ).append( "\n" );
Enumeration keys = this.policies.propertyNames();
while ( keys.hasMoreElements() )
{
String name = (String) keys.nextElement();
sb.append( " policy[" ).append( name ).append( "]:" );
sb.append( this.policies.getProperty( name ) ).append( "\n" );
}
sb.append( "]" );
return sb.toString();
}
public void setPolicy( String policyId, String policySetting )
{
// TODO Auto-generated method stub
}
}
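
With the per-policy getters and setters gone, a connector now carries all of its policy choices in one Properties object keyed by policy id; the configuration layer copies these in from ProxyConnectorConfiguration.getPolicies(). A hedged wiring sketch, assuming the POLICY_* constants and the policy-setting constants used by saveConnector() in the test code later in this commit (building the Properties by hand here is only for illustration):

package org.apache.maven.archiva.proxy;

import java.util.Properties;

import org.apache.maven.archiva.configuration.ProxyConnectorConfiguration;
import org.apache.maven.archiva.model.ArchivaRepository;
import org.apache.maven.archiva.policies.CachedFailuresPolicy;
import org.apache.maven.archiva.policies.ChecksumPolicy;
import org.apache.maven.archiva.policies.ReleasesPolicy;
import org.apache.maven.archiva.policies.SnapshotsPolicy;

public class ProxyConnectorWiringExample
{
    public ProxyConnector wire( ArchivaRepository managed, ArchivaRepository proxied )
    {
        // Keys are the policy ids; the real code obtains this map from the connector configuration.
        Properties policies = new Properties();
        policies.setProperty( ProxyConnectorConfiguration.POLICY_CHECKSUM, ChecksumPolicy.FIX );
        policies.setProperty( ProxyConnectorConfiguration.POLICY_RELEASES, ReleasesPolicy.IGNORED );
        policies.setProperty( ProxyConnectorConfiguration.POLICY_SNAPSHOTS, SnapshotsPolicy.IGNORED );
        policies.setProperty( ProxyConnectorConfiguration.POLICY_CACHE_FAILURES, CachedFailuresPolicy.CACHED );

        ProxyConnector connector = new ProxyConnector();
        connector.setSourceRepository( managed );
        connector.setTargetRepository( proxied );
        connector.setPolicies( policies );
        return connector;
    }
}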

View File

@@ -23,6 +23,7 @@ import org.apache.maven.archiva.model.ArchivaRepository;
import org.apache.maven.archiva.model.ArtifactReference;
import org.apache.maven.archiva.model.ProjectReference;
import java.io.File;
import java.util.List;
/**
@@ -45,7 +46,7 @@ public interface RepositoryProxyConnectors
* @return true if the fetch operation succeeded in obtaining content, false if no content was obtained.
* @throws ProxyException if there was a problem fetching the content from the target repositories.
*/
public boolean fetchFromProxies( ArchivaRepository repository, ArtifactReference artifact )
public File fetchFromProxies( ArchivaRepository repository, ArtifactReference artifact )
throws ProxyException;
/**
@@ -60,7 +61,7 @@ public interface RepositoryProxyConnectors
* @return true if the fetch operation succeeded in obtaining content, false if no content was obtained.
* @throws ProxyException if there was a problem fetching the content from the target repositories.
*/
public boolean fetchFromProxies( ArchivaRepository repository, ProjectReference metadata )
public File fetchFromProxies( ArchivaRepository repository, ProjectReference metadata )
throws ProxyException;
/**

View File

@@ -1,139 +0,0 @@
package org.apache.maven.archiva.proxy.policy;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.codehaus.plexus.logging.AbstractLogEnabled;
import java.io.File;
import java.util.Calendar;
import java.util.HashSet;
import java.util.Set;
/**
* ArtifactUpdatePolicy - tests the local file to see if the transfer should
* occur or not.
*
* @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
* @version $Id$
*
* @plexus.component role-hint="artifact-update"
*/
public class ArtifactUpdatePolicy
extends AbstractLogEnabled
implements PrefetchPolicy
{
/**
* The DISABLED policy means that the artifact retrieval isn't even attempted,
* let alone updated locally.
*/
public static final String DISABLED = "disabled";
/**
* <p>
* The DAILY policy means that the artifact retrieval occurs only if one of
* the following conditions are met...
* </p>
* <ul>
* <li>The local artifact is not present.</li>
* <li>The local artifact has a last modified timestamp older than (now - 1 day).</li>
* </ul>
*/
public static final String DAILY = "daily";
/**
* <p>
* The HOURLY policy means that the artifact retrieval occurs only if one of
* the following conditions are met...
* </p>
* <ul>
* <li>The local artifact is not present.</li>
* <li>The local artifact has a last modified timestamp older than (now - 1 hour).</li>
* </ul>
*/
public static final String HOURLY = "hourly";
/**
* The ONCE policy means that the artifact retrieval occurs only if the
* local artifact is not present. This means that the retreival can only
* occur once.
*/
public static final String ONCE = "once";
private Set validPolicyCodes = new HashSet();
public ArtifactUpdatePolicy()
{
validPolicyCodes.add( DISABLED );
validPolicyCodes.add( DAILY );
validPolicyCodes.add( HOURLY );
validPolicyCodes.add( ONCE );
}
public boolean applyPolicy( String policyCode, File localFile )
{
if ( !validPolicyCodes.contains( policyCode ) )
{
// No valid code? false it is then.
getLogger().error( "Unknown policyCode [" + policyCode + "]" );
return false;
}
if ( DISABLED.equals( policyCode ) )
{
// Disabled means no.
return false;
}
if ( !localFile.exists() )
{
// No file means it's ok.
return true;
}
if ( ONCE.equals( policyCode ) )
{
// File exists, but policy is once.
return false;
}
if ( DAILY.equals( policyCode ) )
{
Calendar cal = Calendar.getInstance();
cal.add( Calendar.DAY_OF_MONTH, -1 );
Calendar fileCal = Calendar.getInstance();
fileCal.setTimeInMillis( localFile.lastModified() );
return cal.after( fileCal );
}
if ( HOURLY.equals( policyCode ) )
{
Calendar cal = Calendar.getInstance();
cal.add( Calendar.HOUR, -1 );
Calendar fileCal = Calendar.getInstance();
fileCal.setTimeInMillis( localFile.lastModified() );
return cal.after( fileCal );
}
getLogger().error( "Unhandled policyCode [" + policyCode + "]" );
return false;
}
}

View File

@@ -1,252 +0,0 @@
package org.apache.maven.archiva.proxy.policy;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.codehaus.plexus.digest.ChecksumFile;
import org.codehaus.plexus.digest.Digester;
import org.codehaus.plexus.digest.DigesterException;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
/**
* ChecksumPolicy
*
* @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
* @version $Id$
*
* @plexus.component role-hint="checksum"
*/
public class ChecksumPolicy
extends AbstractLogEnabled
implements PostfetchPolicy
{
/**
* The FAIL policy indicates that if the checksum does not match the
* downloaded file, then remove the downloaded artifact, and checksum
* files, and fail the transfer to the client side.
*/
public static final String FAIL = "fail";
/**
* The FIX policy indicates that if the checksum does not match the
* downloaded file, then fix the checksum file locally, and return
* to the client side the corrected checksum.
*/
public static final String FIX = "fix";
/**
* The IGNORE policy indicates that if the checksum is never tested
* and even bad downloads and checksum files are left in place
* on the local repository.
*/
public static final String IGNORE = "ignore";
/**
* @plexus.requirement role-hint="sha1"
*/
private Digester digestSha1;
/**
* @plexus.requirement role-hint="md5"
*/
private Digester digestMd5;
/**
* @plexus.requirement
*/
private ChecksumFile checksumFile;
private Set validPolicyCodes = new HashSet();
public ChecksumPolicy()
{
validPolicyCodes.add( FAIL );
validPolicyCodes.add( FIX );
validPolicyCodes.add( IGNORE );
}
public boolean applyPolicy( String policyCode, File localFile )
{
if ( !validPolicyCodes.contains( policyCode ) )
{
// No valid code? false it is then.
getLogger().error( "Unknown policyCode [" + policyCode + "]" );
return false;
}
if ( IGNORE.equals( policyCode ) )
{
// Ignore.
return true;
}
File sha1File = new File( localFile.getAbsolutePath() + ".sha1" );
File md5File = new File( localFile.getAbsolutePath() + ".md5" );
if ( FAIL.equals( policyCode ) )
{
if ( !sha1File.exists() && !md5File.exists() )
{
getLogger().error( "File " + localFile.getAbsolutePath() + " has no checksum files (sha1 or md5)." );
localFile.delete();
return false;
}
// Test for sha1 first, then md5
if ( sha1File.exists() )
{
try
{
return checksumFile.isValidChecksum( sha1File );
}
catch ( FileNotFoundException e )
{
getLogger().warn( "Unable to find sha1 file: " + sha1File.getAbsolutePath(), e );
return false;
}
catch ( DigesterException e )
{
getLogger().warn( "Unable to process sha1 file: " + sha1File.getAbsolutePath(), e );
return false;
}
catch ( IOException e )
{
getLogger().warn( "Unable to process sha1 file: " + sha1File.getAbsolutePath(), e );
return false;
}
}
if ( md5File.exists() )
{
try
{
return checksumFile.isValidChecksum( md5File );
}
catch ( FileNotFoundException e )
{
getLogger().warn( "Unable to find md5 file: " + md5File.getAbsolutePath(), e );
return false;
}
catch ( DigesterException e )
{
getLogger().warn( "Unable to process md5 file: " + md5File.getAbsolutePath(), e );
return false;
}
catch ( IOException e )
{
getLogger().warn( "Unable to process md5 file: " + md5File.getAbsolutePath(), e );
return false;
}
}
}
if ( FIX.equals( policyCode ) )
{
if ( !sha1File.exists() )
{
try
{
checksumFile.createChecksum( localFile, digestSha1 );
}
catch ( DigesterException e )
{
getLogger().warn( "Unable to create sha1 file: " + e.getMessage(), e );
return false;
}
catch ( IOException e )
{
getLogger().warn( "Unable to create sha1 file: " + e.getMessage(), e );
return false;
}
}
else
{
try
{
checksumFile.isValidChecksum( sha1File );
}
catch ( FileNotFoundException e )
{
getLogger().warn( "Unable to find sha1 file: " + sha1File.getAbsolutePath(), e );
return false;
}
catch ( DigesterException e )
{
getLogger().warn( "Unable to process sha1 file: " + sha1File.getAbsolutePath(), e );
return false;
}
catch ( IOException e )
{
getLogger().warn( "Unable to process sha1 file: " + sha1File.getAbsolutePath(), e );
return false;
}
}
if ( !md5File.exists() )
{
try
{
checksumFile.createChecksum( localFile, digestMd5 );
}
catch ( DigesterException e )
{
getLogger().warn( "Unable to create md5 file: " + e.getMessage(), e );
return false;
}
catch ( IOException e )
{
getLogger().warn( "Unable to create md5 file: " + e.getMessage(), e );
return false;
}
}
else
{
try
{
return checksumFile.isValidChecksum( md5File );
}
catch ( FileNotFoundException e )
{
getLogger().warn( "Unable to find md5 file: " + md5File.getAbsolutePath(), e );
return false;
}
catch ( DigesterException e )
{
getLogger().warn( "Unable to process md5 file: " + md5File.getAbsolutePath(), e );
return false;
}
catch ( IOException e )
{
getLogger().warn( "Unable to process md5 file: " + md5File.getAbsolutePath(), e );
return false;
}
}
}
getLogger().error( "Unhandled policyCode [" + policyCode + "]" );
return false;
}
}

View File

@@ -1,42 +0,0 @@
package org.apache.maven.archiva.proxy.policy;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.File;
/**
* Policy to apply after the fetch of content, but before returning to
* the client.
*
* @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
* @version $Id$
*/
public interface PostfetchPolicy
{
/**
* Apply the policy using the provided policy code and local file.
*
* @param policyCode the policy code to use.
* @param localFile the local file that might affect the policy.
* @return true if the policy passes, false if the policy prevents
* the returning of success to the client.
*/
public boolean applyPolicy( String policyCode, File localFile );
}

View File

@@ -1,41 +0,0 @@
package org.apache.maven.archiva.proxy.policy;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.File;
/**
* Policy to apply before the fetch of content.
*
* @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
* @version $Id$
*/
public interface PrefetchPolicy
{
/**
* Apply the policy using the provided policy code and local file.
*
* @param policyCode the policy code to use.
* @param localFile the local file that might affect the policy.
* @return true if the policy passes, false if the policy prevents the
* fetching of the content.
*/
public boolean applyPolicy( String policyCode, File localFile );
}
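
Both of the deleted interfaces above took only a policy code and the local file. Their replacements in the org.apache.maven.archiva.policies package also receive the request Properties ("version" and "url" are set by DefaultRepositoryProxyConnectors earlier in this commit). A sketch of a policy written against that shape, assuming DownloadPolicy exposes exactly the two methods the connector code calls, getDefaultPolicySetting() and applyPolicy(String, Properties, File); the "no snapshots" rule and the "ignored" setting string are made up for illustration:

package org.apache.maven.archiva.policies;

import java.io.File;
import java.util.Properties;

public class NoSnapshotsPolicyExample
    implements DownloadPolicy
{
    public String getDefaultPolicySetting()
    {
        return "ignored";
    }

    public boolean applyPolicy( String setting, Properties request, File localFile )
    {
        if ( "ignored".equals( setting ) )
        {
            // Nothing to enforce for this setting.
            return true;
        }

        // Unlike the old PrefetchPolicy, the request properties are available here.
        String version = request.getProperty( "version", "" );
        return version.indexOf( "SNAPSHOT" ) < 0;
    }
}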

View File

@@ -20,10 +20,32 @@ package org.apache.maven.archiva.proxy;
*/
import org.apache.commons.io.FileUtils;
import org.apache.maven.archiva.common.utils.PathUtil;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ProxyConnectorConfiguration;
import org.apache.maven.archiva.configuration.RepositoryConfiguration;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.model.ArchivaRepository;
import org.apache.maven.archiva.model.ArtifactReference;
import org.apache.maven.archiva.model.ProjectReference;
import org.apache.maven.archiva.policies.urlcache.UrlFailureCache;
import org.apache.maven.archiva.repository.layout.BidirectionalRepositoryLayout;
import org.apache.maven.archiva.repository.layout.BidirectionalRepositoryLayoutFactory;
import org.apache.maven.archiva.repository.layout.LayoutException;
import org.apache.maven.wagon.Wagon;
import org.codehaus.plexus.PlexusTestCase;
import org.easymock.MockControl;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Collection;
import java.util.Date;
import java.util.Iterator;
import java.util.Locale;
/**
* AbstractProxyTestCase
@@ -34,6 +56,144 @@ import java.io.IOException;
public class AbstractProxyTestCase
extends PlexusTestCase
{
protected static final String ID_LEGACY_PROXIED = "legacy-proxied";
protected static final String ID_PROXIED1 = "proxied1";
protected static final String ID_PROXIED2 = "proxied2";
protected static final String ID_DEFAULT_MANAGED = "default-managed-repository";
protected static final String ID_LEGACY_MANAGED = "legacy-managed-repository";
protected static final String REPOPATH_PROXIED_LEGACY = "src/test/repositories/legacy-proxied";
protected static final String REPOPATH_PROXIED1 = "src/test/repositories/proxied1";
protected static final String REPOPATH_PROXIED2 = "src/test/repositories/proxied2";
protected static final String REPOPATH_DEFAULT_MANAGED = "src/test/repositories/managed";
protected static final String REPOPATH_DEFAULT_MANAGED_TARGET = "target/test-repository/managed";
protected static final String REPOPATH_LEGACY_MANAGED = "src/test/repositories/legacy-managed";
protected static final String REPOPATH_LEGACY_MANAGED_TARGET = "target/test-repository/legacy-managed";
protected MockControl wagonMockControl;
protected Wagon wagonMock;
protected RepositoryProxyConnectors proxyHandler;
protected ArchivaRepository managedDefaultRepository;
protected File managedDefaultDir;
protected ArchivaRepository managedLegacyRepository;
protected File managedLegacyDir;
protected BidirectionalRepositoryLayoutFactory layoutFactory;
protected MockConfiguration config;
protected void setUp()
throws Exception
{
super.setUp();
layoutFactory = (BidirectionalRepositoryLayoutFactory) lookup( BidirectionalRepositoryLayoutFactory.class
.getName() );
config = (MockConfiguration) lookup( ArchivaConfiguration.class.getName(), "mock" );
RepositoryConfiguration repoConfig;
// Setup source repository (using default layout)
File repoLocation = getTestFile( REPOPATH_DEFAULT_MANAGED_TARGET );
// faster only to delete this one before copying, the others are done case by case
FileUtils.deleteDirectory( new File( repoLocation, "org/apache/maven/test/get-merged-metadata" ) );
copyDirectoryStructure( getTestFile( REPOPATH_DEFAULT_MANAGED ), repoLocation );
managedDefaultRepository = createRepository( ID_DEFAULT_MANAGED, "Default Managed Repository",
REPOPATH_DEFAULT_MANAGED_TARGET, "default" );
managedDefaultDir = new File( managedDefaultRepository.getUrl().getPath() );
repoConfig = createRepoConfig( managedDefaultRepository );
config.getConfiguration().addRepository( repoConfig );
// Setup source repository (using legacy layout)
repoLocation = getTestFile( REPOPATH_LEGACY_MANAGED_TARGET );
FileUtils.deleteDirectory( repoLocation );
copyDirectoryStructure( getTestFile( REPOPATH_LEGACY_MANAGED ), repoLocation );
managedLegacyRepository = createRepository( ID_LEGACY_MANAGED, "Legacy Managed Repository",
REPOPATH_LEGACY_MANAGED_TARGET, "legacy" );
managedLegacyDir = new File( managedLegacyRepository.getUrl().getPath() );
repoConfig = createRepoConfig( managedLegacyRepository );
config.getConfiguration().addRepository( repoConfig );
// Setup target (proxied to) repository.
saveRepositoryConfig( ID_PROXIED1, "Proxied Repository 1", REPOPATH_PROXIED1, "default" );
// Setup target (proxied to) repository.
saveRepositoryConfig( ID_PROXIED2, "Proxied Repository 2", REPOPATH_PROXIED2, "default" );
// Setup target (proxied to) repository using legacy layout.
saveRepositoryConfig( ID_LEGACY_PROXIED, "Proxied Legacy Repository", REPOPATH_PROXIED_LEGACY, "legacy" );
// Setup the proxy handler.
proxyHandler = (RepositoryProxyConnectors) lookup( RepositoryProxyConnectors.class.getName() );
// Setup the wagon mock.
wagonMockControl = MockControl.createNiceControl( Wagon.class );
wagonMock = (Wagon) wagonMockControl.getMock();
WagonDelegate delegate = (WagonDelegate) lookup( Wagon.ROLE, "test" );
delegate.setDelegate( wagonMock );
System.out.println( "\n.\\ " + getName() + "() \\._________________________________________\n" );
}
protected void saveRepositoryConfig( String id, String name, String path, String layout )
{
RepositoryConfiguration repoConfig = new RepositoryConfiguration();
repoConfig.setId( id );
repoConfig.setName( name );
if ( path.indexOf( "://" ) > 0 )
{
repoConfig.setUrl( path );
}
else
{
repoConfig.setUrl( PathUtil.toUrl( path ) );
}
repoConfig.setLayout( layout );
config.getConfiguration().addRepository( repoConfig );
config.triggerChange( "repository", "" );
}
protected void assertFileEquals( File expectedFile, File actualFile, File sourceFile )
throws Exception
{
assertNotNull( "Expected File should not be null.", expectedFile );
assertNotNull( "Actual File should not be null.", actualFile );
assertTrue( "Check file exists.", actualFile.exists() );
assertEquals( "Check file path matches.", expectedFile.getAbsolutePath(), actualFile.getAbsolutePath() );
String expectedContents = FileUtils.readFileToString( sourceFile, null );
String actualContents = FileUtils.readFileToString( actualFile, null );
assertEquals( "Check file contents.", expectedContents, actualContents );
}
/**
* A faster recursive copy that omits .svn directories.
*
@@ -42,7 +202,7 @@ public class AbstractProxyTestCase
* @throws java.io.IOException if there is a copying problem
* @todo get back into plexus-utils, share with converter module
*/
private static void copyDirectoryStructure( File sourceDirectory, File destDirectory )
protected void copyDirectoryStructure( File sourceDirectory, File destDirectory )
throws IOException
{
if ( !sourceDirectory.exists() )
@@ -78,8 +238,8 @@ public class AbstractProxyTestCase
{
if ( !destination.exists() && !destination.mkdirs() )
{
throw new IOException(
"Could not create destination directory '" + destination.getAbsolutePath() + "'." );
throw new IOException( "Could not create destination directory '"
+ destination.getAbsolutePath() + "'." );
}
copyDirectoryStructure( file, destination );
@@ -91,4 +251,179 @@ public class AbstractProxyTestCase
}
}
}
protected ArtifactReference createArtifactReference( String layoutType, String path )
throws Exception
{
BidirectionalRepositoryLayout layout = layoutFactory.getLayout( layoutType );
ArchivaArtifact artifact = layout.toArtifact( path );
ArtifactReference ref = new ArtifactReference();
ref.setGroupId( artifact.getGroupId() );
ref.setArtifactId( artifact.getArtifactId() );
ref.setVersion( artifact.getVersion() );
ref.setClassifier( artifact.getClassifier() );
ref.setType( artifact.getType() );
return ref;
}
protected ProjectReference createMetadataReference( String layoutType, String path )
throws Exception
{
BidirectionalRepositoryLayout layout = layoutFactory.getLayout( layoutType );
ProjectReference metadata = layout.toProjectReference( path );
return metadata;
}
protected ArchivaRepository createManagedLegacyRepository()
{
return createRepository( "src/test/repositories/legacy-managed", "testManagedLegacyRepo",
"Test Managed (Legacy) Repository", "legacy" );
}
protected ArchivaRepository createProxiedLegacyRepository()
{
return createRepository( "src/test/repositories/legacy-proxied", "testProxiedLegacyRepo",
"Test Proxied (Legacy) Repository", "legacy" );
}
protected RepositoryConfiguration createRepoConfig( ArchivaRepository repo )
{
return createRepoConfig( repo.getId(), repo.getName(), repo.getUrl().toString(), repo.getLayoutType() );
}
protected RepositoryConfiguration createRepoConfig( String id, String name, String path, String layout )
{
RepositoryConfiguration repoConfig = new RepositoryConfiguration();
repoConfig.setId( id );
repoConfig.setName( name );
repoConfig.setUrl( PathUtil.toUrl( path ) );
repoConfig.setLayout( layout );
return repoConfig;
}
protected ArchivaRepository createRepository( String id, String name, String path, String layout )
{
ArchivaRepository repo = new ArchivaRepository( id, name, PathUtil.toUrl( path ) );
repo.getModel().setLayoutName( layout );
return repo;
}
protected void saveConnector( String sourceRepoId, String targetRepoId, String checksumPolicy,
String releasePolicy, String snapshotPolicy, String cacheFailuresPolicy )
{
ProxyConnectorConfiguration connectorConfig = new ProxyConnectorConfiguration();
connectorConfig.setSourceRepoId( sourceRepoId );
connectorConfig.setTargetRepoId( targetRepoId );
connectorConfig.addPolicy( ProxyConnectorConfiguration.POLICY_CHECKSUM, checksumPolicy );
connectorConfig.addPolicy( ProxyConnectorConfiguration.POLICY_RELEASES, releasePolicy );
connectorConfig.addPolicy( ProxyConnectorConfiguration.POLICY_SNAPSHOTS, snapshotPolicy );
connectorConfig.addPolicy( ProxyConnectorConfiguration.POLICY_CACHE_FAILURES, cacheFailuresPolicy );
config.getConfiguration().addProxyConnector( connectorConfig );
config.triggerChange( "proxyConnector", "" );
}
protected UrlFailureCache lookupUrlFailureCache()
throws Exception
{
UrlFailureCache failurlCache = (UrlFailureCache) lookup( UrlFailureCache.class.getName(), "default" );
assertNotNull( "URL Failure Cache cannot be null.", failurlCache );
return failurlCache;
}
protected void assertNoTempFiles( File expectedFile )
{
File workingDir = expectedFile.getParentFile();
if ( ( workingDir == null ) || !workingDir.isDirectory() )
{
return;
}
Collection tmpFiles = FileUtils.listFiles( workingDir, new String[] { "tmp" }, false );
if ( !tmpFiles.isEmpty() )
{
StringBuffer emsg = new StringBuffer();
emsg.append( "Found Temp Files in dir: " ).append( workingDir.getPath() );
Iterator it = tmpFiles.iterator();
while ( it.hasNext() )
{
File tfile = (File) it.next();
emsg.append( "\n " ).append( tfile.getName() );
}
fail( emsg.toString() );
}
}
/**
* Read the first line from the checksum file, and return it (trimmed).
*/
protected String readChecksumFile( File checksumFile )
throws Exception
{
FileReader freader = null;
BufferedReader buf = null;
try
{
freader = new FileReader( checksumFile );
buf = new BufferedReader( freader );
return buf.readLine();
}
finally
{
if ( buf != null )
{
buf.close();
}
if ( freader != null )
{
freader.close();
}
}
}
protected void assertChecksums( File expectedFile, String expectedSha1Contents, String expectedMd5Contents )
throws Exception
{
File sha1File = new File( expectedFile.getAbsolutePath() + ".sha1" );
File md5File = new File( expectedFile.getAbsolutePath() + ".md5" );
if ( expectedSha1Contents == null )
{
assertFalse( "SHA1 File should NOT exist: " + sha1File.getPath(), sha1File.exists() );
}
else
{
assertTrue( "SHA1 File should exist: " + sha1File.getPath(), sha1File.exists() );
String actualSha1Contents = readChecksumFile( sha1File );
assertEquals( "SHA1 File contents: " + sha1File.getPath(), expectedSha1Contents, actualSha1Contents );
}
if ( expectedMd5Contents == null )
{
assertFalse( "MD5 File should NOT exist: " + md5File.getPath(), md5File.exists() );
}
else
{
assertTrue( "MD5 File should exist: " + md5File.getPath(), md5File.exists() );
String actualMd5Contents = readChecksumFile( md5File );
assertEquals( "MD5 File contents: " + md5File.getPath(), expectedMd5Contents, actualMd5Contents );
}
}
protected void assertNotDownloaded( File downloadedFile )
{
assertNull( "Found file: " + downloadedFile + "; but was expecting a failure", downloadedFile );
}
protected static Date getPastDate()
throws ParseException
{
return new SimpleDateFormat( "yyyy-MM-dd", Locale.US ).parse( "2000-01-01" );
}
}

View File

@@ -0,0 +1,140 @@
package org.apache.maven.archiva.proxy;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.common.utils.PathUtil;
import org.apache.maven.archiva.model.ArtifactReference;
import org.apache.maven.archiva.policies.CachedFailuresPolicy;
import org.apache.maven.archiva.policies.ChecksumPolicy;
import org.apache.maven.archiva.policies.ReleasesPolicy;
import org.apache.maven.archiva.policies.SnapshotsPolicy;
import org.apache.maven.archiva.policies.urlcache.UrlFailureCache;
import org.apache.maven.wagon.TransferFailedException;
import java.io.File;
/**
* CacheFailuresTransferTest
*
* @author Brett Porter
* @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
* @version $Id$
*/
public class CacheFailuresTransferTest
extends AbstractProxyTestCase
{
public void testGetWithCacheFailuresOn()
throws Exception
{
String path = "org/apache/maven/test/get-in-second-proxy/1.0/get-in-second-proxy-1.0.jar";
File expectedFile = new File( managedDefaultDir, path );
ArtifactReference artifact = createArtifactReference( "default", path );
expectedFile.delete();
assertFalse( expectedFile.exists() );
// Configure Repository (usually done within archiva.xml configuration)
saveRepositoryConfig( "badproxied1", "Bad Proxied 1", "test://bad.machine.com/repo/", "default" );
saveRepositoryConfig( "badproxied2", "Bad Proxied 2", "test://bad.machine.com/repo/", "default" );
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, "badproxied1", ChecksumPolicy.FIX, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.CACHED );
saveConnector( ID_DEFAULT_MANAGED, "badproxied2", ChecksumPolicy.FIX, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.CACHED );
wagonMock.getIfNewer( path, new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" ), 0 );
TransferFailedException failedException = new TransferFailedException( "transfer failed" );
wagonMockControl.setThrowable( failedException );
wagonMockControl.replay();
File downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
assertNotDownloaded( downloadedFile );
wagonMockControl.verify();
assertNoTempFiles( expectedFile );
}
public void testGetWithCacheFailuresOff()
throws Exception
{
String path = "org/apache/maven/test/get-in-second-proxy/1.0/get-in-second-proxy-1.0.jar";
File expectedFile = new File( managedDefaultDir, path );
ArtifactReference artifact = createArtifactReference( "default", path );
expectedFile.delete();
assertFalse( expectedFile.exists() );
// Configure Repository (usually done within archiva.xml configuration)
saveRepositoryConfig( "badproxied1", "Bad Proxied 1", "test://bad.machine.com/repo/", "default" );
saveRepositoryConfig( "badproxied2", "Bad Proxied 2", "test://bad.machine.com/repo/", "default" );
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, "badproxied1", ChecksumPolicy.FIX, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.IGNORED );
saveConnector( ID_DEFAULT_MANAGED, "badproxied2", ChecksumPolicy.FIX, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.IGNORED );
wagonMock.getIfNewer( path, new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" ), 0 );
TransferFailedException failedException = new TransferFailedException( "transfer failed" );
wagonMockControl.setThrowable( failedException );
wagonMockControl.replay();
File downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
assertNotDownloaded( downloadedFile );
wagonMockControl.verify();
assertNoTempFiles( expectedFile );
}
public void testGetWhenInBothProxiedButFirstCacheFailure()
throws Exception
{
String path = "org/apache/maven/test/get-in-second-proxy/1.0/get-in-second-proxy-1.0.jar";
File expectedFile = new File( managedDefaultDir, path );
ArtifactReference artifact = createArtifactReference( "default", path );
expectedFile.delete();
assertFalse( expectedFile.exists() );
String url = PathUtil.toUrl( REPOPATH_PROXIED1 + "/" + path );
// Intentionally set failure on url in proxied1 (for test)
UrlFailureCache failurlCache = lookupUrlFailureCache();
failurlCache.cacheFailure( url );
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FIX, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.CACHED );
saveConnector( ID_DEFAULT_MANAGED, "proxied2", ChecksumPolicy.FIX, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.CACHED );
File downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
// Validate that file actually came from proxied2 (as intended).
File proxied2File = new File( REPOPATH_PROXIED2, path );
assertFileEquals( expectedFile, downloadedFile, proxied2File );
assertNoTempFiles( expectedFile );
}
}

View File

@@ -0,0 +1,459 @@
package org.apache.maven.archiva.proxy;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.commons.io.FileUtils;
import org.apache.maven.archiva.model.ArtifactReference;
import org.apache.maven.archiva.policies.CachedFailuresPolicy;
import org.apache.maven.archiva.policies.ChecksumPolicy;
import org.apache.maven.archiva.policies.ReleasesPolicy;
import org.apache.maven.archiva.policies.SnapshotsPolicy;
import org.apache.maven.wagon.TransferFailedException;
import java.io.File;
/**
* ChecksumTransferTest
*
* @author Brett Porter
* @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
* @version $Id$
*/
public class ChecksumTransferTest
extends AbstractProxyTestCase
{
public void testGetChecksumBothCorrect()
throws Exception
{
String path = "org/apache/maven/test/get-checksum-both-right/1.0/get-checksum-both-right-1.0.jar";
File expectedFile = new File( managedDefaultDir, path );
ArtifactReference artifact = createArtifactReference( "default", path );
FileUtils.deleteDirectory( expectedFile.getParentFile() );
assertFalse( expectedFile.exists() );
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORED, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.IGNORED );
File downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
File proxied1File = new File( REPOPATH_PROXIED1, path );
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, "066d76e459f7782c312c31e8a11b3c0f1e3e43a7 *get-checksum-both-right-1.0.jar",
"e58f30c6a150a2e843552438d18e15cb *get-checksum-both-right-1.0.jar" );
}
public void testGetChecksumCorrectSha1NoMd5()
throws Exception
{
String path = "org/apache/maven/test/get-checksum-sha1-only/1.0/get-checksum-sha1-only-1.0.jar";
File expectedFile = new File( managedDefaultDir, path );
ArtifactReference artifact = createArtifactReference( "default", path );
FileUtils.deleteDirectory( expectedFile.getParentFile() );
assertFalse( expectedFile.exists() );
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORED, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.IGNORED );
File downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
File proxied1File = new File( REPOPATH_PROXIED1, path );
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, "748a3a013bf5eacf2bbb40a2ac7d37889b728837 *get-checksum-sha1-only-1.0.jar", null );
}
public void testGetChecksumNoSha1CorrectMd5()
throws Exception
{
String path = "org/apache/maven/test/get-checksum-md5-only/1.0/get-checksum-md5-only-1.0.jar";
File expectedFile = new File( managedDefaultDir, path );
ArtifactReference artifact = createArtifactReference( "default", path );
FileUtils.deleteDirectory( expectedFile.getParentFile() );
assertFalse( expectedFile.exists() );
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORED, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.IGNORED );
File downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
File proxied1File = new File( REPOPATH_PROXIED1, path );
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, null, "f3af5201bf8da801da37db8842846e1c *get-checksum-md5-only-1.0.jar" );
}
public void testGetWithNoChecksumsUsingIgnoredSetting()
throws Exception
{
String path = "org/apache/maven/test/get-default-layout/1.0/get-default-layout-1.0.jar";
File expectedFile = new File( managedDefaultDir, path );
ArtifactReference artifact = createArtifactReference( "default", path );
FileUtils.deleteDirectory( expectedFile.getParentFile() );
assertFalse( expectedFile.exists() );
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORED, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.IGNORED );
File downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
File proxied1File = new File( REPOPATH_PROXIED1, path );
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, null, null );
}
public void testGetChecksumBadSha1BadMd5IgnoredSetting()
throws Exception
{
String path = "org/apache/maven/test/get-checksum-both-bad/1.0/get-checksum-both-bad-1.0.jar";
File expectedFile = new File( managedDefaultDir, path );
ArtifactReference artifact = createArtifactReference( "default", path );
FileUtils.deleteDirectory( expectedFile.getParentFile() );
assertFalse( expectedFile.exists() );
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORED, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.IGNORED );
File downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
File proxied1File = new File( REPOPATH_PROXIED1, path );
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, "invalid checksum file", "invalid checksum file" );
}
public void testGetChecksumBadSha1BadMd5FailSetting()
throws Exception
{
String path = "org/apache/maven/test/get-checksum-both-bad/1.0/get-checksum-both-bad-1.0.jar";
File expectedFile = new File( managedDefaultDir, path );
ArtifactReference artifact = createArtifactReference( "default", path );
FileUtils.deleteDirectory( expectedFile.getParentFile() );
assertFalse( expectedFile.exists() );
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FAIL, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.IGNORED );
File downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
assertNotDownloaded( downloadedFile );
assertChecksums( expectedFile, null, null );
}
public void testGetChecksumBadSha1BadMd5FixSetting()
throws Exception
{
String path = "org/apache/maven/test/get-checksum-both-bad/1.0/get-checksum-both-bad-1.0.jar";
File expectedFile = new File( managedDefaultDir, path );
ArtifactReference artifact = createArtifactReference( "default", path );
FileUtils.deleteDirectory( expectedFile.getParentFile() );
assertFalse( expectedFile.exists() );
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FIX, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.IGNORED );
File downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
File proxied1File = new File( REPOPATH_PROXIED1, path );
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, "4ec20a12dc91557330bd0b39d1805be5e329ae56 get-checksum-both-bad-1.0.jar",
"a292491a35925465e693a44809a078b5 get-checksum-both-bad-1.0.jar" );
}
public void testGetChecksumCorrectSha1BadMd5UsingFailSetting()
throws Exception
{
String path = "org/apache/maven/test/get-checksum-sha1-bad-md5/1.0/get-checksum-sha1-bad-md5-1.0.jar";
File expectedFile = new File( managedDefaultDir, path );
ArtifactReference artifact = createArtifactReference( "default", path );
FileUtils.deleteDirectory( expectedFile.getParentFile() );
assertFalse( expectedFile.exists() );
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FAIL, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.IGNORED );
File downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
assertNotDownloaded( downloadedFile );
assertChecksums( expectedFile, null, null );
}
public void testGetChecksumNoSha1CorrectMd5UsingFailSetting()
throws Exception
{
String path = "org/apache/maven/test/get-checksum-md5-only/1.0/get-checksum-md5-only-1.0.jar";
File expectedFile = new File( managedDefaultDir, path );
ArtifactReference artifact = createArtifactReference( "default", path );
FileUtils.deleteDirectory( expectedFile.getParentFile() );
assertFalse( expectedFile.exists() );
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FAIL, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.IGNORED );
File downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
// This is a success situation. No SHA1 with a Good MD5.
File proxied1File = new File( REPOPATH_PROXIED1, path );
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, null, "f3af5201bf8da801da37db8842846e1c *get-checksum-md5-only-1.0.jar" );
}
public void testGetWithNoChecksumsUsingFailSetting()
throws Exception
{
String path = "org/apache/maven/test/get-default-layout/1.0/get-default-layout-1.0.jar";
File expectedFile = new File( managedDefaultDir, path );
ArtifactReference artifact = createArtifactReference( "default", path );
FileUtils.deleteDirectory( expectedFile.getParentFile() );
assertFalse( expectedFile.exists() );
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FAIL, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.IGNORED );
File downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
assertNotDownloaded( downloadedFile );
assertChecksums( expectedFile, null, null );
}
public void testGetChecksumCorrectSha1BadMd5UsingIgnoredSetting()
throws Exception
{
String path = "org/apache/maven/test/get-checksum-sha1-bad-md5/1.0/get-checksum-sha1-bad-md5-1.0.jar";
File expectedFile = new File( managedDefaultDir, path );
ArtifactReference artifact = createArtifactReference( "default", path );
FileUtils.deleteDirectory( expectedFile.getParentFile() );
assertFalse( expectedFile.exists() );
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORED, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.IGNORED );
File downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
File proxied1File = new File( REPOPATH_PROXIED1, path );
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, "3dd1a3a57b807d3ef3fbc6013d926c891cbb8670 *get-checksum-sha1-bad-md5-1.0.jar",
"invalid checksum file" );
}
public void testGetChecksumCorrectSha1BadMd5UsingFixSetting()
throws Exception
{
String path = "org/apache/maven/test/get-checksum-sha1-bad-md5/1.0/get-checksum-sha1-bad-md5-1.0.jar";
File expectedFile = new File( managedDefaultDir, path );
ArtifactReference artifact = createArtifactReference( "default", path );
FileUtils.deleteDirectory( expectedFile.getParentFile() );
assertFalse( expectedFile.exists() );
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FIX, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.IGNORED );
File downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
File proxied1File = new File( REPOPATH_PROXIED1, path );
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, "3dd1a3a57b807d3ef3fbc6013d926c891cbb8670 *get-checksum-sha1-bad-md5-1.0.jar",
"c35f3b76268b73a4ba617f6f275c49ab get-checksum-sha1-bad-md5-1.0.jar" );
}
public void testGetChecksumNoSha1CorrectMd5UsingFixSetting()
throws Exception
{
String path = "org/apache/maven/test/get-checksum-md5-only/1.0/get-checksum-md5-only-1.0.jar";
File expectedFile = new File( managedDefaultDir, path );
ArtifactReference artifact = createArtifactReference( "default", path );
FileUtils.deleteDirectory( expectedFile.getParentFile() );
assertFalse( expectedFile.exists() );
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FIX, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.IGNORED );
File downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
File proxied1File = new File( REPOPATH_PROXIED1, path );
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, "71f7dc3f72053a3f2d9fdd6fef9db055ef957ffb get-checksum-md5-only-1.0.jar",
"f3af5201bf8da801da37db8842846e1c *get-checksum-md5-only-1.0.jar" );
}
public void testGetWithNoChecksumsUsingFixSetting()
throws Exception
{
String path = "org/apache/maven/test/get-default-layout/1.0/get-default-layout-1.0.jar";
File expectedFile = new File( managedDefaultDir, path );
ArtifactReference artifact = createArtifactReference( "default", path );
FileUtils.deleteDirectory( expectedFile.getParentFile() );
assertFalse( expectedFile.exists() );
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FIX, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.IGNORED );
File downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
File proxied1File = new File( REPOPATH_PROXIED1, path );
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, "1f12821c5e43e1a0b76b9564a6ddb0548ccb9486 get-default-layout-1.0.jar",
"3f7341545f21226b6f49a3c2704cb9be get-default-layout-1.0.jar" );
}
public void testGetChecksumTransferFailed()
throws Exception
{
String path = "org/apache/maven/test/get-checksum-sha1-only/1.0/get-checksum-sha1-only-1.0.jar";
File expectedFile = new File( managedDefaultDir, path );
ArtifactReference artifact = createArtifactReference( "default", path );
FileUtils.deleteDirectory( expectedFile.getParentFile() );
assertFalse( expectedFile.getParentFile().exists() );
assertFalse( expectedFile.exists() );
saveRepositoryConfig( "badproxied", "Bad Proxied", "test://bad.machine.com/repo/", "default" );
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, "badproxied", ChecksumPolicy.IGNORED, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.IGNORED );
wagonMock.getIfNewer( path, new File( expectedFile.getAbsolutePath() + ".tmp" ), 0 );
wagonMockControl.setReturnValue( true );
wagonMock.getIfNewer( path + ".sha1", new File( expectedFile.getAbsolutePath() + ".sha1.tmp" ), 0 );
wagonMockControl.setReturnValue( true );
wagonMock.getIfNewer( path + ".md5", new File( expectedFile.getAbsolutePath() + ".md5.tmp" ), 0 );
wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
wagonMockControl.replay();
File downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
wagonMockControl.verify();
// Do what the mock doesn't do.
String proxyPath = new File( REPOPATH_PROXIED1, path ).getAbsolutePath();
String localPath = new File( managedDefaultDir, path ).getAbsolutePath();
FileUtils.copyFile( new File( proxyPath ), new File( localPath ) );
FileUtils.copyFile( new File( proxyPath + ".sha1" ), new File( localPath + ".sha1" ) );
// Test results.
File proxied1File = new File( REPOPATH_PROXIED1, path );
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, "748a3a013bf5eacf2bbb40a2ac7d37889b728837 *get-checksum-sha1-only-1.0.jar", null );
}
public void testGetAlwaysBadChecksumPresentLocallyAbsentRemoteUsingIgnoredSetting()
throws Exception
{
String path = "org/apache/maven/test/get-bad-local-checksum/1.0/get-bad-local-checksum-1.0.jar";
File expectedFile = new File( managedDefaultDir, path );
ArtifactReference artifact = createArtifactReference( "default", path );
assertTrue( expectedFile.exists() );
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.IGNORED, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.IGNORED );
File downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
File proxied1File = new File( REPOPATH_PROXIED1, path );
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertNoTempFiles( expectedFile );
// There are no checksum files on the proxy side to download, hence the local ones should remain invalid.
assertChecksums( expectedFile, "invalid checksum file", "invalid checksum file" );
}
public void testGetAlwaysBadChecksumPresentLocallyAbsentRemoteUsingFailSetting()
throws Exception
{
String path = "org/apache/maven/test/get-bad-local-checksum/1.0/get-bad-local-checksum-1.0.jar";
File expectedFile = new File( managedDefaultDir, path );
ArtifactReference artifact = createArtifactReference( "default", path );
assertTrue( expectedFile.exists() );
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FAIL, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.IGNORED );
File downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
assertNotDownloaded( downloadedFile );
assertNoTempFiles( expectedFile );
// There are no checksum files on the proxy side to download.
// The FAIL policy deletes the local checksum files because they are invalid.
assertChecksums( expectedFile, null, null );
}
public void testGetAlwaysBadChecksumPresentLocallyAbsentRemoteUsingFixSetting()
throws Exception
{
String path = "org/apache/maven/test/get-bad-local-checksum/1.0/get-bad-local-checksum-1.0.jar";
File expectedFile = new File( managedDefaultDir, path );
ArtifactReference artifact = createArtifactReference( "default", path );
assertTrue( expectedFile.exists() );
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FIX, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.IGNORED );
File downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
File proxied1File = new File( REPOPATH_PROXIED1, path );
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertNoTempFiles( expectedFile );
assertChecksums( expectedFile, "96a08dc80a108cba8efd3b20aec91b32a0b2cbd4 get-bad-local-checksum-1.0.jar",
"46fdd6ca55bf1d7a7eb0c858f41e0ccd get-bad-local-checksum-1.0.jar" );
}
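// Editorial summary (an assumption drawn from the tests above, not code from this commit):
// ChecksumPolicy.IGNORED keeps whatever checksum files arrive, even invalid ones;
// ChecksumPolicy.FAIL rejects the download and removes bad checksum files;
// ChecksumPolicy.FIX rewrites the .sha1/.md5 files so they match the downloaded artifact.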
}


@ -0,0 +1,310 @@
package org.apache.maven.archiva.proxy;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.model.ArtifactReference;
import org.apache.maven.archiva.policies.CachedFailuresPolicy;
import org.apache.maven.archiva.policies.ChecksumPolicy;
import org.apache.maven.archiva.policies.ReleasesPolicy;
import org.apache.maven.archiva.policies.SnapshotsPolicy;
import org.apache.maven.wagon.TransferFailedException;
import java.io.File;
/**
* ManagedDefaultTransferTest
*
* @author Brett Porter
* @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
* @version $Id$
*/
public class ManagedDefaultTransferTest
extends AbstractProxyTestCase
{
public void testGetDefaultLayoutNotPresent()
throws Exception
{
String path = "org/apache/maven/test/get-default-layout/1.0/get-default-layout-1.0.jar";
File expectedFile = new File( managedDefaultDir, path );
ArtifactReference artifact = createArtifactReference( "default", path );
// Ensure file isn't present first.
expectedFile.delete();
assertFalse( expectedFile.exists() );
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, ChecksumPolicy.FIX, ReleasesPolicy.ONCE, SnapshotsPolicy.ONCE,
CachedFailuresPolicy.IGNORED );
// Attempt the proxy fetch.
File downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
File sourceFile = new File( REPOPATH_PROXIED1, path );
assertFileEquals( expectedFile, downloadedFile, sourceFile );
assertNoTempFiles( expectedFile );
}
/**
* The attempt here should result in no file being transferred.
*
* The file exists locally, and the policy is ONCE.
*
* @throws Exception
*/
public void testGetDefaultLayoutAlreadyPresentPolicyOnce()
throws Exception
{
String path = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
File expectedFile = new File( managedDefaultDir, path );
ArtifactReference artifact = createArtifactReference( "default", path );
assertTrue( expectedFile.exists() );
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, ChecksumPolicy.FIX, ReleasesPolicy.ONCE, SnapshotsPolicy.ONCE,
CachedFailuresPolicy.IGNORED );
// Attempt the proxy fetch.
File downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
assertFileEquals( expectedFile, downloadedFile, expectedFile );
assertNoTempFiles( expectedFile );
}
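// Hedged sketch (illustrative only; names such as localFile and policySetting are hypothetical,
// not the actual DefaultRepositoryProxyConnectors code): a ReleasesPolicy.ONCE check amounts to
// skipping the remote fetch when the managed copy already exists, roughly:
//
//     boolean fetchAllowed = !localFile.exists();
//
// which is why the assertions above expect the returned file to be the existing managed copy
// and no temp files to be created.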
/**
* The attempt here should result in the file being transferred.
*
* The file exists locally, and the policy is IGNORED.
*
* @throws Exception
*/
public void testGetDefaultLayoutAlreadyPresentPolicyIgnored()
throws Exception
{
String path = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
File expectedFile = new File( managedDefaultDir, path );
long originalModificationTime = expectedFile.lastModified();
ArtifactReference artifact = createArtifactReference( "default", path );
assertTrue( expectedFile.exists() );
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, ChecksumPolicy.FIX, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.IGNORED );
// Attempt the proxy fetch.
File downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
File proxiedFile = new File( REPOPATH_PROXIED1, path );
assertFileEquals( expectedFile, downloadedFile, proxiedFile );
long proxiedLastModified = proxiedFile.lastModified();
long downloadedLastModified = downloadedFile.lastModified();
assertFalse( "Check file timestamp is not that of proxy:", proxiedLastModified == downloadedLastModified );
assertEquals( "Check file timestamp is that of original managed file:", originalModificationTime,
downloadedLastModified );
assertNoTempFiles( expectedFile );
}
/**
* The attempt here should result in the file being transferred.
*
* The file exists locally, is over 6 years old, and the policy is DAILY.
*
* @throws Exception
*/
public void testGetDefaultLayoutRemoteUpdate()
throws Exception
{
String path = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
File expectedFile = new File( managedDefaultDir, path );
ArtifactReference artifact = createArtifactReference( "default", path );
assertTrue( expectedFile.exists() );
expectedFile.setLastModified( getPastDate().getTime() );
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, ChecksumPolicy.FIX, ReleasesPolicy.DAILY,
SnapshotsPolicy.DAILY, CachedFailuresPolicy.IGNORED );
// Attempt the proxy fetch.
File downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
File proxiedFile = new File( REPOPATH_PROXIED1, path );
assertFileEquals( expectedFile, downloadedFile, proxiedFile );
assertNoTempFiles( expectedFile );
}
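// Hedged sketch (illustrative only; localFile is a hypothetical name, and the real policy code
// may differ): a ReleasesPolicy.DAILY / SnapshotsPolicy.DAILY check is essentially an age test
// on the managed copy, roughly:
//
//     long age = System.currentTimeMillis() - localFile.lastModified();
//     boolean fetchAllowed = age > 24L * 60 * 60 * 1000;
//
// setLastModified( getPastDate().getTime() ) above pushes the file far past that window, so the
// proxy fetch is expected to proceed.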
public void testGetWhenInBothProxiedRepos()
throws Exception
{
String path = "org/apache/maven/test/get-in-both-proxies/1.0/get-in-both-proxies-1.0.jar";
File expectedFile = new File( managedDefaultDir, path );
ArtifactReference artifact = createArtifactReference( "default", path );
expectedFile.delete();
assertFalse( expectedFile.exists() );
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, ChecksumPolicy.FIX, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.IGNORED );
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED2, ChecksumPolicy.FIX, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.IGNORED );
// Attempt the proxy fetch.
File downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
File proxied1File = new File( REPOPATH_PROXIED1, path );
File proxied2File = new File( REPOPATH_PROXIED2, path );
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertNoTempFiles( expectedFile );
// TODO: is this check even needed if it passes above?
String actualContents = FileUtils.readFileToString( downloadedFile, null );
String badContents = FileUtils.readFileToString( proxied2File, null );
assertFalse( "Downloaded file contents should not be that of proxy 2", StringUtils.equals( actualContents,
badContents ) );
}
public void testGetInSecondProxiedRepo()
throws Exception
{
String path = "org/apache/maven/test/get-in-second-proxy/1.0/get-in-second-proxy-1.0.jar";
File expectedFile = new File( managedDefaultDir, path );
ArtifactReference artifact = createArtifactReference( "default", path );
expectedFile.delete();
assertFalse( expectedFile.exists() );
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, ChecksumPolicy.FIX, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.IGNORED );
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED2, ChecksumPolicy.FIX, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.IGNORED );
// Attempt the proxy fetch.
File downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
File proxied2File = new File( REPOPATH_PROXIED2, path );
assertFileEquals( expectedFile, downloadedFile, proxied2File );
assertNoTempFiles( expectedFile );
}
public void testNotFoundInAnyProxies()
throws Exception
{
String path = "org/apache/maven/test/does-not-exist/1.0/does-not-exist-1.0.jar";
File expectedFile = new File( managedDefaultDir, path );
ArtifactReference artifact = createArtifactReference( "default", path );
assertFalse( expectedFile.exists() );
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED1, ChecksumPolicy.FIX, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.IGNORED );
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED2, ChecksumPolicy.FIX, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.IGNORED );
saveConnector( ID_DEFAULT_MANAGED, ID_LEGACY_PROXIED, ChecksumPolicy.FIX, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.IGNORED );
// Attempt the proxy fetch.
File downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
assertNull( "File returned was: " + downloadedFile + "; expected null because the artifact is not present in any proxied repository", downloadedFile );
assertNoTempFiles( expectedFile );
}
public void testGetInSecondProxiedRepoFirstFails()
throws Exception
{
String path = "org/apache/maven/test/get-in-second-proxy/1.0/get-in-second-proxy-1.0.jar";
File expectedFile = new File( managedDefaultDir, path );
ArtifactReference artifact = createArtifactReference( "default", path );
expectedFile.delete();
assertFalse( expectedFile.exists() );
// Configure Repository (usually done within archiva.xml configuration)
saveRepositoryConfig( "badproxied", "Bad Proxied", "test://bad.machine.com/repo/", "default" );
wagonMock.getIfNewer( path, new File( expectedFile.getAbsolutePath() + ".tmp" ), 0 );
wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
wagonMockControl.replay();
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, "badproxied", ChecksumPolicy.FIX, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.IGNORED );
saveConnector( ID_DEFAULT_MANAGED, ID_PROXIED2, ChecksumPolicy.FIX, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.IGNORED );
// Attempt the proxy fetch.
File downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
wagonMockControl.verify();
File proxied2File = new File( REPOPATH_PROXIED2, path );
assertFileEquals( expectedFile, downloadedFile, proxied2File );
assertNoTempFiles( expectedFile );
}
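// Note on the mock pattern above (descriptive only): wagonMock records expectations until
// wagonMockControl.replay() is called; each getIfNewer(...) call paired with setThrowable(...)
// defines one expected transfer attempt and its outcome, and wagonMockControl.verify() then
// asserts that the proxy handler really tried the failing "badproxied" repository before
// falling back to proxied2.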
public void testGetAllRepositoriesFail()
throws Exception
{
String path = "org/apache/maven/test/get-in-second-proxy/1.0/get-in-second-proxy-1.0.jar";
File expectedFile = new File( managedDefaultDir, path );
ArtifactReference artifact = createArtifactReference( "default", path );
expectedFile.delete();
assertFalse( expectedFile.exists() );
// Configure Repository (usually done within archiva.xml configuration)
saveRepositoryConfig( "badproxied1", "Bad Proxied 1", "test://bad.machine.com/repo/", "default" );
saveRepositoryConfig( "badproxied2", "Bad Proxied 2", "test://dead.machine.com/repo/", "default" );
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, "badproxied1", ChecksumPolicy.FIX, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.IGNORED );
saveConnector( ID_DEFAULT_MANAGED, "badproxied2", ChecksumPolicy.FIX, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.IGNORED );
wagonMock.getIfNewer( path, new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" ), 0 );
wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
wagonMock.getIfNewer( path, new File( expectedFile.getParentFile(), expectedFile.getName() + ".tmp" ), 0 );
wagonMockControl.setThrowable( new TransferFailedException( "transfer failed" ) );
wagonMockControl.replay();
File downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
assertNotDownloaded( downloadedFile );
wagonMockControl.verify();
assertNoTempFiles( expectedFile );
// TODO: do not want failures to present as a not found!
// TODO: How much information on each failure should we pass back to the user vs. logging in the proxy?
}
}


@ -0,0 +1,221 @@
package org.apache.maven.archiva.proxy;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.model.ArtifactReference;
import org.apache.maven.archiva.policies.CachedFailuresPolicy;
import org.apache.maven.archiva.policies.ChecksumPolicy;
import org.apache.maven.archiva.policies.ReleasesPolicy;
import org.apache.maven.archiva.policies.SnapshotsPolicy;
import java.io.File;
/**
* ManagedLegacyTransferTest
*
* @author Brett Porter
* @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
* @version $Id$
*/
public class ManagedLegacyTransferTest
extends AbstractProxyTestCase
{
public void testLegacyManagedRepoGetNotPresent()
throws Exception
{
String path = "org.apache.maven.test/jars/get-default-layout-1.0.jar";
File expectedFile = new File( managedLegacyDir, path );
ArtifactReference artifact = createArtifactReference( "legacy", path );
expectedFile.delete();
assertFalse( expectedFile.exists() );
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_LEGACY_MANAGED, ID_PROXIED1, ChecksumPolicy.FIX, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.IGNORED );
File downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, artifact );
// The connector above targets ID_PROXIED1, so compare against that repository's copy.
File proxied1File = new File( REPOPATH_PROXIED1,
"org/apache/maven/test/get-default-layout/1.0/get-default-layout-1.0.jar" );
assertFileEquals( expectedFile, downloadedFile, proxied1File );
assertNoTempFiles( expectedFile );
// TODO: timestamp preservation requires support for that in wagon
// assertEquals( "Check file timestamp", proxiedFile.lastModified(), file.lastModified() );
}
public void testLegacyManagedRepoGetAlreadyPresent()
throws Exception
{
fail( "Implemented " + getName() );
// String path = "org.apache.maven.test/jars/get-default-layout-present-1.0.jar";
// File expectedFile = new File( legacyManagedRepository.getBasedir(), path );
// String expectedContents = FileUtils.readFileToString( expectedFile, null );
// long originalModificationTime = expectedFile.lastModified();
//
// assertTrue( expectedFile.exists() );
//
// File file = requestHandler.get( path, proxiedRepositories, legacyManagedRepository );
//
// assertEquals( "Check file matches", expectedFile, file );
// assertTrue( "Check file created", file.exists() );
// assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
// File proxiedFile = new File( proxiedRepository1.getBasedir(),
// "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar" );
// String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
// assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
// assertFalse( "Check file timestamp is not that of proxy", proxiedFile.lastModified() == file.lastModified() );
// assertEquals( "Check file timestamp is that of original managed file", originalModificationTime, file
// .lastModified() );
}
public void testLegacyProxyRepoGetNotPresent()
throws Exception
{
fail( "Implemented " + getName() );
// String path = "org/apache/maven/test/get-default-layout/1.0/get-default-layout-1.0.jar";
// File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
//
// expectedFile.delete();
// assertFalse( expectedFile.exists() );
//
// File file = requestHandler.get( path, legacyProxiedRepositories, defaultManagedRepository );
//
// assertEquals( "Check file matches", expectedFile, file );
// assertTrue( "Check file created", file.exists() );
// File proxiedFile = new File( legacyProxiedRepository.getBasedir(),
// "org.apache.maven.test/jars/get-default-layout-1.0.jar" );
// String expectedContents = FileUtils.readFileToString( proxiedFile, null );
// assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
// TODO: timestamp preservation requires support for that in wagon
// assertEquals( "Check file timestamp", proxiedFile.lastModified(), file.lastModified() );
}
public void testLegacyProxyRepoGetAlreadyPresent()
throws Exception
{
fail( "Implemented " + getName() );
// String path = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
// File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
// String expectedContents = FileUtils.readFileToString( expectedFile, null );
// long originalModificationTime = expectedFile.lastModified();
//
// assertTrue( expectedFile.exists() );
//
// File file = requestHandler.get( path, legacyProxiedRepositories, defaultManagedRepository );
//
// assertEquals( "Check file matches", expectedFile, file );
// assertTrue( "Check file created", file.exists() );
// assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
// File proxiedFile = new File( legacyProxiedRepository.getBasedir(),
// "org.apache.maven.test/jars/get-default-layout-present-1.0.jar" );
// String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
// assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
// assertFalse( "Check file timestamp is not that of proxy", proxiedFile.lastModified() == file.lastModified() );
// assertEquals( "Check file timestamp is that of original managed file", originalModificationTime, file
// .lastModified() );
}
public void testLegacyManagedAndProxyRepoGetNotPresent()
throws Exception
{
fail( "Implemented " + getName() );
// String path = "org.apache.maven.test/jars/get-default-layout-1.0.jar";
// File expectedFile = new File( legacyManagedRepository.getBasedir(), path );
//
// assertFalse( expectedFile.exists() );
//
// File file = requestHandler.get( path, legacyProxiedRepositories, legacyManagedRepository );
//
// assertEquals( "Check file matches", expectedFile, file );
// assertTrue( "Check file created", file.exists() );
// File proxiedFile = new File( legacyProxiedRepository.getBasedir(), path );
// String expectedContents = FileUtils.readFileToString( proxiedFile, null );
// assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
// TODO: timestamp preservation requires support for that in wagon
// assertEquals( "Check file timestamp", proxiedFile.lastModified(), file.lastModified() );
}
public void testLegacyManagedAndProxyRepoGetAlreadyPresent()
throws Exception
{
fail( "Implemented " + getName() );
// String path = "org.apache.maven.test/jars/get-default-layout-present-1.0.jar";
// File expectedFile = new File( legacyManagedRepository.getBasedir(), path );
// String expectedContents = FileUtils.readFileToString( expectedFile, null );
// long originalModificationTime = expectedFile.lastModified();
//
// assertTrue( expectedFile.exists() );
//
// File file = requestHandler.get( path, legacyProxiedRepositories, legacyManagedRepository );
//
// assertEquals( "Check file matches", expectedFile, file );
// assertTrue( "Check file created", file.exists() );
// assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
// File proxiedFile = new File( legacyProxiedRepository.getBasedir(), path );
// String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
// assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
// assertFalse( "Check file timestamp is not that of proxy", proxiedFile.lastModified() == file.lastModified() );
// assertEquals( "Check file timestamp is that of original managed file", originalModificationTime, file
// .lastModified() );
}
public void testLegacyRequestConvertedToDefaultPathInManagedRepo()
throws Exception
{
fail( "Implemented " + getName() );
// Check that a Maven1 legacy request is translated to a maven2 path in
// the managed repository.
// String legacyPath = "org.apache.maven.test/jars/get-default-layout-present-1.0.jar";
// String path = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
// File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
// assertTrue( expectedFile.exists() );
//
// File file = requestHandler.get( legacyPath, legacyProxiedRepositories, defaultManagedRepository );
//
// assertEquals( "Check file matches", expectedFile, file );
}
public void testDefaultRequestConvertedToLegacyPathInManagedRepo()
throws Exception
{
fail( "Implemented " + getName() );
// Check that a Maven2 default request is translated to a legacy path in
// the managed repository.
// String legacyPath = "org.apache.maven.test/jars/get-default-layout-present-1.0.jar";
// String path = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
// File expectedFile = new File( legacyManagedRepository.getBasedir(), legacyPath );
// assertTrue( expectedFile.exists() );
//
// File file = requestHandler.get( path, proxiedRepositories, legacyManagedRepository );
//
// assertEquals( "Check file matches", expectedFile, file );
}
}


@ -0,0 +1,292 @@
package org.apache.maven.archiva.proxy;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.model.ProjectReference;
import org.apache.maven.archiva.policies.CachedFailuresPolicy;
import org.apache.maven.archiva.policies.ChecksumPolicy;
import org.apache.maven.archiva.policies.ReleasesPolicy;
import org.apache.maven.archiva.policies.SnapshotsPolicy;
import java.io.File;
/**
* MetadataTransferTest
*
* @author Brett Porter
* @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
* @version $Id$
*/
public class MetadataTransferTest
extends AbstractProxyTestCase
{
public void testGetMetadataNotPresent()
throws Exception
{
String path = "org/apache/maven/test/dummy-artifact/1.0/maven-metadata.xml";
File expectedFile = new File( managedDefaultDir, path );
ProjectReference metadata = createMetadataReference( "default", path );
// Configure Connector (usually done within archiva.xml configuration)
saveConnector( ID_DEFAULT_MANAGED, "proxied1", ChecksumPolicy.FIX, ReleasesPolicy.IGNORED,
SnapshotsPolicy.IGNORED, CachedFailuresPolicy.IGNORED );
File downloadedFile = proxyHandler.fetchFromProxies( managedDefaultRepository, metadata );
assertNotDownloaded( downloadedFile );
// String path = "org/apache/maven/test/dummy-artifact/1.0/maven-metadata.xml";
// File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
//
// assertFalse( expectedFile.exists() );
//
// try
// {
// File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
// fail( "Found file: " + file + "; but was expecting a failure" );
// }
// catch ( ResourceDoesNotExistException e )
// {
// // expected
//
// assertFalse( expectedFile.exists() );
// }
}
public void testGetMetadataProxied()
{
String path = "org/apache/maven/test/get-default-metadata/1.0/maven-metadata.xml";
fail( "Implemented " + getName() );
// String path = "org/apache/maven/test/get-default-metadata/1.0/maven-metadata.xml";
// File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
//
// FileUtils.deleteDirectory( expectedFile.getParentFile() );
// assertFalse( expectedFile.exists() );
//
// File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
// assertEquals( "Check file matches", expectedFile, file );
// assertTrue( "Check file created", file.exists() );
// String expectedContents = getExpectedMetadata( "get-default-metadata", "1.0" );
// assertEquals( "Check content matches", expectedContents, FileUtils.readFileToString( file, null ) );
}
public void testGetMetadataMergeRepos()
{
String path = "org/apache/maven/test/get-merged-metadata/maven-metadata.xml";
fail( "Implemented " + getName() );
// String path = "org/apache/maven/test/get-merged-metadata/maven-metadata.xml";
// File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
//
// assertTrue( expectedFile.exists() );
//
// File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
// assertEquals( "Check file matches", expectedFile, file );
// assertTrue( "Check file created", file.exists() );
//
// String expectedContents = getExpectedMetadata( "get-merged-metadata", getVersioning(
// Arrays.asList( new String[]{"0.9", "1.0", "2.0", "3.0", "5.0", "4.0"} ), file ) );
//
// assertEquals( "Check content matches", expectedContents, FileUtils.readFileToString( file, null ) );
}
public void testGetMetadataRemovedFromProxies()
{
String path = "org/apache/maven/test/get-removed-metadata/1.0/maven-metadata.xml";
fail( "Implemented " + getName() );
// String path = "org/apache/maven/test/get-removed-metadata/1.0/maven-metadata.xml";
// File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
// String expectedContents =
// FileUtils.readFileToString( new File( defaultManagedRepository.getBasedir(), path ), null );
//
// assertTrue( expectedFile.exists() );
//
// File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
// assertEquals( "Check file matches", expectedFile, file );
// assertTrue( "Check file created", file.exists() );
// assertEquals( "Check content matches", expectedContents, FileUtils.readFileToString( file, null ) );
}
public void testGetReleaseMetadataNotExpired()
{
String path = "org/apache/maven/test/get-updated-metadata/maven-metadata.xml";
fail( "Implemented " + getName() );
// String path = "org/apache/maven/test/get-updated-metadata/maven-metadata.xml";
// File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
// String expectedContents =
// FileUtils.readFileToString( new File( defaultManagedRepository.getBasedir(), path ), null );
//
// assertTrue( expectedFile.exists() );
//
// new File( expectedFile.getParentFile(), ".metadata-proxied1" ).setLastModified( getPastDate().getTime() );
//
// proxiedRepository1.getReleases().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_NEVER );
// proxiedRepository1.getSnapshots().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS );
// File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
// assertEquals( "Check file matches", expectedFile, file );
// assertTrue( "Check file created", file.exists() );
// assertEquals( "Check content matches", expectedContents, FileUtils.readFileToString( file, null ) );
//
// String unexpectedContents =
// FileUtils.readFileToString( new File( proxiedRepository1.getBasedir(), path ), null );
// assertFalse( "Check content doesn't match proxy version",
// unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
}
public void testGetSnapshotMetadataNotExpired()
{
String path = "org/apache/maven/test/get-updated-metadata/1.0-SNAPSHOT/maven-metadata.xml";
fail( "Implemented " + getName() );
// String path = "org/apache/maven/test/get-updated-metadata/1.0-SNAPSHOT/maven-metadata.xml";
// File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
// String expectedContents =
// FileUtils.readFileToString( new File( defaultManagedRepository.getBasedir(), path ), null );
//
// assertTrue( expectedFile.exists() );
//
// new File( expectedFile.getParentFile(), ".metadata-proxied1" ).setLastModified( getPastDate().getTime() );
//
// proxiedRepository1.getReleases().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS );
// proxiedRepository1.getSnapshots().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_NEVER );
// File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
// assertEquals( "Check file matches", expectedFile, file );
// assertTrue( "Check file created", file.exists() );
// assertEquals( "Check content matches", expectedContents, FileUtils.readFileToString( file, null ) );
//
// String unexpectedContents =
// FileUtils.readFileToString( new File( proxiedRepository1.getBasedir(), path ), null );
// assertFalse( "Check content doesn't match proxy version",
// unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
}
public void testGetReleaseMetadataExpired()
{
String path = "org/apache/maven/test/get-updated-metadata/maven-metadata.xml";
fail( "Implemented " + getName() );
// String path = "org/apache/maven/test/get-updated-metadata/maven-metadata.xml";
// File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
// String unexpectedContents =
// FileUtils.readFileToString( new File( defaultManagedRepository.getBasedir(), path ), null );
//
// assertTrue( expectedFile.exists() );
//
// new File( expectedFile.getParentFile(), ".metadata-proxied1" ).setLastModified( getPastDate().getTime() );
//
// proxiedRepository1.getReleases().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS );
// proxiedRepository1.getSnapshots().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_NEVER );
// File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
// assertEquals( "Check file matches", expectedFile, file );
// assertTrue( "Check file created", file.exists() );
//
// String expectedContents = getExpectedMetadata( "get-updated-metadata", getVersioning(
// Arrays.asList( new String[]{"1.0", "2.0"} ), file ) );
//
// assertEquals( "Check content matches", expectedContents, FileUtils.readFileToString( file, null ) );
// assertFalse( "Check content doesn't match proxy version",
// unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
}
public void testGetSnapshotMetadataExpired()
{
String path = "org/apache/maven/test/get-updated-metadata/1.0-SNAPSHOT/maven-metadata.xml";
fail( "Implemented " + getName() );
// String path = "org/apache/maven/test/get-updated-metadata/1.0-SNAPSHOT/maven-metadata.xml";
// File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
// String unexpectedContents =
// FileUtils.readFileToString( new File( defaultManagedRepository.getBasedir(), path ), null );
//
// assertTrue( expectedFile.exists() );
//
// new File( expectedFile.getParentFile(), ".metadata-proxied1" ).setLastModified( getPastDate().getTime() );
//
// proxiedRepository1.getReleases().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_NEVER );
// proxiedRepository1.getSnapshots().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS );
// File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
// assertEquals( "Check file matches", expectedFile, file );
// assertTrue( "Check file created", file.exists() );
//
// String expectedContents =
// getExpectedMetadata( "get-updated-metadata", "1.0-SNAPSHOT", getVersioning( "20050831.111213", 2, file ) );
//
// assertEquals( "Check content matches", expectedContents, FileUtils.readFileToString( file, null ) );
// assertFalse( "Check content doesn't match proxy version",
// unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
}
public void testGetMetadataNotUpdated()
{
String path = "org/apache/maven/test/get-updated-metadata/maven-metadata.xml";
fail( "Implemented " + getName() );
// String path = "org/apache/maven/test/get-updated-metadata/maven-metadata.xml";
// File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
// String expectedContents =
// FileUtils.readFileToString( new File( defaultManagedRepository.getBasedir(), path ), null );
//
// assertTrue( expectedFile.exists() );
//
// File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
// new File( expectedFile.getParentFile(), ".metadata-proxied1" ).setLastModified( proxiedFile.lastModified() );
//
// proxiedRepository1.getReleases().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS );
// File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
// assertEquals( "Check file matches", expectedFile, file );
// assertTrue( "Check file created", file.exists() );
// assertEquals( "Check content matches", expectedContents, FileUtils.readFileToString( file, null ) );
//
// String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
// assertFalse( "Check content doesn't match proxy version",
// unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
}
public void testGetMetadataUpdated()
{
String path = "org/apache/maven/test/get-updated-metadata/maven-metadata.xml";
fail( "Implemented " + getName() );
// String path = "org/apache/maven/test/get-updated-metadata/maven-metadata.xml";
// File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
// String unexpectedContents =
// FileUtils.readFileToString( new File( defaultManagedRepository.getBasedir(), path ), null );
//
// assertTrue( expectedFile.exists() );
//
// new File( expectedFile.getParentFile(), ".metadata-proxied1" ).setLastModified( getPastDate().getTime() );
//
// File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
// assertEquals( "Check file matches", expectedFile, file );
// assertTrue( "Check file created", file.exists() );
//
// String expectedContents = getExpectedMetadata( "get-updated-metadata", getVersioning(
// Arrays.asList( new String[]{"1.0", "2.0"} ), file ) );
// assertEquals( "Check content matches", expectedContents, FileUtils.readFileToString( file, null ) );
// assertFalse( "Check content doesn't match old version",
// unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
}
}


@ -0,0 +1,91 @@
package org.apache.maven.archiva.proxy;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.Configuration;
import org.codehaus.plexus.registry.Registry;
import org.codehaus.plexus.registry.RegistryException;
import org.codehaus.plexus.registry.RegistryListener;
import org.easymock.MockControl;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
/**
* MockConfiguration
*
* @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
* @version $Id$
*
* @plexus.component role="org.apache.maven.archiva.configuration.ArchivaConfiguration"
* role-hint="mock"
*/
public class MockConfiguration
implements ArchivaConfiguration
{
private Configuration configuration = new Configuration();
private List listeners = new ArrayList();
private MockControl registryControl;
private Registry registryMock;
public MockConfiguration()
{
registryControl = MockControl.createNiceControl( Registry.class );
registryMock = (Registry) registryControl.getMock();
}
public void addChangeListener( RegistryListener listener )
{
listeners.add( listener );
}
public Configuration getConfiguration()
{
return configuration;
}
public void save( Configuration configuration )
throws RegistryException
{
/* do nothing */
}
public void triggerChange( String name, String value )
{
Iterator it = listeners.iterator();
while ( it.hasNext() )
{
RegistryListener listener = (RegistryListener) it.next();
try
{
listener.afterConfigurationChange( registryMock, name, value );
}
catch ( Exception e )
{
e.printStackTrace();
}
}
}
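// Usage sketch (illustrative only; the property name and value below are hypothetical):
//
//     MockConfiguration config =
//         (MockConfiguration) lookup( ArchivaConfiguration.class.getName(), "mock" );
//     config.triggerChange( "proxyConnectors", "" );
//
// This lets a test drive afterConfigurationChange notifications without a real registry.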
}


@ -0,0 +1,194 @@
package org.apache.maven.archiva.proxy;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.common.utils.PathUtil;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.RepositoryConfiguration;
import org.apache.maven.archiva.model.ArchivaRepository;
import org.apache.maven.archiva.model.ArtifactReference;
import org.apache.maven.archiva.model.ProjectReference;
import org.apache.maven.archiva.policies.CachedFailuresPolicy;
import org.apache.maven.archiva.policies.ChecksumPolicy;
import org.apache.maven.archiva.policies.ReleasesPolicy;
import org.apache.maven.archiva.policies.SnapshotsPolicy;
import org.apache.maven.archiva.policies.urlcache.UrlFailureCache;
import org.apache.maven.wagon.ResourceDoesNotExistException;
import org.apache.maven.wagon.TransferFailedException;
import org.apache.maven.wagon.Wagon;
import org.easymock.MockControl;
import java.io.File;
import java.io.IOException;
import java.text.ParseException;
import java.util.Arrays;
/**
* RelocateTransferTest
*
* @author Brett Porter
* @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
* @version $Id$
*/
public class RelocateTransferTest
extends AbstractProxyTestCase
{
public void testRelocateMaven1Request()
throws Exception
{
fail( "Implemented " + getName() );
// String path = "org.apache.maven.test/jars/get-relocated-artefact-1.0.jar";
// String relocatedPath = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
// File expectedFile = new File( defaultManagedRepository.getBasedir(), relocatedPath );
//
// assertTrue( expectedFile.exists() );
//
// File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
//
// assertEquals( "Check file matches", expectedFile, file );
}
public void testDoublyRelocateMaven1Request()
throws Exception
{
fail( "Implemented " + getName() );
// String path = "org.apache.maven.test/jars/get-doubly-relocated-artefact-1.0.jar";
// String relocatedPath = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
// File expectedFile = new File( defaultManagedRepository.getBasedir(), relocatedPath );
//
// assertTrue( expectedFile.exists() );
//
// File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
//
// assertEquals( "Check file matches", expectedFile, file );
}
public void testRelocateMaven1PomRequest()
throws Exception
{
fail( "Implemented " + getName() );
// String path = "org.apache.maven.test/poms/get-relocated-artefact-with-pom-1.0.pom";
// String relocatedPath = "org/apache/maven/test/get-default-layout-present-with-pom/1.0/get-default-layout-present-with-pom-1.0.pom";
// File expectedFile = new File( defaultManagedRepository.getBasedir(), relocatedPath );
//
// assertTrue( expectedFile.exists() );
//
// File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
//
// assertEquals( "Check file matches", expectedFile, file );
//
// assertTrue( expectedFile.exists() );
}
public void testRelocateMaven1PomRequestMissingTarget()
throws Exception
{
fail( "Implemented " + getName() );
// String path = "org.apache.maven.test/poms/get-relocated-artefact-1.0.pom";
// String relocatedPath = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.pom";
// File expectedFile = new File( defaultManagedRepository.getBasedir(), relocatedPath );
//
// assertFalse( expectedFile.exists() );
//
// try
// {
// requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
// fail( "Should have failed to find target POM" );
// }
// catch ( ResourceDoesNotExistException e )
// {
// assertTrue( true );
// }
}
public void testRelocateMaven1ChecksumRequest()
throws Exception
{
fail( "Implemented " + getName() );
// String path = "org.apache.maven.test/jars/get-relocated-artefact-1.0.jar.md5";
// String relocatedPath = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar.md5";
// File expectedFile = new File( defaultManagedRepository.getBasedir(), relocatedPath );
//
// assertTrue( expectedFile.exists() );
//
// File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
//
// assertEquals( "Check file matches", expectedFile, file );
//
// assertTrue( expectedFile.exists() );
//
// path = "org.apache.maven.test/jars/get-relocated-artefact-1.0.jar.sha1";
// relocatedPath = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar.sha1";
// expectedFile = new File( defaultManagedRepository.getBasedir(), relocatedPath );
//
// assertFalse( expectedFile.exists() );
//
// try
// {
// requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
// fail( "Checksum was not present, should not be found" );
// }
// catch ( ResourceDoesNotExistException e )
// {
// assertTrue( true );
// }
}
public void testRelocateMaven2Request()
throws Exception
{
fail( "Implemented " + getName() );
// String path = "org/apache/maven/test/get-relocated-artefact/1.0/get-relocated-artefact-1.0.jar";
// String relocatedPath = "org/apache/maven/test/get-default-layout-present/1.0/get-default-layout-present-1.0.jar";
// File expectedFile = new File( defaultManagedRepository.getBasedir(), relocatedPath );
//
// assertTrue( expectedFile.exists() );
//
// File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
//
// assertEquals( "Check file matches", expectedFile, file );
}
public void testRelocateMaven2RequestInLegacyManagedRepo()
throws Exception
{
fail( "Implemented " + getName() );
// String path = "org/apache/maven/test/get-relocated-artefact/1.0/get-relocated-artefact-1.0.jar";
// String relocatedPath = "org.apache.maven.test/jars/get-default-layout-present-1.0.jar";
// File expectedFile = new File( legacyManagedRepository.getBasedir(), relocatedPath );
//
// assertTrue( expectedFile.exists() );
//
// File file = requestHandler.get( path, proxiedRepositories, legacyManagedRepository );
//
// assertEquals( "Check file matches", expectedFile, file );
}
}
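
Editorial note: the relocation tests above assume the proxied repository serves a POM whose distributionManagement/relocation element points at the new coordinates. The sketch below is only an illustration of how such a relocated POM is represented with the maven-model classes; the class name RelocationSketch and the method relocatedPom() are invented here, the coordinates are copied from the test fixture paths, and the availability of org.apache.maven.model on the test classpath is an assumption, not something this diff establishes.

import org.apache.maven.model.DistributionManagement;
import org.apache.maven.model.Model;
import org.apache.maven.model.Relocation;

public class RelocationSketch
{
    // Hedged sketch: builds the model form of a relocated POM matching the
    // get-relocated-artefact-with-pom fixture above. Not the proxy handler's code path.
    public static Model relocatedPom()
    {
        Relocation relocation = new Relocation();
        relocation.setGroupId( "org.apache.maven.test" );
        relocation.setArtifactId( "get-default-layout-present-with-pom" );
        relocation.setVersion( "1.0" );
        relocation.setMessage( "Artifact has moved; please update your dependency." );

        DistributionManagement dm = new DistributionManagement();
        dm.setRelocation( relocation );

        Model model = new Model();
        model.setModelVersion( "4.0.0" );
        model.setGroupId( "org.apache.maven.test" );
        model.setArtifactId( "get-relocated-artefact-with-pom" );
        model.setVersion( "1.0" );
        model.setDistributionManagement( dm );
        return model;
    }
}
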

View File

@ -1,116 +0,0 @@
package org.apache.maven.archiva.proxy;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.model.ArchivaRepository;
import org.apache.maven.wagon.Wagon;
import org.codehaus.plexus.PlexusTestCase;
import org.easymock.MockControl;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
/**
* RepositoryProxyConnectorsTest
*
* @author Brett Porter
* @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
* @version $Id$
*/
public class RepositoryProxyConnectorsTest
extends PlexusTestCase
{
private MockControl wagonMockControl;
private Wagon wagonMock;
private RepositoryProxyConnectors proxyHandler;
private ArchivaRepository createRepository( String repoPath, String id, String name, String layout )
{
File repoDir = getTestFile( repoPath );
String repoUrl = "file://" + StringUtils.replaceChars( repoDir.getAbsolutePath(), '\\', '/' );
ArchivaRepository repo = new ArchivaRepository( id, name, repoUrl );
repo.getModel().setLayoutName( layout );
return repo;
}
private ArchivaRepository createManagedLegacyRepository()
{
return createRepository( "src/test/repositories/legacy-managed", "testManagedLegacyRepo",
"Test Managed (Legacy) Repository", "legacy" );
}
private ArchivaRepository createProxiedLegacyRepository()
{
return createRepository( "src/test/repositories/legacy-proxied", "testProxiedLegacyRepo",
"Test Proxied (Legacy) Repository", "legacy" );
}
protected void setUp()
throws Exception
{
super.setUp();
proxyHandler = (RepositoryProxyConnectors) lookup( RepositoryProxyConnectors.class.getName() );
File repoLocation = getTestFile( "target/test-repository/managed" );
// faster only to delete this one before copying, the others are done case by case
FileUtils.deleteDirectory( new File( repoLocation, "org/apache/maven/test/get-merged-metadata" ) );
copyDirectoryStructure( getTestFile( "src/test/repositories/managed" ), repoLocation );
defaultLayout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE, "default" );
defaultManagedRepository = createRepository( "managed-repository", repoLocation );
repoLocation = getTestFile( "target/test-repository/legacy-managed" );
FileUtils.deleteDirectory( repoLocation );
copyDirectoryStructure( getTestFile( "src/test/repositories/legacy-managed" ), repoLocation );
ArtifactRepositoryLayout legacyLayout = (ArtifactRepositoryLayout) lookup( ArtifactRepositoryLayout.ROLE,
"legacy" );
legacyManagedRepository = createRepository( "managed-repository", repoLocation, legacyLayout );
File location = getTestFile( "src/test/repositories/proxied1" );
proxiedRepository1 = createRepository( "proxied1", location );
location = getTestFile( "src/test/repositories/proxied2" );
proxiedRepository2 = createRepository( "proxied2", location );
proxiedRepositories = new ArrayList( 2 );
proxiedRepositories.add( createProxiedRepository( proxiedRepository1 ) );
proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
location = getTestFile( "src/test/repositories/legacy-proxied" );
legacyProxiedRepository = createRepository( "legacy-proxied", location, legacyLayout );
legacyProxiedRepositories = Collections.singletonList( createProxiedRepository( legacyProxiedRepository ) );
wagonMockControl = MockControl.createNiceControl( Wagon.class );
wagonMock = (Wagon) wagonMockControl.getMock();
WagonDelegate delegate = (WagonDelegate) lookup( Wagon.ROLE, "test" );
delegate.setDelegate( wagonMock );
}
}

View File

@ -0,0 +1,320 @@
package org.apache.maven.archiva.proxy;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.maven.wagon.ResourceDoesNotExistException;
import java.io.IOException;
import java.text.ParseException;
/**
* SnapshotTransferTest
*
* @author Brett Porter
* @author <a href="mailto:joakim@erdfelt.com">Joakim Erdfelt</a>
* @version $Id$
*/
public class SnapshotTransferTest
extends AbstractProxyTestCase
{
public void testSnapshotNonExistant()
{
String path = "org/apache/maven/test/does-not-exist/1.0-SNAPSHOT/does-not-exist-1.0-SNAPSHOT.jar";
fail( "Implemented " + getName() );
// String path = "org/apache/maven/test/does-not-exist/1.0-SNAPSHOT/does-not-exist-1.0-SNAPSHOT.jar";
// File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
//
// assertFalse( expectedFile.exists() );
//
// try
// {
// File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
// fail( "File returned was: " + file + "; should have got a not found exception" );
// }
// catch ( ResourceDoesNotExistException e )
// {
// // expected, but check file was not created
// assertFalse( expectedFile.exists() );
// }
}
public void testTimestampDrivenSnapshotNotPresentAlready()
{
String path = "org/apache/maven/test/does-not-exist/1.0-SNAPSHOT/does-not-exist-1.0-SNAPSHOT.jar";
fail( "Implemented " + getName() );
// String path =
// "org/apache/maven/test/get-timestamped-snapshot/1.0-SNAPSHOT/get-timestamped-snapshot-1.0-SNAPSHOT.jar";
// File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
//
// expectedFile.delete();
// assertFalse( expectedFile.exists() );
//
// File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
//
// assertEquals( "Check file matches", expectedFile, file );
// assertTrue( "Check file created", file.exists() );
// File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
// String expectedContents = FileUtils.readFileToString( proxiedFile, null );
// assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
}
public void testNewerTimestampDrivenSnapshotOnFirstRepo()
throws ResourceDoesNotExistException, ProxyException, IOException, ParseException
{
fail( "Implemented " + getName() );
// String path = "org/apache/maven/test/get-present-timestamped-snapshot/1.0-SNAPSHOT/get-present-timestamped-snapshot-1.0-SNAPSHOT.jar";
// File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
//
// assertTrue( expectedFile.exists() );
//
// expectedFile.setLastModified( getPastDate().getTime() );
//
// File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
//
// assertEquals( "Check file matches", expectedFile, file );
// assertTrue( "Check file created", file.exists() );
// File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
// String expectedContents = FileUtils.readFileToString( proxiedFile, null );
// assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
}
public void testOlderTimestampDrivenSnapshotOnFirstRepo()
throws ResourceDoesNotExistException, ProxyException, IOException
{
fail( "Implemented " + getName() );
// String path = "org/apache/maven/test/get-present-timestamped-snapshot/1.0-SNAPSHOT/get-present-timestamped-snapshot-1.0-SNAPSHOT.jar";
// File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
// String expectedContents = FileUtils.readFileToString( expectedFile, null );
//
// assertTrue( expectedFile.exists() );
//
// expectedFile.setLastModified( getFutureDate().getTime() );
//
// proxiedRepository1.getSnapshots().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS );
// File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
//
// assertEquals( "Check file matches", expectedFile, file );
// assertTrue( "Check file created", file.exists() );
// assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
//
// File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
// String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
// assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
}
public void testNewerTimestampDrivenSnapshotOnSecondRepoThanFirstNotPresentAlready()
throws Exception
{
// TODO: wagon may not support timestamps (yet)
fail( "Implemented " + getName() );
// String path = "org/apache/maven/test/get-timestamped-snapshot-in-both/1.0-SNAPSHOT/get-timestamped-snapshot-in-both-1.0-SNAPSHOT.jar";
// File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
//
// assertFalse( expectedFile.exists() );
//
// File repoLocation = getTestFile( "target/test-repository/proxied1" );
// FileUtils.deleteDirectory( repoLocation );
// copyDirectoryStructure( getTestFile( "src/test/repositories/proxied1" ), repoLocation );
// proxiedRepository1 = createRepository( "proxied1", repoLocation );
//
// new File( proxiedRepository1.getBasedir(), path ).setLastModified( getPastDate().getTime() );
//
// proxiedRepositories.clear();
// proxiedRepositories.add( createProxiedRepository( proxiedRepository1 ) );
// proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
//
// File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
//
// assertEquals( "Check file matches", expectedFile, file );
// assertTrue( "Check file created", file.exists() );
//
// File proxiedFile = new File( proxiedRepository2.getBasedir(), path );
// String expectedContents = FileUtils.readFileToString( proxiedFile, null );
// assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
//
// proxiedFile = new File( proxiedRepository1.getBasedir(), path );
// String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
// assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
}
public void testOlderTimestampDrivenSnapshotOnSecondRepoThanFirstNotPresentAlready()
throws Exception
{
fail( "Implemented " + getName() );
// String path = "org/apache/maven/test/get-timestamped-snapshot-in-both/1.0-SNAPSHOT/get-timestamped-snapshot-in-both-1.0-SNAPSHOT.jar";
// File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
//
// expectedFile.delete();
// assertFalse( expectedFile.exists() );
//
// File repoLocation = getTestFile( "target/test-repository/proxied2" );
// FileUtils.deleteDirectory( repoLocation );
// copyDirectoryStructure( getTestFile( "src/test/repositories/proxied2" ), repoLocation );
// proxiedRepository2 = createRepository( "proxied2", repoLocation );
//
// new File( proxiedRepository2.getBasedir(), path ).setLastModified( getPastDate().getTime() );
//
// proxiedRepositories.clear();
// proxiedRepositories.add( createProxiedRepository( proxiedRepository1 ) );
// proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
//
// File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
//
// assertEquals( "Check file matches", expectedFile, file );
// assertTrue( "Check file created", file.exists() );
//
// File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
// String expectedContents = FileUtils.readFileToString( proxiedFile, null );
// assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
//
// proxiedFile = new File( proxiedRepository2.getBasedir(), path );
// String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
// assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
}
public void testTimestampDrivenSnapshotNotExpired()
throws Exception
{
fail( "Implemented " + getName() );
// String path = "org/apache/maven/test/get-present-timestamped-snapshot/1.0-SNAPSHOT/get-present-timestamped-snapshot-1.0-SNAPSHOT.jar";
// File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
//
// assertTrue( expectedFile.exists() );
//
// File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
// proxiedFile.setLastModified( getFutureDate().getTime() );
//
// File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
//
// assertEquals( "Check file matches", expectedFile, file );
// assertTrue( "Check file created", file.exists() );
// String expectedContents = FileUtils.readFileToString( expectedFile, null );
// assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
//
// String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
// assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
}
public void testTimestampDrivenSnapshotNotUpdated()
throws Exception
{
fail( "Implemented " + getName() );
// String path = "org/apache/maven/test/get-present-timestamped-snapshot/1.0-SNAPSHOT/get-present-timestamped-snapshot-1.0-SNAPSHOT.jar";
// File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
// String expectedContents = FileUtils.readFileToString( expectedFile, null );
//
// assertTrue( expectedFile.exists() );
//
// File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
// expectedFile.setLastModified( proxiedFile.lastModified() );
//
// proxiedRepository1.getSnapshots().setUpdatePolicy( ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS );
// File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
//
// assertEquals( "Check file matches", expectedFile, file );
// assertTrue( "Check file created", file.exists() );
// assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
//
// String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
// assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
}
public void testTimestampDrivenSnapshotNotPresentAlreadyExpiredCacheFailure()
throws ResourceDoesNotExistException, ProxyException, IOException
{
fail( "Implemented " + getName() );
// String path = "org/apache/maven/test/get-timestamped-snapshot/1.0-SNAPSHOT/get-timestamped-snapshot-1.0-SNAPSHOT.jar";
// File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
//
// expectedFile.delete();
// assertFalse( expectedFile.exists() );
//
// proxiedRepositories.clear();
// ProxiedArtifactRepository proxiedArtifactRepository = createProxiedRepository( proxiedRepository1 );
// proxiedArtifactRepository.addFailure( path, ALWAYS_UPDATE_POLICY );
// proxiedRepositories.add( proxiedArtifactRepository );
// proxiedRepositories.add( createProxiedRepository( proxiedRepository2 ) );
// File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
//
// assertEquals( "Check file matches", expectedFile, file );
// assertTrue( "Check file created", file.exists() );
//
// File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
// String expectedContents = FileUtils.readFileToString( proxiedFile, null );
// assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
//
// assertFalse( "Check failure", proxiedArtifactRepository.isCachedFailure( path ) );
}
public void testMetadataDrivenSnapshotNotPresentAlready()
throws ResourceDoesNotExistException, ProxyException, IOException
{
fail( "Implemented " + getName() );
// String path = "org/apache/maven/test/get-metadata-snapshot/1.0-SNAPSHOT/get-metadata-snapshot-1.0-20050831.101112-1.jar";
// File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
//
// expectedFile.delete();
// assertFalse( expectedFile.exists() );
//
// File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
//
// assertEquals( "Check file matches", expectedFile, file );
// assertTrue( "Check file created", file.exists() );
// File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
// String expectedContents = FileUtils.readFileToString( proxiedFile, null );
// assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
}
public void testGetMetadataDrivenSnapshotRemoteUpdate()
throws ResourceDoesNotExistException, ProxyException, IOException, ParseException
{
fail( "Implemented " + getName() );
// Metadata driven snapshots (using a full timestamp) are treated like a release. It is the timing of the
// updates to the metadata files that triggers which will be downloaded
// String path = "org/apache/maven/test/get-present-metadata-snapshot/1.0-SNAPSHOT/get-present-metadata-snapshot-1.0-20050831.101112-1.jar";
// File expectedFile = new File( defaultManagedRepository.getBasedir(), path );
// String expectedContents = FileUtils.readFileToString( expectedFile, null );
//
// assertTrue( expectedFile.exists() );
//
// expectedFile.setLastModified( getPastDate().getTime() );
//
// File file = requestHandler.get( path, proxiedRepositories, defaultManagedRepository );
//
// assertEquals( "Check file matches", expectedFile, file );
// assertTrue( "Check file created", file.exists() );
// assertEquals( "Check file contents", expectedContents, FileUtils.readFileToString( file, null ) );
// File proxiedFile = new File( proxiedRepository1.getBasedir(), path );
// String unexpectedContents = FileUtils.readFileToString( proxiedFile, null );
// assertFalse( "Check file contents", unexpectedContents.equals( FileUtils.readFileToString( file, null ) ) );
}
}
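
Editorial note: for the metadata-driven cases above, the concrete file name (for example get-metadata-snapshot-1.0-20050831.101112-1.jar) is the SNAPSHOT version with the literal "SNAPSHOT" replaced by the timestamp and build number published in maven-metadata.xml. The sketch below documents only that naming convention the fixtures rely on; SnapshotFilenameSketch and buildTimestampedName are illustrative names, not part of the proxy code under test.

public class SnapshotFilenameSketch
{
    // Hedged sketch: derives the timestamped file name used by the metadata-driven
    // snapshot fixtures above.
    public static String buildTimestampedName( String artifactId, String version,
                                               String timestamp, String buildNumber, String extension )
    {
        // "1.0-SNAPSHOT" + timestamp "20050831.101112" + build "1" -> "1.0-20050831.101112-1"
        String resolvedVersion = version.replaceAll( "SNAPSHOT$", timestamp + "-" + buildNumber );
        return artifactId + "-" + resolvedVersion + "." + extension;
    }

    public static void main( String[] args )
    {
        // Matches the path used in testMetadataDrivenSnapshotNotPresentAlready above.
        System.out.println( buildTimestampedName( "get-metadata-snapshot", "1.0-SNAPSHOT",
                                                  "20050831.101112", "1", "jar" ) );
        // prints: get-metadata-snapshot-1.0-20050831.101112-1.jar
    }
}
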

View File

@ -31,6 +31,7 @@ import org.apache.maven.wagon.events.SessionListener;
import org.apache.maven.wagon.events.TransferListener;
import org.apache.maven.wagon.proxy.ProxyInfo;
import org.apache.maven.wagon.repository.Repository;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import java.io.File;
import java.io.IOException;
@ -42,6 +43,7 @@ import java.util.List;
* @author <a href="mailto:brett@apache.org">Brett Porter</a>
*/
public class WagonDelegate
extends AbstractLogEnabled
implements Wagon
{
private Wagon delegate;
@ -51,6 +53,7 @@ public class WagonDelegate
public void get( String resourceName, File destination )
throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException
{
getLogger().debug( ".get(" + resourceName + ", " + destination + ")" );
delegate.get( resourceName, destination );
create( destination );
}
@ -58,6 +61,8 @@ public class WagonDelegate
public boolean getIfNewer( String resourceName, File destination, long timestamp )
throws TransferFailedException, ResourceDoesNotExistException, AuthorizationException
{
getLogger().debug( ".getIfNewer(" + resourceName + ", " + destination + ", " + timestamp + ")" );
boolean result = delegate.getIfNewer( resourceName, destination, timestamp );
createIfMissing( destination );
return result;

View File

@ -0,0 +1,47 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
<log4j:configuration xmlns:log4j="http://jakarta.apache.org/log4j/">
<appender name="console" class="org.apache.log4j.ConsoleAppender">
<param name="Target" value="System.out"/>
<layout class="org.apache.log4j.PatternLayout">
<param name="ConversionPattern" value="%5p|%t|%5r|%-30c{1} - %m%n"/>
</layout>
</appender>
<!-- Help identify bugs during testing -->
<logger name="org.apache.maven.archiva">
<level value="debug"/>
</logger>
<logger name="org.codehaus.plexus.security">
<level value="info"/>
</logger>
<logger name="org.codehaus.plexus.PlexusContainer">
<level value="info"/>
</logger>
<logger name="JPOX">
<level value="warn"/>
</logger>
<logger name="JPOX.MetaData">
<level value="error"/>
</logger>
<logger name="JPOX.RDBMS.SQL">
<level value="error"/>
</logger>
<logger name="SQL">
<level value="error"/>
</logger>
<root>
<priority value ="debug" />
<appender-ref ref="console" />
</root>
</log4j:configuration>

View File

@ -0,0 +1,72 @@
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing,
~ software distributed under the License is distributed on an
~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
~ KIND, either express or implied. See the License for the
~ specific language governing permissions and limitations
~ under the License.
-->
<component-set>
<components>
<component>
<role>org.apache.maven.wagon.Wagon</role>
<role-hint>test</role-hint>
<implementation>org.apache.maven.archiva.proxy.WagonDelegate</implementation>
</component>
<component>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<role-hint>mock</role-hint>
<implementation>org.apache.maven.archiva.proxy.MockConfiguration</implementation>
</component>
<component>
<role>org.apache.maven.archiva.proxy.RepositoryProxyConnectors</role>
<role-hint>default</role-hint>
<implementation>org.apache.maven.archiva.proxy.DefaultRepositoryProxyConnectors</implementation>
<description>DefaultRepositoryProxyConnectors</description>
<requirements>
<requirement>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<role-hint>mock</role-hint>
<field-name>archivaConfiguration</field-name>
</requirement>
<requirement>
<role>org.apache.maven.wagon.Wagon</role>
<field-name>wagons</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.repository.layout.BidirectionalRepositoryLayoutFactory</role>
<field-name>layoutFactory</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.policies.PreDownloadPolicy</role>
<field-name>preDownloadPolicies</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.policies.PostDownloadPolicy</role>
<field-name>postDownloadPolicies</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.policies.urlcache.UrlFailureCache</role>
<role-hint>default</role-hint>
<field-name>urlFailureCache</field-name>
</requirement>
</requirements>
</component>
<component>
<role>org.codehaus.plexus.logging.LoggerManager</role>
<implementation>org.codehaus.plexus.logging.slf4j.Slf4jLoggerManager</implementation>
<lifecycle-handler>basic</lifecycle-handler>
</component>
</components>
</component-set>

View File

@ -0,0 +1,72 @@
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing,
~ software distributed under the License is distributed on an
~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
~ KIND, either express or implied. See the License for the
~ specific language governing permissions and limitations
~ under the License.
-->
<component-set>
<components>
<component>
<role>org.apache.maven.wagon.Wagon</role>
<role-hint>test</role-hint>
<implementation>org.apache.maven.archiva.proxy.WagonDelegate</implementation>
</component>
<component>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<role-hint>mock</role-hint>
<implementation>org.apache.maven.archiva.proxy.MockConfiguration</implementation>
</component>
<component>
<role>org.apache.maven.archiva.proxy.RepositoryProxyConnectors</role>
<role-hint>default</role-hint>
<implementation>org.apache.maven.archiva.proxy.DefaultRepositoryProxyConnectors</implementation>
<description>DefaultRepositoryProxyConnectors</description>
<requirements>
<requirement>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<role-hint>mock</role-hint>
<field-name>archivaConfiguration</field-name>
</requirement>
<requirement>
<role>org.apache.maven.wagon.Wagon</role>
<field-name>wagons</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.repository.layout.BidirectionalRepositoryLayoutFactory</role>
<field-name>layoutFactory</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.policies.PreDownloadPolicy</role>
<field-name>preDownloadPolicies</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.policies.PostDownloadPolicy</role>
<field-name>postDownloadPolicies</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.policies.urlcache.UrlFailureCache</role>
<role-hint>default</role-hint>
<field-name>urlFailureCache</field-name>
</requirement>
</requirements>
</component>
<component>
<role>org.codehaus.plexus.logging.LoggerManager</role>
<implementation>org.codehaus.plexus.logging.slf4j.Slf4jLoggerManager</implementation>
<lifecycle-handler>basic</lifecycle-handler>
</component>
</components>
</component-set>

View File

@ -0,0 +1,72 @@
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing,
~ software distributed under the License is distributed on an
~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
~ KIND, either express or implied. See the License for the
~ specific language governing permissions and limitations
~ under the License.
-->
<component-set>
<components>
<component>
<role>org.apache.maven.wagon.Wagon</role>
<role-hint>test</role-hint>
<implementation>org.apache.maven.archiva.proxy.WagonDelegate</implementation>
</component>
<component>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<role-hint>mock</role-hint>
<implementation>org.apache.maven.archiva.proxy.MockConfiguration</implementation>
</component>
<component>
<role>org.apache.maven.archiva.proxy.RepositoryProxyConnectors</role>
<role-hint>default</role-hint>
<implementation>org.apache.maven.archiva.proxy.DefaultRepositoryProxyConnectors</implementation>
<description>DefaultRepositoryProxyConnectors</description>
<requirements>
<requirement>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<role-hint>mock</role-hint>
<field-name>archivaConfiguration</field-name>
</requirement>
<requirement>
<role>org.apache.maven.wagon.Wagon</role>
<field-name>wagons</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.repository.layout.BidirectionalRepositoryLayoutFactory</role>
<field-name>layoutFactory</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.policies.PreDownloadPolicy</role>
<field-name>preDownloadPolicies</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.policies.PostDownloadPolicy</role>
<field-name>postDownloadPolicies</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.policies.urlcache.UrlFailureCache</role>
<role-hint>default</role-hint>
<field-name>urlFailureCache</field-name>
</requirement>
</requirements>
</component>
<component>
<role>org.codehaus.plexus.logging.LoggerManager</role>
<implementation>org.codehaus.plexus.logging.slf4j.Slf4jLoggerManager</implementation>
<lifecycle-handler>basic</lifecycle-handler>
</component>
</components>
</component-set>

View File

@ -0,0 +1,72 @@
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing,
~ software distributed under the License is distributed on an
~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
~ KIND, either express or implied. See the License for the
~ specific language governing permissions and limitations
~ under the License.
-->
<component-set>
<components>
<component>
<role>org.apache.maven.wagon.Wagon</role>
<role-hint>test</role-hint>
<implementation>org.apache.maven.archiva.proxy.WagonDelegate</implementation>
</component>
<component>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<role-hint>mock</role-hint>
<implementation>org.apache.maven.archiva.proxy.MockConfiguration</implementation>
</component>
<component>
<role>org.apache.maven.archiva.proxy.RepositoryProxyConnectors</role>
<role-hint>default</role-hint>
<implementation>org.apache.maven.archiva.proxy.DefaultRepositoryProxyConnectors</implementation>
<description>DefaultRepositoryProxyConnectors</description>
<requirements>
<requirement>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<role-hint>mock</role-hint>
<field-name>archivaConfiguration</field-name>
</requirement>
<requirement>
<role>org.apache.maven.wagon.Wagon</role>
<field-name>wagons</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.repository.layout.BidirectionalRepositoryLayoutFactory</role>
<field-name>layoutFactory</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.policies.PreDownloadPolicy</role>
<field-name>preDownloadPolicies</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.policies.PostDownloadPolicy</role>
<field-name>postDownloadPolicies</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.policies.urlcache.UrlFailureCache</role>
<role-hint>default</role-hint>
<field-name>urlFailureCache</field-name>
</requirement>
</requirements>
</component>
<component>
<role>org.codehaus.plexus.logging.LoggerManager</role>
<implementation>org.codehaus.plexus.logging.slf4j.Slf4jLoggerManager</implementation>
<lifecycle-handler>basic</lifecycle-handler>
</component>
</components>
</component-set>

View File

@ -0,0 +1,72 @@
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing,
~ software distributed under the License is distributed on an
~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
~ KIND, either express or implied. See the License for the
~ specific language governing permissions and limitations
~ under the License.
-->
<component-set>
<components>
<component>
<role>org.apache.maven.wagon.Wagon</role>
<role-hint>test</role-hint>
<implementation>org.apache.maven.archiva.proxy.WagonDelegate</implementation>
</component>
<component>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<role-hint>mock</role-hint>
<implementation>org.apache.maven.archiva.proxy.MockConfiguration</implementation>
</component>
<component>
<role>org.apache.maven.archiva.proxy.RepositoryProxyConnectors</role>
<role-hint>default</role-hint>
<implementation>org.apache.maven.archiva.proxy.DefaultRepositoryProxyConnectors</implementation>
<description>DefaultRepositoryProxyConnectors</description>
<requirements>
<requirement>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<role-hint>mock</role-hint>
<field-name>archivaConfiguration</field-name>
</requirement>
<requirement>
<role>org.apache.maven.wagon.Wagon</role>
<field-name>wagons</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.repository.layout.BidirectionalRepositoryLayoutFactory</role>
<field-name>layoutFactory</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.policies.PreDownloadPolicy</role>
<field-name>preDownloadPolicies</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.policies.PostDownloadPolicy</role>
<field-name>postDownloadPolicies</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.policies.urlcache.UrlFailureCache</role>
<role-hint>default</role-hint>
<field-name>urlFailureCache</field-name>
</requirement>
</requirements>
</component>
<component>
<role>org.codehaus.plexus.logging.LoggerManager</role>
<implementation>org.codehaus.plexus.logging.slf4j.Slf4jLoggerManager</implementation>
<lifecycle-handler>basic</lifecycle-handler>
</component>
</components>
</component-set>

View File

@ -1,36 +0,0 @@
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing,
~ software distributed under the License is distributed on an
~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
~ KIND, either express or implied. See the License for the
~ specific language governing permissions and limitations
~ under the License.
-->
<component-set>
<components>
<component>
<role>org.apache.maven.wagon.Wagon</role>
<role-hint>test</role-hint>
<implementation>org.apache.maven.archiva.proxy.WagonDelegate</implementation>
</component>
<component>
<role>org.codehaus.plexus.logging.LoggerManager</role>
<implementation>org.codehaus.plexus.logging.console.ConsoleLoggerManager</implementation>
<lifecycle-handler>basic</lifecycle-handler>
<configuration>
<threshold>ERROR</threshold>
</configuration>
</component>
</components>
</component-set>

View File

@ -0,0 +1,72 @@
<!--
~ Licensed to the Apache Software Foundation (ASF) under one
~ or more contributor license agreements. See the NOTICE file
~ distributed with this work for additional information
~ regarding copyright ownership. The ASF licenses this file
~ to you under the Apache License, Version 2.0 (the
~ "License"); you may not use this file except in compliance
~ with the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing,
~ software distributed under the License is distributed on an
~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
~ KIND, either express or implied. See the License for the
~ specific language governing permissions and limitations
~ under the License.
-->
<component-set>
<components>
<component>
<role>org.apache.maven.wagon.Wagon</role>
<role-hint>test</role-hint>
<implementation>org.apache.maven.archiva.proxy.WagonDelegate</implementation>
</component>
<component>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<role-hint>mock</role-hint>
<implementation>org.apache.maven.archiva.proxy.MockConfiguration</implementation>
</component>
<component>
<role>org.apache.maven.archiva.proxy.RepositoryProxyConnectors</role>
<role-hint>default</role-hint>
<implementation>org.apache.maven.archiva.proxy.DefaultRepositoryProxyConnectors</implementation>
<description>DefaultRepositoryProxyConnectors</description>
<requirements>
<requirement>
<role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
<role-hint>mock</role-hint>
<field-name>archivaConfiguration</field-name>
</requirement>
<requirement>
<role>org.apache.maven.wagon.Wagon</role>
<field-name>wagons</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.repository.layout.BidirectionalRepositoryLayoutFactory</role>
<field-name>layoutFactory</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.policies.PreDownloadPolicy</role>
<field-name>preDownloadPolicies</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.policies.PostDownloadPolicy</role>
<field-name>postDownloadPolicies</field-name>
</requirement>
<requirement>
<role>org.apache.maven.archiva.policies.urlcache.UrlFailureCache</role>
<role-hint>default</role-hint>
<field-name>urlFailureCache</field-name>
</requirement>
</requirements>
</component>
<component>
<role>org.codehaus.plexus.logging.LoggerManager</role>
<implementation>org.codehaus.plexus.logging.slf4j.Slf4jLoggerManager</implementation>
<lifecycle-handler>basic</lifecycle-handler>
</component>
</components>
</component-set>