mirror of https://github.com/apache/archiva.git
- improve the cache: make it more map-like and synchronized, using the JDK 1.4 LinkedHashMap collection instead of a customised implementation
- use the new digester and clean up related code

git-svn-id: https://svn.apache.org/repos/asf/maven/repository-manager/trunk@360474 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
parent 50c62c3db7
commit 9a8157875a
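Context for the cache rework in this commit: a hand-rolled doubly-linked list is replaced with java.util.LinkedHashMap, whose predictable iteration order is what makes a plain map usable as an LRU cache. For orientation only, a minimal sketch (not code from this commit) of the access-ordered mode that JDK 1.4's LinkedHashMap also offers:

import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;

public class LruOrderDemo
{
    public static void main( String[] args )
    {
        // Third constructor argument true = access order: every get()/put()
        // moves the touched entry to the end, so iteration starts at the
        // least recently used entry.
        Map map = new LinkedHashMap( 16, 0.75f, true );
        map.put( "a", "1" );
        map.put( "b", "2" );
        map.put( "c", "3" );
        map.get( "a" ); // "a" becomes the most recently used entry

        Iterator it = map.keySet().iterator();
        System.out.println( it.next() ); // prints "b", the eldest entry
    }
}

The patch instead keeps the default insertion order and promotes entries by hand in Cache.get(), which achieves the same least-recently-used discipline.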
@@ -57,25 +57,9 @@
       <groupId>org.apache.maven</groupId>
       <artifactId>maven-artifact-manager</artifactId>
     </dependency>
-    <!--
     <dependency>
       <groupId>org.apache.maven.repository</groupId>
-      <artifactId>maven-repository-discovery</artifactId>
+      <artifactId>maven-repository-utils</artifactId>
     </dependency>
-    -->
   </dependencies>
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-surefire-plugin</artifactId>
-        <configuration>
-          <excludes>
-            <exclude>**/Abstract*.java</exclude>
-            <exclude>**/Test*.java</exclude>
-          </excludes>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
 </project>
@@ -268,7 +268,6 @@ public class BadMetadataReportProcessor
     {
         boolean hasFailures = false;
         Versioning versioning = metadata.getMetadata().getVersioning();
-        // TODO: change this to look for repository artifacts. It needs to centre around that I think, currently this is hardwired to the default layout
        File versionsDir =
             new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( metadata ) ).getParentFile();
         List versions = FileUtils.getFileNames( versionsDir, "*/*.pom", null, false );
@@ -16,21 +16,20 @@ package org.apache.maven.repository.reporting;
  * limitations under the License.
  */

-import java.util.HashMap;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
 import java.util.Map;

 /**
- * Class to implement caching
+ * Class to implement caching.
  */
 public class Cache
 {
-    private Map cache;
+    private final Map cache;

-    private DblLinkedList mostRecent;
+    private final double cacheHitRatio;

-    private double cacheHitRatio;
+    private final int cacheMaxSize;

-    private long cacheMaxSize;
-
     private long cacheHits;
@@ -47,7 +46,7 @@ public class Cache
     /**
      * Caches all data and expires only the oldest data when the maximum cache size is reached
      */
-    public Cache( long cacheMaxSize )
+    public Cache( int cacheMaxSize )
     {
         this( (double) 1, cacheMaxSize );
     }
@@ -56,12 +55,19 @@ public class Cache
      * Caches all data and expires only the oldest data when either the specified cache hit rate is reached
      * or the maximum cache size is reached.
      */
-    public Cache( double cacheHitRatio, long cacheMaxSize )
+    public Cache( double cacheHitRatio, int cacheMaxSize )
     {
         this.cacheHitRatio = cacheHitRatio;
         this.cacheMaxSize = cacheMaxSize;

-        cache = new HashMap();
+        if ( cacheMaxSize > 0 )
+        {
+            cache = new LinkedHashMap( cacheMaxSize );
+        }
+        else
+        {
+            cache = new LinkedHashMap();
+        }
     }
@@ -72,15 +78,19 @@ public class Cache
      */
     public boolean containsKey( Object key )
     {
-        boolean contains = cache.containsKey( key );
-
-        if ( contains )
-        {
-            cacheHits++;
-        }
-        else
-        {
-            cacheMiss++;
-        }
+        boolean contains;
+        synchronized ( cache )
+        {
+            contains = cache.containsKey( key );
+
+            if ( contains )
+            {
+                cacheHits++;
+            }
+            else
+            {
+                cacheMiss++;
+            }
+        }

         return contains;
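The rewritten containsKey() holds one lock on the map across the whole check-then-count sequence. That is the point of synchronizing on the cache object itself: a Collections.synchronizedMap wrapper would make each individual call atomic, but not the pair of operations. A hypothetical illustration (class and names are assumptions, not part of the commit):

import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;

public class SyncCountDemo
{
    private final Map cache = Collections.synchronizedMap( new LinkedHashMap() );

    private long hits;

    private long misses;

    // The wrapper makes each single call atomic, but the containsKey check
    // and the counter update are two steps: they need one shared lock,
    // which is why Cache synchronizes on the map itself.
    public boolean containsKey( Object key )
    {
        synchronized ( cache )
        {
            boolean contains = cache.containsKey( key );
            if ( contains )
            {
                hits++;
            }
            else
            {
                misses++;
            }
            return contains;
        }
    }
}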
@@ -96,19 +106,21 @@ public class Cache
     {
         Object retValue = null;

-        if ( cache.containsKey( key ) )
-        {
-            DblLinkedList cacheEntry = (DblLinkedList) cache.get( key );
-
-            makeMostRecent( cacheEntry );
-
-            retValue = cacheEntry.getCacheValue();
-
-            cacheHits++;
-        }
-        else
-        {
-            cacheMiss++;
-        }
+        synchronized ( cache )
+        {
+            if ( cache.containsKey( key ) )
+            {
+                // remove and put: this promotes it to the top since we use a linked hash map
+                retValue = cache.remove( key );
+
+                cache.put( key, retValue );
+
+                cacheHits++;
+            }
+            else
+            {
+                cacheMiss++;
+            }
+        }

         return retValue;
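The remove-and-put step in get() is needed because an insertion-ordered LinkedHashMap (the default, and what the constructor above builds) does not reorder an entry when an existing key is re-put; only removing and re-inserting moves it to the end. A small standalone check (not from the commit):

import java.util.LinkedHashMap;
import java.util.Map;

public class PromotionDemo
{
    public static void main( String[] args )
    {
        Map map = new LinkedHashMap(); // default: insertion order
        map.put( "a", "1" );
        map.put( "b", "2" );

        map.put( "a", "1" );           // re-putting alone does NOT reorder
        System.out.println( map );     // {a=1, b=2}

        Object v = map.remove( "a" );  // remove + put moves "a" to the end,
        map.put( "a", v );             // exactly what Cache.get() relies on
        System.out.println( map );     // {b=2, a=1}
    }
}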
@@ -120,21 +132,24 @@ public class Cache
      * @param key the object to map the valued object
      * @param value the object to cache
      */
-    public void put( Object key, Object value )
+    public Object put( Object key, Object value )
     {
-        DblLinkedList entry;
-        if ( !cache.containsKey( key ) )
-        {
-            entry = new DblLinkedList( key, value );
-            cache.put( key, entry );
-            manageCache();
-        }
-        else
-        {
-            entry = (DblLinkedList) cache.get( key );
-        }
+        Object old = null;
+
+        // remove and put: this promotes it to the top since we use a linked hash map
+        synchronized ( cache )
+        {
+            if ( cache.containsKey( key ) )
+            {
+                old = cache.remove( key );
+            }
+
+            cache.put( key, value );
+        }

-        makeMostRecent( entry );
+        manageCache();
+
+        return old;
     }
@@ -144,13 +159,16 @@ public class Cache
      */
     public double getHitRate()
     {
-        return cacheHits == 0 && cacheMiss == 0 ? 0 : (double) cacheHits / (double) ( cacheHits + cacheMiss );
+        synchronized ( cache )
+        {
+            return cacheHits == 0 && cacheMiss == 0 ? 0 : (double) cacheHits / (double) ( cacheHits + cacheMiss );
+        }
     }

     /**
      * Get the total number of cache objects currently cached.
      */
-    public long size()
+    public int size()
     {
         return cache.size();
     }
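getHitRate() is simply hits / (hits + misses), reported as 0 before any lookup has happened. A hypothetical usage sketch, assuming this commit's Cache class is on the classpath:

public class HitRateDemo
{
    public static void main( String[] args )
    {
        Cache cache = new Cache( 0.75, 10 );
        cache.put( "k", "v" );
        cache.get( "k" );    // hit
        cache.get( "k" );    // hit
        cache.get( "none" ); // miss
        // 2 hits out of 3 lookups: prints 0.666...
        System.out.println( cache.getHitRate() );
    }
}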
@@ -158,156 +176,50 @@ public class Cache
     /**
      * Empty the cache and reset the cache hit rate
      */
-    public void flush()
-    {
-        while ( cache.size() > 0 )
-        {
-            trimCache();
-        }
-        cacheHits = 0;
-        cacheMiss = 0;
-        cache = new HashMap();
-    }
-
-    private void makeMostRecent( DblLinkedList list )
-    {
-        if ( mostRecent != null )
-        {
-            if ( !mostRecent.equals( list ) )
-            {
-                removeFromLinks( list );
-
-                list.setNext( mostRecent );
-                mostRecent.setPrev( list );
-
-                mostRecent = list;
-            }
-        }
-        else if ( list != null )
-        {
-            removeFromLinks( list );
-
-            mostRecent = list;
-        }
-    }
-
-    private void removeFromLinks( DblLinkedList list )
-    {
-        if ( list.getPrev() != null )
-        {
-            list.getPrev().setNext( list.getNext() );
-        }
-        if ( list.getNext() != null )
-        {
-            list.getNext().setPrev( list.getPrev() );
-        }
-
-        list.setPrev( null );
-        list.setNext( null );
-    }
-
-    private void manageCache()
-    {
-        if ( cacheMaxSize == 0 )
-        {
-            //desired HitRatio is reached, we can trim the cache to conserve memory
-            if ( cacheHitRatio <= getHitRate() )
-            {
-                trimCache();
-            }
-        }
-        else if ( cache.size() > cacheMaxSize )
-        {
-            // maximum cache size is reached
-            while ( cache.size() > cacheMaxSize )
-            {
-                trimCache();
-            }
-        }
-        else
-        {
-            //even though the max has not been reached, the desired HitRatio is already reached,
-            // so we can trim the cache to conserve memory
-            if ( cacheHitRatio <= getHitRate() )
-            {
-                trimCache();
-            }
-        }
-    }
-
-    private void trimCache()
-    {
-        DblLinkedList leastRecent = getLeastRecent();
-        cache.remove( leastRecent.getCacheKey() );
-        if ( cache.size() > 0 )
-        {
-            removeFromLinks( leastRecent );
-        }
-        else
-        {
-            mostRecent = null;
-        }
-    }
-
-    private DblLinkedList getLeastRecent()
-    {
-        DblLinkedList trail = mostRecent;
-
-        while ( trail.getNext() != null )
-        {
-            trail = trail.getNext();
-        }
-
-        return trail;
-    }
-
-    /**
-     * @todo replace with standard collection (commons-collections?)
-     */
-    private static class DblLinkedList
-    {
-        private Object cacheKey;
-
-        private Object cacheValue;
-
-        private DblLinkedList prev;
-
-        private DblLinkedList next;
-
-        DblLinkedList( Object key, Object value )
-        {
-            this.cacheKey = key;
-            this.cacheValue = value;
-        }
-
-        public DblLinkedList getNext()
-        {
-            return next;
-        }
-
-        public Object getCacheValue()
-        {
-            return cacheValue;
-        }
-
-        public void setPrev( DblLinkedList prev )
-        {
-            this.prev = prev;
-        }
-
-        public void setNext( DblLinkedList next )
-        {
-            this.next = next;
-        }
-
-        public Object getCacheKey()
-        {
-            return cacheKey;
-        }
-
-        public DblLinkedList getPrev()
-        {
-            return prev;
-        }
-    }
+    public void clear()
+    {
+        synchronized ( cache )
+        {
+            cacheHits = 0;
+            cacheMiss = 0;
+            cache.clear();
+        }
+    }
+
+    private void manageCache()
+    {
+        synchronized ( cache )
+        {
+            Iterator iterator = cache.entrySet().iterator();
+            if ( cacheMaxSize == 0 )
+            {
+                //desired HitRatio is reached, we can trim the cache to conserve memory
+                if ( cacheHitRatio <= getHitRate() )
+                {
+                    iterator.next();
+                    iterator.remove();
+                }
+            }
+            else if ( cache.size() > cacheMaxSize )
+            {
+                // maximum cache size is reached
+                while ( cache.size() > cacheMaxSize )
+                {
+                    iterator.next();
+                    iterator.remove();
+                }
+            }
+            else
+            {
+                //even though the max has not been reached, the desired HitRatio is already reached,
+                // so we can trim the cache to conserve memory
+                if ( cacheHitRatio <= getHitRate() )
+                {
+                    iterator.next();
+                    iterator.remove();
+                }
+            }
+        }
+    }
 }
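The new manageCache() evicts by advancing a fresh entry iterator one step, which lands on the eldest entry of the insertion-ordered map, and removing it. For comparison, a pure size cap could also be expressed declaratively with LinkedHashMap.removeEldestEntry (available since JDK 1.4). A hedged sketch, not what the commit does, since manageCache() additionally trims on the hit-ratio condition:

import java.util.LinkedHashMap;
import java.util.Map;

public class CappedMap extends LinkedHashMap
{
    private final int maxSize;

    public CappedMap( int maxSize )
    {
        super( maxSize );
        this.maxSize = maxSize;
    }

    // Called by put(): returning true drops the eldest entry automatically.
    protected boolean removeEldestEntry( Map.Entry eldest )
    {
        return size() > maxSize;
    }
}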
@@ -49,7 +49,6 @@ public class CachedRepositoryQueryLayer
     {
         boolean artifactFound = true;

-        // @todo should check for snapshot artifacts
         String artifactPath = repository.getBasedir() + "/" + repository.pathOf( artifact );

         if ( cache.get( artifactPath ) == null )
@@ -18,23 +18,18 @@ package org.apache.maven.repository.reporting;

 import org.apache.maven.artifact.Artifact;
 import org.apache.maven.artifact.repository.ArtifactRepository;
-import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
 import org.apache.maven.model.Model;
+import org.apache.maven.repository.digest.Digester;
 import org.codehaus.plexus.util.FileUtils;

 import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
 import java.io.IOException;
-import java.io.InputStream;
-import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;

 /**
  * This class reports invalid and mismatched checksums of artifacts and metadata files.
  * It validates MD5 and SHA-1 checksums.
  *
- * @todo remove stateful parts, change to singleton instantiation
  * @plexus.component role="org.apache.maven.repository.reporting.ArtifactReportProcessor" role-hint="checksum"
  */
 public class ChecksumArtifactReporter
@@ -44,6 +39,9 @@ public class ChecksumArtifactReporter

     private static final int CHECKSUM_BUFFER_SIZE = 16384;

+    /** @plexus.requirement */
+    private Digester digester;
+
     /**
      * Validate the checksum of the specified artifact.
      *
@@ -71,7 +69,7 @@ public class ChecksumArtifactReporter
     {
         try
         {
-            if ( validateChecksum( file, md5File, "MD5" ) )
+            if ( digester.verifyChecksum( file, FileUtils.fileRead( md5File ), Digester.MD5 ) )
             {
                 reporter.addSuccess( artifact );
             }
@@ -99,7 +97,7 @@ public class ChecksumArtifactReporter
     {
         try
         {
-            if ( validateChecksum( file, sha1File, "SHA-1" ) )
+            if ( digester.verifyChecksum( file, FileUtils.fileRead( sha1File ), Digester.SHA1 ) )
             {
                 reporter.addSuccess( artifact );
             }
@@ -122,169 +120,4 @@ public class ChecksumArtifactReporter
             reporter.addFailure( artifact, "SHA-1 checksum file does not exist." );
         }
     }
-
-    /**
-     * Validate the checksums of the metadata. Get the metadata file from the
-     * repository then validate the checksum.
-     */
-    public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository, ArtifactReporter reporter )
-    {
-        if ( !"file".equals( repository.getProtocol() ) )
-        {
-            // We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
-            throw new UnsupportedOperationException(
-                "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
-        }
-
-        //check if checksum files exist
-        String path = repository.pathOfRemoteRepositoryMetadata( metadata );
-        File file = new File( repository.getBasedir(), path );
-
-        File md5File = new File( repository.getBasedir(), path + ".md5" );
-        if ( md5File.exists() )
-        {
-            try
-            {
-                if ( validateChecksum( file, md5File, "MD5" ) )
-                {
-                    reporter.addSuccess( metadata );
-                }
-                else
-                {
-                    reporter.addFailure( metadata, "MD5 checksum does not match." );
-                }
-            }
-            catch ( NoSuchAlgorithmException e )
-            {
-                reporter.addFailure( metadata, "Unable to read MD5: " + e.getMessage() );
-            }
-            catch ( IOException e )
-            {
-                reporter.addFailure( metadata, "Unable to read MD5: " + e.getMessage() );
-            }
-        }
-        else
-        {
-            reporter.addFailure( metadata, "MD5 checksum file does not exist." );
-        }
-
-        File sha1File = new File( repository.getBasedir(), path + ".sha1" );
-        if ( sha1File.exists() )
-        {
-            try
-            {
-                if ( validateChecksum( file, sha1File, "SHA-1" ) )
-                {
-                    reporter.addSuccess( metadata );
-                }
-                else
-                {
-                    reporter.addFailure( metadata, "SHA-1 checksum does not match." );
-                }
-            }
-            catch ( NoSuchAlgorithmException e )
-            {
-                reporter.addFailure( metadata, "Unable to read SHA1: " + e.getMessage() );
-            }
-            catch ( IOException e )
-            {
-                reporter.addFailure( metadata, "Unable to read SHA1: " + e.getMessage() );
-            }
-        }
-        else
-        {
-            reporter.addFailure( metadata, "SHA-1 checksum file does not exist." );
-        }
-
-    }
-
-    /**
-     * Validate the checksum of the file.
-     *
-     * @param file The file to be validated.
-     * @param checksumFile the checksum to validate against
-     * @param algo The checksum algorithm used.
-     */
-    private boolean validateChecksum( File file, File checksumFile, String algo )
-        throws NoSuchAlgorithmException, IOException
-    {
-        //Create checksum for jar file
-        byte[] chk1 = createChecksum( file, algo );
-        //read the checksum file
-        String checksum = FileUtils.fileRead( checksumFile );
-
-        return checksum.toUpperCase().equals( byteArrayToHexStr( chk1 ).toUpperCase() );
-    }
-
-    /**
-     * Create a checksum from the specified metadata file.
-     *
-     * @param file The file that will be created a checksum.
-     * @param algo The algorithm to be used (MD5, SHA-1)
-     * @return
-     * @throws FileNotFoundException
-     * @throws NoSuchAlgorithmException
-     * @throws IOException
-     * @todo move to utility class
-     */
-    private static byte[] createChecksum( File file, String algo )
-        throws FileNotFoundException, NoSuchAlgorithmException, IOException
-    {
-        MessageDigest digest = MessageDigest.getInstance( algo );
-
-        InputStream fis = new FileInputStream( file );
-        try
-        {
-            byte[] buffer = new byte[CHECKSUM_BUFFER_SIZE];
-            int numRead;
-            do
-            {
-                numRead = fis.read( buffer );
-                if ( numRead > 0 )
-                {
-                    digest.update( buffer, 0, numRead );
-                }
-            }
-            while ( numRead != -1 );
-        }
-        finally
-        {
-            fis.close();
-        }
-
-        return digest.digest();
-    }
-
-    /**
-     * Convert an incoming array of bytes into a string that represents each of
-     * the bytes as two hex characters.
-     *
-     * @param data
-     * @todo move to utilities
-     */
-    public static String byteArrayToHexStr( byte[] data )
-    {
-        String output = "";
-
-        for ( int cnt = 0; cnt < data.length; cnt++ )
-        {
-            //Deposit a byte into the 8 lsb of an int.
-            int tempInt = data[cnt] & BYTE_MASK;
-
-            //Get hex representation of the int as a string.
-            String tempStr = Integer.toHexString( tempInt );
-
-            //Append a leading 0 if necessary so that each hex string will contain 2 characters.
-            if ( tempStr.length() == 1 )
-            {
-                tempStr = "0" + tempStr;
-            }
-
-            //Concatenate the two characters to the output string.
-            output = output + tempStr;
-        }
-
-        return output.toUpperCase();
-    }
-
 }
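The helpers deleted above are superseded by the new org.apache.maven.repository.digest.Digester component, whose source is not part of this diff. The sketch below reconstructs, from the call sites (createChecksum, verifyChecksum, the MD5/SHA1 constants) and the deleted code, what it plausibly contains; it is an assumption-labelled illustration, not the actual class:

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

// Sketch only: reconstructed from the call sites in this diff; the real
// Digester component may differ in names and behaviour.
public class DigesterSketch
{
    public static final String MD5 = "MD5";

    public static final String SHA1 = "SHA-1";

    public String createChecksum( File file, String algorithm )
        throws NoSuchAlgorithmException, IOException
    {
        MessageDigest digest = MessageDigest.getInstance( algorithm );
        InputStream fis = new FileInputStream( file );
        try
        {
            byte[] buffer = new byte[16384];
            int numRead;
            while ( ( numRead = fis.read( buffer ) ) != -1 )
            {
                digest.update( buffer, 0, numRead );
            }
        }
        finally
        {
            fis.close();
        }

        // Hex-encode the digest, equivalent to the deleted byteArrayToHexStr().
        byte[] bytes = digest.digest();
        StringBuffer hex = new StringBuffer();
        for ( int i = 0; i < bytes.length; i++ )
        {
            String s = Integer.toHexString( bytes[i] & 0xFF );
            if ( s.length() == 1 )
            {
                hex.append( '0' );
            }
            hex.append( s );
        }
        return hex.toString();
    }

    public boolean verifyChecksum( File file, String expected, String algorithm )
        throws NoSuchAlgorithmException, IOException
    {
        // The deleted validateChecksum() compared case-insensitively; trim()
        // is an extra assumption to tolerate trailing whitespace.
        return createChecksum( file, algorithm ).equalsIgnoreCase( expected.trim() );
    }
}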
@@ -16,30 +16,100 @@ package org.apache.maven.repository.reporting;
  * limitations under the License.
  */

-import org.apache.maven.artifact.Artifact;
 import org.apache.maven.artifact.repository.ArtifactRepository;
 import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
-import org.apache.maven.model.Model;
+import org.apache.maven.repository.digest.Digester;
+import org.codehaus.plexus.util.FileUtils;

 import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
 import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.net.URL;
-import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;

 /**
  * This class reports invalid and mismatched checksums of artifacts and metadata files.
  * It validates MD5 and SHA-1 checksums.
  *
- * @todo remove this duplicate class for the parent using one role
- * @plexus.component role="org.apache.maven.repository.reporting.MetadataReportProcessor" role-hint="checksum-metadata" instantiation-strategy="per-lookup"
+ * @plexus.component role="org.apache.maven.repository.reporting.MetadataReportProcessor" role-hint="checksum-metadata"
  */
 public class ChecksumMetadataReporter
-    extends ChecksumArtifactReporter
     implements MetadataReportProcessor
 {
+    /** @plexus.requirement */
+    private Digester digester;
+
+    /**
+     * Validate the checksums of the metadata. Get the metadata file from the
+     * repository then validate the checksum.
+     */
+    public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository, ArtifactReporter reporter )
+    {
+        if ( !"file".equals( repository.getProtocol() ) )
+        {
+            // We can't check other types of URLs yet. Need to use Wagon, with an exists() method.
+            throw new UnsupportedOperationException(
+                "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" );
+        }
+
+        //check if checksum files exist
+        String path = repository.pathOfRemoteRepositoryMetadata( metadata );
+        File file = new File( repository.getBasedir(), path );
+
+        File md5File = new File( repository.getBasedir(), path + ".md5" );
+        if ( md5File.exists() )
+        {
+            try
+            {
+                if ( digester.verifyChecksum( file, FileUtils.fileRead( md5File ), Digester.MD5 ) )
+                {
+                    reporter.addSuccess( metadata );
+                }
+                else
+                {
+                    reporter.addFailure( metadata, "MD5 checksum does not match." );
+                }
+            }
+            catch ( NoSuchAlgorithmException e )
+            {
+                reporter.addFailure( metadata, "Unable to read MD5: " + e.getMessage() );
+            }
+            catch ( IOException e )
+            {
+                reporter.addFailure( metadata, "Unable to read MD5: " + e.getMessage() );
+            }
+        }
+        else
+        {
+            reporter.addFailure( metadata, "MD5 checksum file does not exist." );
+        }
+
+        File sha1File = new File( repository.getBasedir(), path + ".sha1" );
+        if ( sha1File.exists() )
+        {
+            try
+            {
+                if ( digester.verifyChecksum( file, FileUtils.fileRead( sha1File ), Digester.SHA1 ) )
+                {
+                    reporter.addSuccess( metadata );
+                }
+                else
+                {
+                    reporter.addFailure( metadata, "SHA-1 checksum does not match." );
+                }
+            }
+            catch ( NoSuchAlgorithmException e )
+            {
+                reporter.addFailure( metadata, "Unable to read SHA1: " + e.getMessage() );
+            }
+            catch ( IOException e )
+            {
+                reporter.addFailure( metadata, "Unable to read SHA1: " + e.getMessage() );
+            }
+        }
+        else
+        {
+            reporter.addFailure( metadata, "SHA-1 checksum file does not exist." );
+        }
+
+    }
+
 }
@@ -42,7 +42,6 @@ public class InvalidPomArtifactReportProcessor
      * @param artifact The pom xml file to be validated, passed as an artifact object.
      * @param reporter The artifact reporter object.
      * @param repository the repository where the artifact is located.
-     * @todo fix repo paths
      */
     public void processArtifact( Model model, Artifact artifact, ArtifactReporter reporter,
                                  ArtifactRepository repository )
@@ -28,12 +28,9 @@ public interface RepositoryQueryLayer
 {
     String ROLE = RepositoryQueryLayer.class.getName();

-    boolean ARTIFACT_FOUND = true;
-
-    boolean ARTIFACT_NOT_FOUND = false;
-
     boolean containsArtifact( Artifact artifact );

+    /** @todo I believe we can remove this [BP] - artifact should contain all the necessary version info */
     boolean containsArtifact( Artifact artifact, Snapshot snapshot );

     List getVersions( Artifact artifact )
@@ -16,21 +16,18 @@ package org.apache.maven.repository.reporting;
  * limitations under the License.
  */

+import org.apache.maven.repository.digest.Digester;
 import org.codehaus.plexus.util.FileUtils;
 import org.codehaus.plexus.util.IOUtil;

 import java.io.BufferedOutputStream;
 import java.io.BufferedReader;
 import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.FileReader;
 import java.io.IOException;
-import java.io.InputStream;
 import java.io.OutputStream;
 import java.io.OutputStreamWriter;
-import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 import java.util.jar.JarEntry;
 import java.util.jar.JarOutputStream;
@@ -48,12 +45,14 @@ public abstract class AbstractChecksumArtifactReporterTestCase

     private static final String metadataChecksumFilename = "maven-metadata";

-    private static final int CHECKSUM_BUFFER_SIZE = 256;
+    private Digester digester;

     public void setUp()
         throws Exception
     {
         super.setUp();
+
+        digester = (Digester) lookup( Digester.ROLE );
     }

     /**
@@ -122,9 +121,9 @@ public abstract class AbstractChecksumArtifactReporterTestCase

         if ( dirFiles.mkdirs() )
         {
             // create a jar file
-            FileOutputStream f = new FileOutputStream( repoUrl + relativePath + dirs + "/" + filename + "." + type );
+            String path = repoUrl + relativePath + dirs + "/" + filename + "." + type;
+            FileOutputStream f = new FileOutputStream( path );
             JarOutputStream out = new JarOutputStream( new BufferedOutputStream( f ) );

             // jar sample.txt
@@ -138,42 +137,34 @@ public abstract class AbstractChecksumArtifactReporterTestCase
             out.close();

             //Create md5 and sha-1 checksum files..
-            byte[] md5chk = createChecksum( repoUrl + relativePath + dirs + "/" + filename + "." + type, "MD5" );
-            byte[] sha1chk = createChecksum( repoUrl + relativePath + dirs + "/" + filename + "." + type, "SHA-1" );
-
-            File file;
-
-            if ( md5chk != null )
-            {
-                file = new File( repoUrl + relativePath + dirs + "/" + filename + "." + type + ".md5" );
-                OutputStream os = new FileOutputStream( file );
-                OutputStreamWriter osw = new OutputStreamWriter( os );
-                if ( !isValid )
-                {
-                    osw.write( ChecksumArtifactReporter.byteArrayToHexStr( md5chk ) + "1" );
-                }
-                else
-                {
-                    osw.write( ChecksumArtifactReporter.byteArrayToHexStr( md5chk ) );
-                }
-                osw.close();
-            }
-
-            if ( sha1chk != null )
-            {
-                file = new File( repoUrl + relativePath + dirs + "/" + filename + "." + type + ".sha1" );
-                OutputStream os = new FileOutputStream( file );
-                OutputStreamWriter osw = new OutputStreamWriter( os );
-                if ( !isValid )
-                {
-                    osw.write( ChecksumArtifactReporter.byteArrayToHexStr( sha1chk ) + "2" );
-                }
-                else
-                {
-                    osw.write( ChecksumArtifactReporter.byteArrayToHexStr( sha1chk ) );
-                }
-                osw.close();
-            }
+            File file = new File( path + ".md5" );
+            OutputStream os = new FileOutputStream( file );
+            OutputStreamWriter osw = new OutputStreamWriter( os );
+            String sum = digester.createChecksum( new File( path ), Digester.MD5 );
+            if ( !isValid )
+            {
+                osw.write( sum + "1" );
+            }
+            else
+            {
+                osw.write( sum );
+            }
+            osw.close();
+
+            file = new File( path + ".sha1" );
+            os = new FileOutputStream( file );
+            osw = new OutputStreamWriter( os );
+            String sha1sum = digester.createChecksum( new File( path ), Digester.SHA1 );
+            if ( !isValid )
+            {
+                osw.write( sha1sum + "2" );
+            }
+            else
+            {
+                osw.write( sha1sum );
+            }
+            osw.close();
         }
     }
@@ -192,45 +183,38 @@ public abstract class AbstractChecksumArtifactReporterTestCase
         String repoUrl = repository.getBasedir();
         String url = repository.getBasedir() + "/" + filename + "." + type;

-        FileUtils.copyFile( new File( url ), new File( repoUrl + relativePath + filename + "." + type ) );
+        String path = repoUrl + relativePath + filename + "." + type;
+        FileUtils.copyFile( new File( url ), new File( path ) );

         //Create md5 and sha-1 checksum files..
-        byte[] md5chk = createChecksum( repoUrl + relativePath + filename + "." + type, "MD5" );
-        byte[] sha1chk = createChecksum( repoUrl + relativePath + filename + "." + type, "SHA-1" );
-
-        File file;
-
-        if ( md5chk != null )
-        {
-            file = new File( repoUrl + relativePath + filename + "." + type + ".md5" );
-            OutputStream os = new FileOutputStream( file );
-            OutputStreamWriter osw = new OutputStreamWriter( os );
-            if ( !isValid )
-            {
-                osw.write( ChecksumArtifactReporter.byteArrayToHexStr( md5chk ) + "1" );
-            }
-            else
-            {
-                osw.write( ChecksumArtifactReporter.byteArrayToHexStr( md5chk ) );
-            }
-            osw.close();
-        }
-
-        if ( sha1chk != null )
-        {
-            file = new File( repoUrl + relativePath + filename + "." + type + ".sha1" );
-            OutputStream os = new FileOutputStream( file );
-            OutputStreamWriter osw = new OutputStreamWriter( os );
-            if ( !isValid )
-            {
-                osw.write( ChecksumArtifactReporter.byteArrayToHexStr( sha1chk ) + "2" );
-            }
-            else
-            {
-                osw.write( ChecksumArtifactReporter.byteArrayToHexStr( sha1chk ) );
-            }
-            osw.close();
-        }
+        File file = new File( path + ".md5" );
+        OutputStream os = new FileOutputStream( file );
+        OutputStreamWriter osw = new OutputStreamWriter( os );
+        String md5sum = digester.createChecksum( new File( path ), Digester.MD5 );
+        if ( !isValid )
+        {
+            osw.write( md5sum + "1" );
+        }
+        else
+        {
+            osw.write( md5sum );
+        }
+        osw.close();
+
+        file = new File( path + ".sha1" );
+        os = new FileOutputStream( file );
+        osw = new OutputStreamWriter( os );
+        String sha1sum = digester.createChecksum( new File( path ), Digester.SHA1 );
+        if ( !isValid )
+        {
+            osw.write( sha1sum + "2" );
+        }
+        else
+        {
+            osw.write( sha1sum );
+        }
+        osw.close();
     }

     /**
|
@ -248,37 +232,6 @@ public abstract class AbstractChecksumArtifactReporterTestCase
|
||||||
osw.close();
|
osw.close();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Create a checksum from the specified metadata file.
|
|
||||||
*
|
|
||||||
* @throws FileNotFoundException
|
|
||||||
* @throws NoSuchAlgorithmException
|
|
||||||
* @throws IOException
|
|
||||||
*/
|
|
||||||
private byte[] createChecksum( String filename, String algo )
|
|
||||||
throws FileNotFoundException, NoSuchAlgorithmException, IOException
|
|
||||||
{
|
|
||||||
|
|
||||||
// TODO: share with ArtifactRepositoryIndex.getChecksum(), ChecksumArtifactReporter.getChecksum()
|
|
||||||
InputStream fis = new FileInputStream( filename );
|
|
||||||
byte[] buffer = new byte[CHECKSUM_BUFFER_SIZE];
|
|
||||||
|
|
||||||
MessageDigest complete = MessageDigest.getInstance( algo );
|
|
||||||
int numRead;
|
|
||||||
do
|
|
||||||
{
|
|
||||||
numRead = fis.read( buffer );
|
|
||||||
if ( numRead > 0 )
|
|
||||||
{
|
|
||||||
complete.update( buffer, 0, numRead );
|
|
||||||
}
|
|
||||||
}
|
|
||||||
while ( numRead != -1 );
|
|
||||||
fis.close();
|
|
||||||
|
|
||||||
return complete.digest();
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Delete the test directory created in the repository.
|
* Delete the test directory created in the repository.
|
||||||
*
|
*
|
||||||
|
|
|
@@ -40,6 +40,10 @@ public class ArtifactReportProcessorTest

     private DefaultArtifactReportProcessor processor;

+    private static final boolean ARTIFACT_FOUND = true;
+
+    private static final boolean ARTIFACT_NOT_FOUND = false;
+
     protected void setUp()
         throws Exception
     {
@@ -64,7 +68,7 @@ public class ArtifactReportProcessorTest
     public void testNoProjectDescriptor()
     {
         MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
-        queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND );
+        queryLayer.addReturnValue( ARTIFACT_FOUND );
         processor.setRepositoryQueryLayer( queryLayer );
         setRequiredElements( artifact, VALID, VALID, VALID );
         processor.processArtifact( null, artifact, reporter, null );
@@ -79,7 +83,7 @@ public class ArtifactReportProcessorTest
     public void testArtifactFoundButNoDirectDependencies()
     {
         MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
-        queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND );
+        queryLayer.addReturnValue( ARTIFACT_FOUND );
         processor.setRepositoryQueryLayer( queryLayer );
         setRequiredElements( artifact, VALID, VALID, VALID );
         processor.processArtifact( model, artifact, reporter, null );
@@ -91,7 +95,7 @@ public class ArtifactReportProcessorTest
     public void testArtifactNotFound()
     {
         MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
-        queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_NOT_FOUND );
+        queryLayer.addReturnValue( ARTIFACT_NOT_FOUND );
         processor.setRepositoryQueryLayer( queryLayer );
         setRequiredElements( artifact, VALID, VALID, VALID );
         processor.processArtifact( model, artifact, reporter, null );
@@ -110,12 +114,12 @@ public class ArtifactReportProcessorTest

         setRequiredElements( artifact, VALID, VALID, VALID );
         MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
-        queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND );
+        queryLayer.addReturnValue( ARTIFACT_FOUND );

         Dependency dependency = new Dependency();
         setRequiredElements( dependency, VALID, VALID, VALID );
         model.addDependency( dependency );
-        queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND );
+        queryLayer.addReturnValue( ARTIFACT_FOUND );

         processor.setRepositoryQueryLayer( queryLayer );
         processor.processArtifact( model, artifact, reporter, null );
@@ -131,12 +135,12 @@ public class ArtifactReportProcessorTest

         setRequiredElements( artifact, VALID, VALID, VALID );
         MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
-        queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND );
+        queryLayer.addReturnValue( ARTIFACT_FOUND );

         Dependency dependency = new Dependency();
         setRequiredElements( dependency, VALID, VALID, VALID );
         model.addDependency( dependency );
-        queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND );
+        queryLayer.addReturnValue( ARTIFACT_FOUND );

         processor.setRepositoryQueryLayer( queryLayer );
         processor.processArtifact( model, artifact, reporter, null );
@@ -151,20 +155,20 @@ public class ArtifactReportProcessorTest
         processor.setArtifactFactory( artifactFactory );

         MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
-        queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND );
+        queryLayer.addReturnValue( ARTIFACT_FOUND );

         Dependency dependency = new Dependency();
         setRequiredElements( dependency, VALID, VALID, VALID );
         model.addDependency( dependency );
-        queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND );
+        queryLayer.addReturnValue( ARTIFACT_FOUND );
         model.addDependency( dependency );
-        queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND );
+        queryLayer.addReturnValue( ARTIFACT_FOUND );
         model.addDependency( dependency );
-        queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND );
+        queryLayer.addReturnValue( ARTIFACT_FOUND );
         model.addDependency( dependency );
-        queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND );
+        queryLayer.addReturnValue( ARTIFACT_FOUND );
         model.addDependency( dependency );
-        queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND );
+        queryLayer.addReturnValue( ARTIFACT_FOUND );

         setRequiredElements( artifact, VALID, VALID, VALID );
         processor.setRepositoryQueryLayer( queryLayer );
@@ -180,20 +184,20 @@ public class ArtifactReportProcessorTest
         processor.setArtifactFactory( artifactFactory );

         MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
-        queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND );
+        queryLayer.addReturnValue( ARTIFACT_FOUND );

         Dependency dependency = new Dependency();
         setRequiredElements( dependency, VALID, VALID, VALID );
         model.addDependency( dependency );
-        queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND );
+        queryLayer.addReturnValue( ARTIFACT_FOUND );
         model.addDependency( dependency );
-        queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND );
+        queryLayer.addReturnValue( ARTIFACT_FOUND );
         model.addDependency( dependency );
-        queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_NOT_FOUND );
+        queryLayer.addReturnValue( ARTIFACT_NOT_FOUND );
         model.addDependency( dependency );
-        queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND );
+        queryLayer.addReturnValue( ARTIFACT_FOUND );
         model.addDependency( dependency );
-        queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND );
+        queryLayer.addReturnValue( ARTIFACT_FOUND );

         setRequiredElements( artifact, VALID, VALID, VALID );
         processor.setRepositoryQueryLayer( queryLayer );
@@ -210,7 +214,7 @@ public class ArtifactReportProcessorTest
     public void testEmptyGroupId()
     {
         MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
-        queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND );
+        queryLayer.addReturnValue( ARTIFACT_FOUND );
         processor.setRepositoryQueryLayer( queryLayer );

         setRequiredElements( artifact, EMPTY_STRING, VALID, VALID );
@@ -227,7 +231,7 @@ public class ArtifactReportProcessorTest
     public void testEmptyArtifactId()
     {
         MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
-        queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND );
+        queryLayer.addReturnValue( ARTIFACT_FOUND );
         processor.setRepositoryQueryLayer( queryLayer );

         setRequiredElements( artifact, VALID, EMPTY_STRING, VALID );
@@ -244,7 +248,7 @@ public class ArtifactReportProcessorTest
     public void testEmptyVersion()
     {
         MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
-        queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND );
+        queryLayer.addReturnValue( ARTIFACT_FOUND );
         processor.setRepositoryQueryLayer( queryLayer );

         setRequiredElements( artifact, VALID, VALID, EMPTY_STRING );
@@ -261,7 +265,7 @@ public class ArtifactReportProcessorTest
     public void testNullGroupId()
     {
         MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
-        queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND );
+        queryLayer.addReturnValue( ARTIFACT_FOUND );
         processor.setRepositoryQueryLayer( queryLayer );

         setRequiredElements( artifact, null, VALID, VALID );
@@ -278,7 +282,7 @@ public class ArtifactReportProcessorTest
     public void testNullArtifactId()
     {
         MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
-        queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND );
+        queryLayer.addReturnValue( ARTIFACT_FOUND );
         processor.setRepositoryQueryLayer( queryLayer );

         setRequiredElements( artifact, VALID, null, VALID );
@@ -295,7 +299,7 @@ public class ArtifactReportProcessorTest
     public void testNullVersion()
     {
         MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
-        queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND );
+        queryLayer.addReturnValue( ARTIFACT_FOUND );
         processor.setRepositoryQueryLayer( queryLayer );

         setRequiredElements( artifact, VALID, VALID, null );
@@ -312,7 +316,7 @@ public class ArtifactReportProcessorTest
     public void testMultipleFailures()
     {
         MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
-        queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND );
+        queryLayer.addReturnValue( ARTIFACT_FOUND );
         processor.setRepositoryQueryLayer( queryLayer );

         setRequiredElements( artifact, null, null, null );
@@ -337,12 +341,12 @@ public class ArtifactReportProcessorTest

         setRequiredElements( artifact, VALID, VALID, VALID );
         MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
-        queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND );
+        queryLayer.addReturnValue( ARTIFACT_FOUND );

         Dependency dependency = new Dependency();
         setRequiredElements( dependency, null, VALID, VALID );
         model.addDependency( dependency );
-        queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND );
+        queryLayer.addReturnValue( ARTIFACT_FOUND );

         processor.setRepositoryQueryLayer( queryLayer );
         processor.processArtifact( model, artifact, reporter, null );
@@ -362,12 +366,12 @@ public class ArtifactReportProcessorTest

         setRequiredElements( artifact, VALID, VALID, VALID );
         MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
-        queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND );
+        queryLayer.addReturnValue( ARTIFACT_FOUND );

         Dependency dependency = new Dependency();
         setRequiredElements( dependency, VALID, null, VALID );
         model.addDependency( dependency );
-        queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND );
+        queryLayer.addReturnValue( ARTIFACT_FOUND );

         processor.setRepositoryQueryLayer( queryLayer );
         processor.processArtifact( model, artifact, reporter, null );
@@ -387,12 +391,12 @@ public class ArtifactReportProcessorTest

         setRequiredElements( artifact, VALID, VALID, VALID );
         MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
-        queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND );
+        queryLayer.addReturnValue( ARTIFACT_FOUND );

         Dependency dependency = new Dependency();
         setRequiredElements( dependency, VALID, VALID, null );
         model.addDependency( dependency );
-        queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND );
+        queryLayer.addReturnValue( ARTIFACT_FOUND );

         processor.setRepositoryQueryLayer( queryLayer );
         processor.processArtifact( model, artifact, reporter, null );
@@ -412,12 +416,12 @@ public class ArtifactReportProcessorTest

         setRequiredElements( artifact, VALID, VALID, VALID );
         MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer();
-        queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND );
+        queryLayer.addReturnValue( ARTIFACT_FOUND );

         Dependency dependency = new Dependency();
         setRequiredElements( dependency, null, null, null );
         model.addDependency( dependency );
-        queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND );
+        queryLayer.addReturnValue( ARTIFACT_FOUND );

         processor.setRepositoryQueryLayer( queryLayer );
         processor.processArtifact( model, artifact, reporter, null );
@@ -134,10 +134,10 @@ public class CacheTest
         assertEquals( "check cache size", 1, cache.size() );
         assertNull( "check cache miss", cache.get( "none" ) );
         assertEquals( CACHE_HIT_RATIO, cache.getHitRate(), 0 );
-        cache.flush();
+        cache.clear();
         assertNull( "check flushed object", cache.get( "key" ) );
         assertEquals( (double) 0, cache.getHitRate(), 0 );
         assertEquals( "check flushed cache size", 0, cache.size() );
-        cache.flush();
+        cache.clear();
     }
 }
@@ -69,7 +69,6 @@ public class MockRepositoryQueryLayer

     public boolean containsArtifact( Artifact artifact, Snapshot snapshot )
     {
-        // TODO
         return containsArtifact( artifact );
     }