diff --git a/maven-repository-reports-standard/pom.xml b/maven-repository-reports-standard/pom.xml index 02d2de17b..792359141 100755 --- a/maven-repository-reports-standard/pom.xml +++ b/maven-repository-reports-standard/pom.xml @@ -57,25 +57,9 @@ org.apache.maven maven-artifact-manager - + + org.apache.maven.repository + maven-repository-utils + - - - - org.apache.maven.plugins - maven-surefire-plugin - - - **/Abstract*.java - **/Test*.java - - - - - diff --git a/maven-repository-reports-standard/src/main/java/org/apache/maven/repository/reporting/BadMetadataReportProcessor.java b/maven-repository-reports-standard/src/main/java/org/apache/maven/repository/reporting/BadMetadataReportProcessor.java index 027af116f..fdba18d39 100644 --- a/maven-repository-reports-standard/src/main/java/org/apache/maven/repository/reporting/BadMetadataReportProcessor.java +++ b/maven-repository-reports-standard/src/main/java/org/apache/maven/repository/reporting/BadMetadataReportProcessor.java @@ -268,7 +268,6 @@ public class BadMetadataReportProcessor { boolean hasFailures = false; Versioning versioning = metadata.getMetadata().getVersioning(); - // TODO: change this to look for repository artifacts. It needs to centre around that I think, currently this is hardwired to the default layout File versionsDir = new File( repository.getBasedir(), repository.pathOfRemoteRepositoryMetadata( metadata ) ).getParentFile(); List versions = FileUtils.getFileNames( versionsDir, "*/*.pom", null, false ); diff --git a/maven-repository-reports-standard/src/main/java/org/apache/maven/repository/reporting/Cache.java b/maven-repository-reports-standard/src/main/java/org/apache/maven/repository/reporting/Cache.java index 45df43027..02809c60e 100644 --- a/maven-repository-reports-standard/src/main/java/org/apache/maven/repository/reporting/Cache.java +++ b/maven-repository-reports-standard/src/main/java/org/apache/maven/repository/reporting/Cache.java @@ -16,21 +16,20 @@ package org.apache.maven.repository.reporting; * limitations under the License. */ -import java.util.HashMap; +import java.util.Iterator; +import java.util.LinkedHashMap; import java.util.Map; /** - * Class to implement caching + * Class to implement caching. */ public class Cache { - private Map cache; + private final Map cache; - private DblLinkedList mostRecent; + private final double cacheHitRatio; - private double cacheHitRatio; - - private long cacheMaxSize; + private final int cacheMaxSize; private long cacheHits; @@ -47,7 +46,7 @@ public class Cache /** * Caches all data and expires only the oldest data when the maximum cache size is reached */ - public Cache( long cacheMaxSize ) + public Cache( int cacheMaxSize ) { this( (double) 1, cacheMaxSize ); } @@ -56,12 +55,19 @@ public class Cache * Caches all data and expires only the oldest data when either the specified cache hit rate is reached * or the maximum cache size is reached. 
*/ - public Cache( double cacheHitRatio, long cacheMaxSize ) + public Cache( double cacheHitRatio, int cacheMaxSize ) { this.cacheHitRatio = cacheHitRatio; this.cacheMaxSize = cacheMaxSize; - cache = new HashMap(); + if ( cacheMaxSize > 0 ) + { + cache = new LinkedHashMap( cacheMaxSize ); + } + else + { + cache = new LinkedHashMap(); + } } /** @@ -72,15 +78,19 @@ public class Cache */ public boolean containsKey( Object key ) { - boolean contains = cache.containsKey( key ); + boolean contains; + synchronized ( cache ) + { + contains = cache.containsKey( key ); - if ( contains ) - { - cacheHits++; - } - else - { - cacheMiss++; + if ( contains ) + { + cacheHits++; + } + else + { + cacheMiss++; + } } return contains; @@ -96,19 +106,21 @@ public class Cache { Object retValue = null; - if ( cache.containsKey( key ) ) + synchronized ( cache ) { - DblLinkedList cacheEntry = (DblLinkedList) cache.get( key ); + if ( cache.containsKey( key ) ) + { + // remove and put: this promotes it to the top since we use a linked hash map + retValue = cache.remove( key ); - makeMostRecent( cacheEntry ); + cache.put( key, retValue ); - retValue = cacheEntry.getCacheValue(); - - cacheHits++; - } - else - { - cacheMiss++; + cacheHits++; + } + else + { + cacheMiss++; + } } return retValue; @@ -120,21 +132,24 @@ public class Cache * @param key the object to map the valued object * @param value the object to cache */ - public void put( Object key, Object value ) + public Object put( Object key, Object value ) { - DblLinkedList entry; - if ( !cache.containsKey( key ) ) + Object old = null; + + // remove and put: this promotes it to the top since we use a linked hash map + synchronized ( cache ) { - entry = new DblLinkedList( key, value ); - cache.put( key, entry ); - manageCache(); - } - else - { - entry = (DblLinkedList) cache.get( key ); + if ( cache.containsKey( key ) ) + { + old = cache.remove( key ); + } + + cache.put( key, value ); } - makeMostRecent( entry ); + manageCache(); + + return old; } /** @@ -144,13 +159,16 @@ public class Cache */ public double getHitRate() { - return cacheHits == 0 && cacheMiss == 0 ? 0 : (double) cacheHits / (double) ( cacheHits + cacheMiss ); + synchronized ( cache ) + { + return cacheHits == 0 && cacheMiss == 0 ? 0 : (double) cacheHits / (double) ( cacheHits + cacheMiss ); + } } /** * Get the total number of cache objects currently cached. 
*/ - public long size() + public int size() { return cache.size(); } @@ -158,156 +176,50 @@ public class Cache /** * Empty the cache and reset the cache hit rate */ - public void flush() + public void clear() { - while ( cache.size() > 0 ) + synchronized ( cache ) { - trimCache(); + cacheHits = 0; + cacheMiss = 0; + cache.clear(); } - cacheHits = 0; - cacheMiss = 0; - cache = new HashMap(); - } - - private void makeMostRecent( DblLinkedList list ) - { - if ( mostRecent != null ) - { - if ( !mostRecent.equals( list ) ) - { - removeFromLinks( list ); - - list.setNext( mostRecent ); - mostRecent.setPrev( list ); - - mostRecent = list; - } - } - else if ( list != null ) - { - removeFromLinks( list ); - - mostRecent = list; - } - } - - private void removeFromLinks( DblLinkedList list ) - { - if ( list.getPrev() != null ) - { - list.getPrev().setNext( list.getNext() ); - } - if ( list.getNext() != null ) - { - list.getNext().setPrev( list.getPrev() ); - } - - list.setPrev( null ); - list.setNext( null ); } private void manageCache() { - if ( cacheMaxSize == 0 ) + synchronized ( cache ) { - //desired HitRatio is reached, we can trim the cache to conserve memory - if ( cacheHitRatio <= getHitRate() ) + Iterator iterator = cache.entrySet().iterator(); + if ( cacheMaxSize == 0 ) { - trimCache(); + //desired HitRatio is reached, we can trim the cache to conserve memory + if ( cacheHitRatio <= getHitRate() ) + { + iterator.next(); + iterator.remove(); + } } - } - else if ( cache.size() > cacheMaxSize ) - { - // maximum cache size is reached - while ( cache.size() > cacheMaxSize ) + else if ( cache.size() > cacheMaxSize ) { - trimCache(); + // maximum cache size is reached + while ( cache.size() > cacheMaxSize ) + { + iterator.next(); + iterator.remove(); + } } - } - else - { - //even though the max has not been reached, the desired HitRatio is already reached, - // so we can trim the cache to conserve memory - if ( cacheHitRatio <= getHitRate() ) + else { - trimCache(); + //even though the max has not been reached, the desired HitRatio is already reached, + // so we can trim the cache to conserve memory + if ( cacheHitRatio <= getHitRate() ) + { + iterator.next(); + iterator.remove(); + } } } } - private void trimCache() - { - DblLinkedList leastRecent = getLeastRecent(); - cache.remove( leastRecent.getCacheKey() ); - if ( cache.size() > 0 ) - { - removeFromLinks( leastRecent ); - } - else - { - mostRecent = null; - } - } - - private DblLinkedList getLeastRecent() - { - DblLinkedList trail = mostRecent; - - while ( trail.getNext() != null ) - { - trail = trail.getNext(); - } - - return trail; - } - - /** - * @todo replace with standard collection (commons-collections?) 
- */ - private static class DblLinkedList - { - private Object cacheKey; - - private Object cacheValue; - - private DblLinkedList prev; - - private DblLinkedList next; - - DblLinkedList( Object key, Object value ) - { - this.cacheKey = key; - this.cacheValue = value; - } - - public DblLinkedList getNext() - { - return next; - } - - public Object getCacheValue() - { - return cacheValue; - } - - public void setPrev( DblLinkedList prev ) - { - this.prev = prev; - } - - public void setNext( DblLinkedList next ) - { - this.next = next; - } - - public Object getCacheKey() - { - return cacheKey; - } - - public DblLinkedList getPrev() - { - return prev; - } - } } diff --git a/maven-repository-reports-standard/src/main/java/org/apache/maven/repository/reporting/CachedRepositoryQueryLayer.java b/maven-repository-reports-standard/src/main/java/org/apache/maven/repository/reporting/CachedRepositoryQueryLayer.java index d52453e91..f4bf85ff6 100644 --- a/maven-repository-reports-standard/src/main/java/org/apache/maven/repository/reporting/CachedRepositoryQueryLayer.java +++ b/maven-repository-reports-standard/src/main/java/org/apache/maven/repository/reporting/CachedRepositoryQueryLayer.java @@ -49,7 +49,6 @@ public class CachedRepositoryQueryLayer { boolean artifactFound = true; - // @todo should check for snapshot artifacts String artifactPath = repository.getBasedir() + "/" + repository.pathOf( artifact ); if ( cache.get( artifactPath ) == null ) diff --git a/maven-repository-reports-standard/src/main/java/org/apache/maven/repository/reporting/ChecksumArtifactReporter.java b/maven-repository-reports-standard/src/main/java/org/apache/maven/repository/reporting/ChecksumArtifactReporter.java index c91ddfe29..f9fcd903a 100644 --- a/maven-repository-reports-standard/src/main/java/org/apache/maven/repository/reporting/ChecksumArtifactReporter.java +++ b/maven-repository-reports-standard/src/main/java/org/apache/maven/repository/reporting/ChecksumArtifactReporter.java @@ -18,23 +18,18 @@ package org.apache.maven.repository.reporting; import org.apache.maven.artifact.Artifact; import org.apache.maven.artifact.repository.ArtifactRepository; -import org.apache.maven.artifact.repository.metadata.RepositoryMetadata; import org.apache.maven.model.Model; +import org.apache.maven.repository.digest.Digester; import org.codehaus.plexus.util.FileUtils; import java.io.File; -import java.io.FileInputStream; -import java.io.FileNotFoundException; import java.io.IOException; -import java.io.InputStream; -import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; /** * This class reports invalid and mismatched checksums of artifacts and metadata files. * It validates MD5 and SHA-1 checksums. * - * @todo remove stateful parts, change to singleton instantiation * @plexus.component role="org.apache.maven.repository.reporting.ArtifactReportProcessor" role-hint="checksum" */ public class ChecksumArtifactReporter @@ -44,6 +39,9 @@ public class ChecksumArtifactReporter private static final int CHECKSUM_BUFFER_SIZE = 16384; + /** @plexus.requirement */ + private Digester digester; + /** * Validate the checksum of the specified artifact. 
* @@ -71,7 +69,7 @@ public class ChecksumArtifactReporter { try { - if ( validateChecksum( file, md5File, "MD5" ) ) + if ( digester.verifyChecksum( file, FileUtils.fileRead( md5File ), Digester.MD5 ) ) { reporter.addSuccess( artifact ); } @@ -99,7 +97,7 @@ public class ChecksumArtifactReporter { try { - if ( validateChecksum( file, sha1File, "SHA-1" ) ) + if ( digester.verifyChecksum( file, FileUtils.fileRead( sha1File ), Digester.SHA1 ) ) { reporter.addSuccess( artifact ); } @@ -122,169 +120,4 @@ public class ChecksumArtifactReporter reporter.addFailure( artifact, "SHA-1 checksum file does not exist." ); } } - - /** - * Validate the checksums of the metadata. Get the metadata file from the - * repository then validate the checksum. - */ - public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository, ArtifactReporter reporter ) - { - if ( !"file".equals( repository.getProtocol() ) ) - { - // We can't check other types of URLs yet. Need to use Wagon, with an exists() method. - throw new UnsupportedOperationException( - "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" ); - } - - //check if checksum files exist - String path = repository.pathOfRemoteRepositoryMetadata( metadata ); - File file = new File( repository.getBasedir(), path ); - - File md5File = new File( repository.getBasedir(), path + ".md5" ); - if ( md5File.exists() ) - { - try - { - if ( validateChecksum( file, md5File, "MD5" ) ) - { - reporter.addSuccess( metadata ); - } - else - { - reporter.addFailure( metadata, "MD5 checksum does not match." ); - } - } - catch ( NoSuchAlgorithmException e ) - { - reporter.addFailure( metadata, "Unable to read MD5: " + e.getMessage() ); - } - catch ( IOException e ) - { - reporter.addFailure( metadata, "Unable to read MD5: " + e.getMessage() ); - } - } - else - { - reporter.addFailure( metadata, "MD5 checksum file does not exist." ); - } - - File sha1File = new File( repository.getBasedir(), path + ".sha1" ); - if ( sha1File.exists() ) - { - try - { - if ( validateChecksum( file, sha1File, "SHA-1" ) ) - { - reporter.addSuccess( metadata ); - } - else - { - reporter.addFailure( metadata, "SHA-1 checksum does not match." ); - } - } - catch ( NoSuchAlgorithmException e ) - { - reporter.addFailure( metadata, "Unable to read SHA1: " + e.getMessage() ); - } - catch ( IOException e ) - { - reporter.addFailure( metadata, "Unable to read SHA1: " + e.getMessage() ); - } - } - else - { - reporter.addFailure( metadata, "SHA-1 checksum file does not exist." ); - } - - } - - /** - * Validate the checksum of the file. - * - * @param file The file to be validated. - * @param checksumFile the checksum to validate against - * @param algo The checksum algorithm used. - */ - private boolean validateChecksum( File file, File checksumFile, String algo ) - throws NoSuchAlgorithmException, IOException - { - //Create checksum for jar file - byte[] chk1 = createChecksum( file, algo ); - //read the checksum file - String checksum = FileUtils.fileRead( checksumFile ); - - return checksum.toUpperCase().equals( byteArrayToHexStr( chk1 ).toUpperCase() ); - } - - /** - * Create a checksum from the specified metadata file. - * - * @param file The file that will be created a checksum. 
- * @param algo The algorithm to be used (MD5, SHA-1) - * @return - * @throws FileNotFoundException - * @throws NoSuchAlgorithmException - * @throws IOException - * @todo move to utility class - */ - private static byte[] createChecksum( File file, String algo ) - throws FileNotFoundException, NoSuchAlgorithmException, IOException - { - MessageDigest digest = MessageDigest.getInstance( algo ); - - InputStream fis = new FileInputStream( file ); - try - { - byte[] buffer = new byte[CHECKSUM_BUFFER_SIZE]; - int numRead; - do - { - numRead = fis.read( buffer ); - if ( numRead > 0 ) - { - digest.update( buffer, 0, numRead ); - } - } - while ( numRead != -1 ); - } - finally - { - fis.close(); - } - - return digest.digest(); - } - - /** - * Convert an incoming array of bytes into a string that represents each of - * the bytes as two hex characters. - * - * @param data - * @todo move to utilities - */ - public static String byteArrayToHexStr( byte[] data ) - { - String output = ""; - - for ( int cnt = 0; cnt < data.length; cnt++ ) - { - //Deposit a byte into the 8 lsb of an int. - int tempInt = data[cnt] & BYTE_MASK; - - //Get hex representation of the int as a string. - String tempStr = Integer.toHexString( tempInt ); - - //Append a leading 0 if necessary so that each hex string will contain 2 characters. - if ( tempStr.length() == 1 ) - { - tempStr = "0" + tempStr; - } - - //Concatenate the two characters to the output string. - output = output + tempStr; - } - - return output.toUpperCase(); - } - } diff --git a/maven-repository-reports-standard/src/main/java/org/apache/maven/repository/reporting/ChecksumMetadataReporter.java b/maven-repository-reports-standard/src/main/java/org/apache/maven/repository/reporting/ChecksumMetadataReporter.java index 530524c55..22c7dffe4 100644 --- a/maven-repository-reports-standard/src/main/java/org/apache/maven/repository/reporting/ChecksumMetadataReporter.java +++ b/maven-repository-reports-standard/src/main/java/org/apache/maven/repository/reporting/ChecksumMetadataReporter.java @@ -16,30 +16,100 @@ package org.apache.maven.repository.reporting; * limitations under the License. */ -import org.apache.maven.artifact.Artifact; import org.apache.maven.artifact.repository.ArtifactRepository; import org.apache.maven.artifact.repository.metadata.RepositoryMetadata; -import org.apache.maven.model.Model; +import org.apache.maven.repository.digest.Digester; +import org.codehaus.plexus.util.FileUtils; import java.io.File; -import java.io.FileInputStream; -import java.io.FileNotFoundException; import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.net.URL; -import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; /** * This class reports invalid and mismatched checksums of artifacts and metadata files. * It validates MD5 and SHA-1 checksums. * - * @todo remove this duplicate class for the parent using one role - * @plexus.component role="org.apache.maven.repository.reporting.MetadataReportProcessor" role-hint="checksum-metadata" instantiation-strategy="per-lookup" + * @plexus.component role="org.apache.maven.repository.reporting.MetadataReportProcessor" role-hint="checksum-metadata" */ public class ChecksumMetadataReporter - extends ChecksumArtifactReporter implements MetadataReportProcessor { + /** @plexus.requirement */ + private Digester digester; + + /** + * Validate the checksums of the metadata. Get the metadata file from the + * repository then validate the checksum. 
+ */ + public void processMetadata( RepositoryMetadata metadata, ArtifactRepository repository, ArtifactReporter reporter ) + { + if ( !"file".equals( repository.getProtocol() ) ) + { + // We can't check other types of URLs yet. Need to use Wagon, with an exists() method. + throw new UnsupportedOperationException( + "Can't process repository '" + repository.getUrl() + "'. Only file based repositories are supported" ); + } + + //check if checksum files exist + String path = repository.pathOfRemoteRepositoryMetadata( metadata ); + File file = new File( repository.getBasedir(), path ); + + File md5File = new File( repository.getBasedir(), path + ".md5" ); + if ( md5File.exists() ) + { + try + { + if ( digester.verifyChecksum( file, FileUtils.fileRead( md5File ), Digester.MD5 ) ) + { + reporter.addSuccess( metadata ); + } + else + { + reporter.addFailure( metadata, "MD5 checksum does not match." ); + } + } + catch ( NoSuchAlgorithmException e ) + { + reporter.addFailure( metadata, "Unable to read MD5: " + e.getMessage() ); + } + catch ( IOException e ) + { + reporter.addFailure( metadata, "Unable to read MD5: " + e.getMessage() ); + } + } + else + { + reporter.addFailure( metadata, "MD5 checksum file does not exist." ); + } + + File sha1File = new File( repository.getBasedir(), path + ".sha1" ); + if ( sha1File.exists() ) + { + try + { + if ( digester.verifyChecksum( file, FileUtils.fileRead( sha1File ), Digester.SHA1 ) ) + { + reporter.addSuccess( metadata ); + } + else + { + reporter.addFailure( metadata, "SHA-1 checksum does not match." ); + } + } + catch ( NoSuchAlgorithmException e ) + { + reporter.addFailure( metadata, "Unable to read SHA1: " + e.getMessage() ); + } + catch ( IOException e ) + { + reporter.addFailure( metadata, "Unable to read SHA1: " + e.getMessage() ); + } + } + else + { + reporter.addFailure( metadata, "SHA-1 checksum file does not exist." ); + } + + } + } diff --git a/maven-repository-reports-standard/src/main/java/org/apache/maven/repository/reporting/InvalidPomArtifactReportProcessor.java b/maven-repository-reports-standard/src/main/java/org/apache/maven/repository/reporting/InvalidPomArtifactReportProcessor.java index 5ff2fa1e6..a11f35754 100644 --- a/maven-repository-reports-standard/src/main/java/org/apache/maven/repository/reporting/InvalidPomArtifactReportProcessor.java +++ b/maven-repository-reports-standard/src/main/java/org/apache/maven/repository/reporting/InvalidPomArtifactReportProcessor.java @@ -42,7 +42,6 @@ public class InvalidPomArtifactReportProcessor * @param artifact The pom xml file to be validated, passed as an artifact object. * @param reporter The artifact reporter object. * @param repository the repository where the artifact is located. 
- * @todo fix repo paths */ public void processArtifact( Model model, Artifact artifact, ArtifactReporter reporter, ArtifactRepository repository ) diff --git a/maven-repository-reports-standard/src/main/java/org/apache/maven/repository/reporting/RepositoryQueryLayer.java b/maven-repository-reports-standard/src/main/java/org/apache/maven/repository/reporting/RepositoryQueryLayer.java index bb4117015..950f7a252 100644 --- a/maven-repository-reports-standard/src/main/java/org/apache/maven/repository/reporting/RepositoryQueryLayer.java +++ b/maven-repository-reports-standard/src/main/java/org/apache/maven/repository/reporting/RepositoryQueryLayer.java @@ -28,12 +28,9 @@ public interface RepositoryQueryLayer { String ROLE = RepositoryQueryLayer.class.getName(); - boolean ARTIFACT_FOUND = true; - - boolean ARTIFACT_NOT_FOUND = false; - boolean containsArtifact( Artifact artifact ); + /** @todo I believe we can remove this [BP] - artifact should contain all the necessary version info */ boolean containsArtifact( Artifact artifact, Snapshot snapshot ); List getVersions( Artifact artifact ) diff --git a/maven-repository-reports-standard/src/test/java/org/apache/maven/repository/reporting/AbstractChecksumArtifactReporterTestCase.java b/maven-repository-reports-standard/src/test/java/org/apache/maven/repository/reporting/AbstractChecksumArtifactReporterTestCase.java index 770879b0e..c8d03eb81 100644 --- a/maven-repository-reports-standard/src/test/java/org/apache/maven/repository/reporting/AbstractChecksumArtifactReporterTestCase.java +++ b/maven-repository-reports-standard/src/test/java/org/apache/maven/repository/reporting/AbstractChecksumArtifactReporterTestCase.java @@ -16,21 +16,18 @@ package org.apache.maven.repository.reporting; * limitations under the License. */ +import org.apache.maven.repository.digest.Digester; import org.codehaus.plexus.util.FileUtils; import org.codehaus.plexus.util.IOUtil; import java.io.BufferedOutputStream; import java.io.BufferedReader; import java.io.File; -import java.io.FileInputStream; -import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.FileReader; import java.io.IOException; -import java.io.InputStream; import java.io.OutputStream; import java.io.OutputStreamWriter; -import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.jar.JarEntry; import java.util.jar.JarOutputStream; @@ -48,12 +45,14 @@ public abstract class AbstractChecksumArtifactReporterTestCase private static final String metadataChecksumFilename = "maven-metadata"; - private static final int CHECKSUM_BUFFER_SIZE = 256; + private Digester digester; public void setUp() throws Exception { super.setUp(); + + digester = (Digester) lookup( Digester.ROLE ); } /** @@ -122,9 +121,9 @@ public abstract class AbstractChecksumArtifactReporterTestCase if ( dirFiles.mkdirs() ) { - // create a jar file - FileOutputStream f = new FileOutputStream( repoUrl + relativePath + dirs + "/" + filename + "." + type ); + String path = repoUrl + relativePath + dirs + "/" + filename + "." + type; + FileOutputStream f = new FileOutputStream( path ); JarOutputStream out = new JarOutputStream( new BufferedOutputStream( f ) ); // jar sample.txt @@ -138,42 +137,34 @@ public abstract class AbstractChecksumArtifactReporterTestCase out.close(); //Create md5 and sha-1 checksum files.. - byte[] md5chk = createChecksum( repoUrl + relativePath + dirs + "/" + filename + "." 
+ type, "MD5" ); - byte[] sha1chk = createChecksum( repoUrl + relativePath + dirs + "/" + filename + "." + type, "SHA-1" ); - File file; - - if ( md5chk != null ) + File file = new File( path + ".md5" ); + OutputStream os = new FileOutputStream( file ); + OutputStreamWriter osw = new OutputStreamWriter( os ); + String sum = digester.createChecksum( new File( path ), Digester.MD5 ); + if ( !isValid ) { - file = new File( repoUrl + relativePath + dirs + "/" + filename + "." + type + ".md5" ); - OutputStream os = new FileOutputStream( file ); - OutputStreamWriter osw = new OutputStreamWriter( os ); - if ( !isValid ) - { - osw.write( ChecksumArtifactReporter.byteArrayToHexStr( md5chk ) + "1" ); - } - else - { - osw.write( ChecksumArtifactReporter.byteArrayToHexStr( md5chk ) ); - } - osw.close(); + osw.write( sum + "1" ); } - - if ( sha1chk != null ) + else { - file = new File( repoUrl + relativePath + dirs + "/" + filename + "." + type + ".sha1" ); - OutputStream os = new FileOutputStream( file ); - OutputStreamWriter osw = new OutputStreamWriter( os ); - if ( !isValid ) - { - osw.write( ChecksumArtifactReporter.byteArrayToHexStr( sha1chk ) + "2" ); - } - else - { - osw.write( ChecksumArtifactReporter.byteArrayToHexStr( sha1chk ) ); - } - osw.close(); + osw.write( sum ); } + osw.close(); + + file = new File( path + ".sha1" ); + os = new FileOutputStream( file ); + osw = new OutputStreamWriter( os ); + String sha1sum = digester.createChecksum( new File( path ), Digester.SHA1 ); + if ( !isValid ) + { + osw.write( sha1sum + "2" ); + } + else + { + osw.write( sha1sum ); + } + osw.close(); } } @@ -192,45 +183,38 @@ public abstract class AbstractChecksumArtifactReporterTestCase String repoUrl = repository.getBasedir(); String url = repository.getBasedir() + "/" + filename + "." + type; - FileUtils.copyFile( new File( url ), new File( repoUrl + relativePath + filename + "." + type ) ); + String path = repoUrl + relativePath + filename + "." + type; + FileUtils.copyFile( new File( url ), new File( path ) ); //Create md5 and sha-1 checksum files.. - byte[] md5chk = createChecksum( repoUrl + relativePath + filename + "." + type, "MD5" ); - byte[] sha1chk = createChecksum( repoUrl + relativePath + filename + "." + type, "SHA-1" ); - File file; - - if ( md5chk != null ) + File file = new File( path + ".md5" ); + OutputStream os = new FileOutputStream( file ); + OutputStreamWriter osw = new OutputStreamWriter( os ); + String md5sum = digester.createChecksum( new File( path ), Digester.MD5 ); + if ( !isValid ) { - file = new File( repoUrl + relativePath + filename + "." + type + ".md5" ); - OutputStream os = new FileOutputStream( file ); - OutputStreamWriter osw = new OutputStreamWriter( os ); - if ( !isValid ) - { - osw.write( ChecksumArtifactReporter.byteArrayToHexStr( md5chk ) + "1" ); - } - else - { - osw.write( ChecksumArtifactReporter.byteArrayToHexStr( md5chk ) ); - } - osw.close(); + osw.write( md5sum + "1" ); } - - if ( sha1chk != null ) + else { - file = new File( repoUrl + relativePath + filename + "." 
+ type + ".sha1" ); - OutputStream os = new FileOutputStream( file ); - OutputStreamWriter osw = new OutputStreamWriter( os ); - if ( !isValid ) - { - osw.write( ChecksumArtifactReporter.byteArrayToHexStr( sha1chk ) + "2" ); - } - else - { - osw.write( ChecksumArtifactReporter.byteArrayToHexStr( sha1chk ) ); - } - osw.close(); + osw.write( md5sum ); } + osw.close(); + + file = new File( path + ".sha1" ); + os = new FileOutputStream( file ); + osw = new OutputStreamWriter( os ); + String sha1sum = digester.createChecksum( new File( path ), Digester.SHA1 ); + if ( !isValid ) + { + osw.write( sha1sum + "2" ); + } + else + { + osw.write( sha1sum ); + } + osw.close(); } /** @@ -248,37 +232,6 @@ public abstract class AbstractChecksumArtifactReporterTestCase osw.close(); } - /** - * Create a checksum from the specified metadata file. - * - * @throws FileNotFoundException - * @throws NoSuchAlgorithmException - * @throws IOException - */ - private byte[] createChecksum( String filename, String algo ) - throws FileNotFoundException, NoSuchAlgorithmException, IOException - { - - // TODO: share with ArtifactRepositoryIndex.getChecksum(), ChecksumArtifactReporter.getChecksum() - InputStream fis = new FileInputStream( filename ); - byte[] buffer = new byte[CHECKSUM_BUFFER_SIZE]; - - MessageDigest complete = MessageDigest.getInstance( algo ); - int numRead; - do - { - numRead = fis.read( buffer ); - if ( numRead > 0 ) - { - complete.update( buffer, 0, numRead ); - } - } - while ( numRead != -1 ); - fis.close(); - - return complete.digest(); - } - /** * Delete the test directory created in the repository. * diff --git a/maven-repository-reports-standard/src/test/java/org/apache/maven/repository/reporting/ArtifactReportProcessorTest.java b/maven-repository-reports-standard/src/test/java/org/apache/maven/repository/reporting/ArtifactReportProcessorTest.java index 450fc0389..4d6c697f3 100644 --- a/maven-repository-reports-standard/src/test/java/org/apache/maven/repository/reporting/ArtifactReportProcessorTest.java +++ b/maven-repository-reports-standard/src/test/java/org/apache/maven/repository/reporting/ArtifactReportProcessorTest.java @@ -40,6 +40,10 @@ public class ArtifactReportProcessorTest private DefaultArtifactReportProcessor processor; + private static final boolean ARTIFACT_FOUND = true; + + private static final boolean ARTIFACT_NOT_FOUND = false; + protected void setUp() throws Exception { @@ -64,7 +68,7 @@ public class ArtifactReportProcessorTest public void testNoProjectDescriptor() { MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer(); - queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND ); + queryLayer.addReturnValue( ARTIFACT_FOUND ); processor.setRepositoryQueryLayer( queryLayer ); setRequiredElements( artifact, VALID, VALID, VALID ); processor.processArtifact( null, artifact, reporter, null ); @@ -79,7 +83,7 @@ public class ArtifactReportProcessorTest public void testArtifactFoundButNoDirectDependencies() { MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer(); - queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND ); + queryLayer.addReturnValue( ARTIFACT_FOUND ); processor.setRepositoryQueryLayer( queryLayer ); setRequiredElements( artifact, VALID, VALID, VALID ); processor.processArtifact( model, artifact, reporter, null ); @@ -91,7 +95,7 @@ public class ArtifactReportProcessorTest public void testArtifactNotFound() { MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer(); - queryLayer.addReturnValue( 
RepositoryQueryLayer.ARTIFACT_NOT_FOUND ); + queryLayer.addReturnValue( ARTIFACT_NOT_FOUND ); processor.setRepositoryQueryLayer( queryLayer ); setRequiredElements( artifact, VALID, VALID, VALID ); processor.processArtifact( model, artifact, reporter, null ); @@ -110,12 +114,12 @@ public class ArtifactReportProcessorTest setRequiredElements( artifact, VALID, VALID, VALID ); MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer(); - queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND ); + queryLayer.addReturnValue( ARTIFACT_FOUND ); Dependency dependency = new Dependency(); setRequiredElements( dependency, VALID, VALID, VALID ); model.addDependency( dependency ); - queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND ); + queryLayer.addReturnValue( ARTIFACT_FOUND ); processor.setRepositoryQueryLayer( queryLayer ); processor.processArtifact( model, artifact, reporter, null ); @@ -131,12 +135,12 @@ public class ArtifactReportProcessorTest setRequiredElements( artifact, VALID, VALID, VALID ); MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer(); - queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND ); + queryLayer.addReturnValue( ARTIFACT_FOUND ); Dependency dependency = new Dependency(); setRequiredElements( dependency, VALID, VALID, VALID ); model.addDependency( dependency ); - queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND ); + queryLayer.addReturnValue( ARTIFACT_FOUND ); processor.setRepositoryQueryLayer( queryLayer ); processor.processArtifact( model, artifact, reporter, null ); @@ -151,20 +155,20 @@ public class ArtifactReportProcessorTest processor.setArtifactFactory( artifactFactory ); MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer(); - queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND ); + queryLayer.addReturnValue( ARTIFACT_FOUND ); Dependency dependency = new Dependency(); setRequiredElements( dependency, VALID, VALID, VALID ); model.addDependency( dependency ); - queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND ); + queryLayer.addReturnValue( ARTIFACT_FOUND ); model.addDependency( dependency ); - queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND ); + queryLayer.addReturnValue( ARTIFACT_FOUND ); model.addDependency( dependency ); - queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND ); + queryLayer.addReturnValue( ARTIFACT_FOUND ); model.addDependency( dependency ); - queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND ); + queryLayer.addReturnValue( ARTIFACT_FOUND ); model.addDependency( dependency ); - queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND ); + queryLayer.addReturnValue( ARTIFACT_FOUND ); setRequiredElements( artifact, VALID, VALID, VALID ); processor.setRepositoryQueryLayer( queryLayer ); @@ -180,20 +184,20 @@ public class ArtifactReportProcessorTest processor.setArtifactFactory( artifactFactory ); MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer(); - queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND ); + queryLayer.addReturnValue( ARTIFACT_FOUND ); Dependency dependency = new Dependency(); setRequiredElements( dependency, VALID, VALID, VALID ); model.addDependency( dependency ); - queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND ); + queryLayer.addReturnValue( ARTIFACT_FOUND ); model.addDependency( dependency ); - queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND ); + queryLayer.addReturnValue( ARTIFACT_FOUND ); model.addDependency( 
dependency ); - queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_NOT_FOUND ); + queryLayer.addReturnValue( ARTIFACT_NOT_FOUND ); model.addDependency( dependency ); - queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND ); + queryLayer.addReturnValue( ARTIFACT_FOUND ); model.addDependency( dependency ); - queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND ); + queryLayer.addReturnValue( ARTIFACT_FOUND ); setRequiredElements( artifact, VALID, VALID, VALID ); processor.setRepositoryQueryLayer( queryLayer ); @@ -210,7 +214,7 @@ public class ArtifactReportProcessorTest public void testEmptyGroupId() { MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer(); - queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND ); + queryLayer.addReturnValue( ARTIFACT_FOUND ); processor.setRepositoryQueryLayer( queryLayer ); setRequiredElements( artifact, EMPTY_STRING, VALID, VALID ); @@ -227,7 +231,7 @@ public class ArtifactReportProcessorTest public void testEmptyArtifactId() { MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer(); - queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND ); + queryLayer.addReturnValue( ARTIFACT_FOUND ); processor.setRepositoryQueryLayer( queryLayer ); setRequiredElements( artifact, VALID, EMPTY_STRING, VALID ); @@ -244,7 +248,7 @@ public class ArtifactReportProcessorTest public void testEmptyVersion() { MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer(); - queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND ); + queryLayer.addReturnValue( ARTIFACT_FOUND ); processor.setRepositoryQueryLayer( queryLayer ); setRequiredElements( artifact, VALID, VALID, EMPTY_STRING ); @@ -261,7 +265,7 @@ public class ArtifactReportProcessorTest public void testNullGroupId() { MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer(); - queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND ); + queryLayer.addReturnValue( ARTIFACT_FOUND ); processor.setRepositoryQueryLayer( queryLayer ); setRequiredElements( artifact, null, VALID, VALID ); @@ -278,7 +282,7 @@ public class ArtifactReportProcessorTest public void testNullArtifactId() { MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer(); - queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND ); + queryLayer.addReturnValue( ARTIFACT_FOUND ); processor.setRepositoryQueryLayer( queryLayer ); setRequiredElements( artifact, VALID, null, VALID ); @@ -295,7 +299,7 @@ public class ArtifactReportProcessorTest public void testNullVersion() { MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer(); - queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND ); + queryLayer.addReturnValue( ARTIFACT_FOUND ); processor.setRepositoryQueryLayer( queryLayer ); setRequiredElements( artifact, VALID, VALID, null ); @@ -312,7 +316,7 @@ public class ArtifactReportProcessorTest public void testMultipleFailures() { MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer(); - queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND ); + queryLayer.addReturnValue( ARTIFACT_FOUND ); processor.setRepositoryQueryLayer( queryLayer ); setRequiredElements( artifact, null, null, null ); @@ -337,12 +341,12 @@ public class ArtifactReportProcessorTest setRequiredElements( artifact, VALID, VALID, VALID ); MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer(); - queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND ); + queryLayer.addReturnValue( ARTIFACT_FOUND ); Dependency 
dependency = new Dependency(); setRequiredElements( dependency, null, VALID, VALID ); model.addDependency( dependency ); - queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND ); + queryLayer.addReturnValue( ARTIFACT_FOUND ); processor.setRepositoryQueryLayer( queryLayer ); processor.processArtifact( model, artifact, reporter, null ); @@ -362,12 +366,12 @@ public class ArtifactReportProcessorTest setRequiredElements( artifact, VALID, VALID, VALID ); MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer(); - queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND ); + queryLayer.addReturnValue( ARTIFACT_FOUND ); Dependency dependency = new Dependency(); setRequiredElements( dependency, VALID, null, VALID ); model.addDependency( dependency ); - queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND ); + queryLayer.addReturnValue( ARTIFACT_FOUND ); processor.setRepositoryQueryLayer( queryLayer ); processor.processArtifact( model, artifact, reporter, null ); @@ -387,12 +391,12 @@ public class ArtifactReportProcessorTest setRequiredElements( artifact, VALID, VALID, VALID ); MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer(); - queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND ); + queryLayer.addReturnValue( ARTIFACT_FOUND ); Dependency dependency = new Dependency(); setRequiredElements( dependency, VALID, VALID, null ); model.addDependency( dependency ); - queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND ); + queryLayer.addReturnValue( ARTIFACT_FOUND ); processor.setRepositoryQueryLayer( queryLayer ); processor.processArtifact( model, artifact, reporter, null ); @@ -412,12 +416,12 @@ public class ArtifactReportProcessorTest setRequiredElements( artifact, VALID, VALID, VALID ); MockRepositoryQueryLayer queryLayer = new MockRepositoryQueryLayer(); - queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND ); + queryLayer.addReturnValue( ARTIFACT_FOUND ); Dependency dependency = new Dependency(); setRequiredElements( dependency, null, null, null ); model.addDependency( dependency ); - queryLayer.addReturnValue( RepositoryQueryLayer.ARTIFACT_FOUND ); + queryLayer.addReturnValue( ARTIFACT_FOUND ); processor.setRepositoryQueryLayer( queryLayer ); processor.processArtifact( model, artifact, reporter, null ); diff --git a/maven-repository-reports-standard/src/test/java/org/apache/maven/repository/reporting/CacheTest.java b/maven-repository-reports-standard/src/test/java/org/apache/maven/repository/reporting/CacheTest.java index b0239d336..cfc83b309 100644 --- a/maven-repository-reports-standard/src/test/java/org/apache/maven/repository/reporting/CacheTest.java +++ b/maven-repository-reports-standard/src/test/java/org/apache/maven/repository/reporting/CacheTest.java @@ -134,10 +134,10 @@ public class CacheTest assertEquals( "check cache size", 1, cache.size() ); assertNull( "check cache miss", cache.get( "none" ) ); assertEquals( CACHE_HIT_RATIO, cache.getHitRate(), 0 ); - cache.flush(); + cache.clear(); assertNull( "check flushed object", cache.get( "key" ) ); assertEquals( (double) 0, cache.getHitRate(), 0 ); assertEquals( "check flushed cache size", 0, cache.size() ); - cache.flush(); + cache.clear(); } } diff --git a/maven-repository-reports-standard/src/test/java/org/apache/maven/repository/reporting/MockRepositoryQueryLayer.java b/maven-repository-reports-standard/src/test/java/org/apache/maven/repository/reporting/MockRepositoryQueryLayer.java index aeaa7e6d3..e6ec4eef3 100644 --- 
a/maven-repository-reports-standard/src/test/java/org/apache/maven/repository/reporting/MockRepositoryQueryLayer.java
+++ b/maven-repository-reports-standard/src/test/java/org/apache/maven/repository/reporting/MockRepositoryQueryLayer.java
@@ -69,7 +69,6 @@ public class MockRepositoryQueryLayer
 
     public boolean containsArtifact( Artifact artifact, Snapshot snapshot )
     {
-        // TODO
         return containsArtifact( artifact );
     }
 
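
Note on the Cache rewrite above: the hand-rolled DblLinkedList bookkeeping is gone; recency is now tracked by an insertion-ordered LinkedHashMap, where a remove-then-put on every hit pushes the entry to the end of the iteration order and trimming evicts from the head. A minimal, self-contained sketch of that pattern follows (class and method names are illustrative only, not project code):

import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;

/** Minimal sketch of the remove-and-re-put promotion used by the reworked Cache. */
public class LruSketch
{
    private final Map cache = new LinkedHashMap();

    private final int maxSize;

    public LruSketch( int maxSize )
    {
        this.maxSize = maxSize;
    }

    public synchronized Object get( Object key )
    {
        Object value = null;
        if ( cache.containsKey( key ) )
        {
            // remove and put: promotes the entry to the end of the LinkedHashMap's iteration order
            value = cache.remove( key );
            cache.put( key, value );
        }
        return value;
    }

    public synchronized Object put( Object key, Object value )
    {
        Object old = cache.remove( key );
        cache.put( key, value );

        // the oldest (least recently promoted) entries sit at the head of the iteration order
        Iterator iterator = cache.keySet().iterator();
        while ( cache.size() > maxSize )
        {
            iterator.next();
            iterator.remove();
        }
        return old;
    }

    public static void main( String[] args )
    {
        LruSketch sketch = new LruSketch( 2 );
        sketch.put( "a", "1" );
        sketch.put( "b", "2" );
        sketch.get( "a" );      // promotes "a"
        sketch.put( "c", "3" ); // evicts "b", the least recently used entry
        System.out.println( sketch.get( "b" ) ); // null
        System.out.println( sketch.get( "a" ) ); // 1
    }
}

The same idea explains why put() in the patch does a containsKey/remove before the put, and why manageCache() can simply delete whatever the entry-set iterator returns first.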
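Note on the checksum reporters: both now delegate digest work to the Digester component from maven-repository-utils (the Digester.MD5 / Digester.SHA1 constants and the createChecksum / verifyChecksum calls are taken from the hunks above), which removes the duplicated createChecksum / byteArrayToHexStr helpers. As a rough mental model only, not the actual maven-repository-utils implementation, the verification reduces to hashing the file and comparing the hex digest against the sidecar file's contents:

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

/** Plain-JDK sketch of what a verifyChecksum( file, expected, algorithm ) call boils down to. */
public class ChecksumSketch
{
    private static final int BUFFER_SIZE = 16384;

    /** algorithm is a JCA name such as "MD5" or "SHA-1". */
    public static boolean verifyChecksum( File file, String expected, String algorithm )
        throws NoSuchAlgorithmException, IOException
    {
        // sidecar files sometimes append the file name after the hash, so only compare the leading hex digest
        return expected.trim().toLowerCase().startsWith( createChecksum( file, algorithm ).toLowerCase() );
    }

    public static String createChecksum( File file, String algorithm )
        throws NoSuchAlgorithmException, IOException
    {
        MessageDigest digest = MessageDigest.getInstance( algorithm );
        InputStream is = new FileInputStream( file );
        try
        {
            byte[] buffer = new byte[BUFFER_SIZE];
            int numRead;
            while ( ( numRead = is.read( buffer ) ) != -1 )
            {
                digest.update( buffer, 0, numRead );
            }
        }
        finally
        {
            is.close();
        }

        // hex-encode the digest, two characters per byte
        StringBuffer hex = new StringBuffer();
        byte[] raw = digest.digest();
        for ( int i = 0; i < raw.length; i++ )
        {
            String s = Integer.toHexString( raw[i] & 0xFF );
            if ( s.length() == 1 )
            {
                hex.append( '0' );
            }
            hex.append( s );
        }
        return hex.toString();
    }
}

In the patch the expected value is read with FileUtils.fileRead( md5File ) / fileRead( sha1File ); whether the real Digester trims or tolerates an appended filename is up to that component, not this sketch.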
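Note on the test support changes: AbstractChecksumArtifactReporterTestCase now looks up the Digester via Plexus and writes the .md5/.sha1 sidecar files itself, appending an extra character when it wants a deliberately invalid checksum. A hypothetical fixture helper in the same spirit (ChecksumSketch is the illustrative class from the previous example, not project code):

import java.io.File;
import java.io.FileWriter;

/**
 * Sketch of the fixture pattern used in the test case: write the digest next to the
 * artifact, corrupting it on request so the failure path can be exercised.
 */
public class ChecksumFixtureSketch
{
    /** extension is ".md5" or ".sha1"; algorithm is the matching JCA name. */
    public static void writeChecksumFile( File artifact, String extension, String algorithm, boolean valid )
        throws Exception
    {
        String sum = ChecksumSketch.createChecksum( artifact, algorithm );
        FileWriter writer = new FileWriter( new File( artifact.getPath() + extension ) );
        try
        {
            // appending any character makes the stored checksum mismatch the file
            writer.write( valid ? sum : sum + "1" );
        }
        finally
        {
            writer.close();
        }
    }
}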