Replacing plexus-digest with archiva-checksum

git-svn-id: https://svn.apache.org/repos/asf/archiva/trunk@648115 13f79535-47bb-0310-9956-ffa450edef68
Joakim Erdfelt 2008-04-15 04:44:17 +00:00
parent 11e37d73c4
commit 92362e9ee3
13 changed files with 76 additions and 604 deletions

View File

@@ -18,12 +18,11 @@
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-modules</artifactId>
<artifactId>archiva-base</artifactId>
<version>1.1-SNAPSHOT</version>
</parent>
<artifactId>archiva-checksum</artifactId>
<name>Archiva Checksum</name>
<version>1.0-SNAPSHOT</version>
<packaging>jar</packaging>
@@ -47,19 +46,12 @@
<dependency>
<groupId>commons-lang</groupId>
<artifactId>commons-lang</artifactId>
<version>2.1</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-io</artifactId>
<version>1.3.2</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>3.8.1</version>
<scope>test</scope>
</dependency>
</dependencies>
</project>

View File

@@ -141,11 +141,9 @@ public boolean isValidChecksum( ChecksumAlgorithm algorithm )
* to the checksum.
*
* @param algorithms the algorithms to check for.
* @return true if the checksums report that the reference file is valid.
* @throws IOException if unable to validate the checksums.
* @return true if the checksums report that the reference file is valid, false if invalid.
*/
public boolean isValidChecksums( ChecksumAlgorithm algorithms[] )
throws IOException
{
FileInputStream fis = null;
try
@@ -166,30 +164,46 @@ public boolean isValidChecksums( ChecksumAlgorithm algorithms[] )
// Any checksums?
if ( checksums.isEmpty() )
{
// No checksum objects, no checksum files; default to valid.
return true;
// No checksum objects, no checksum files; default to invalid.
return false;
}
// Parse file once, for all checksums.
fis = new FileInputStream( referenceFile );
Checksum.update( checksums, fis );
try
{
fis = new FileInputStream( referenceFile );
Checksum.update( checksums, fis );
}
catch ( IOException e )
{
log.warn( "Unable to update checksum: " + e.getMessage() );
return false;
}
boolean valid = true;
// check the checksum files
for ( Checksum checksum : checksums )
try
{
ChecksumAlgorithm checksumAlgorithm = checksum.getAlgorithm();
File checksumFile = getChecksumFile( checksumAlgorithm );
String rawChecksum = FileUtils.readFileToString( checksumFile );
String expectedChecksum = parseChecksum( rawChecksum, checksumAlgorithm, referenceFile.getName() );
if ( StringUtils.equalsIgnoreCase( expectedChecksum, checksum.getChecksum() ) == false )
for ( Checksum checksum : checksums )
{
valid = false;
ChecksumAlgorithm checksumAlgorithm = checksum.getAlgorithm();
File checksumFile = getChecksumFile( checksumAlgorithm );
String rawChecksum = FileUtils.readFileToString( checksumFile );
String expectedChecksum = parseChecksum( rawChecksum, checksumAlgorithm, referenceFile.getName() );
if ( StringUtils.equalsIgnoreCase( expectedChecksum, checksum.getChecksum() ) == false )
{
valid = false;
}
}
}
catch ( IOException e )
{
log.warn( "Unable to read / parse checksum: " + e.getMessage() );
return false;
}
return valid;
}
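With this change isValidChecksums reports problems through its return value: no checksum files, an unreadable reference file, or a failed read of a checksum file all come back as false instead of defaulting to valid or propagating IOException. A minimal caller sketch (the jar path is hypothetical; the types and calls are the ones shown in this commit):

import java.io.File;

import org.apache.archiva.checksum.ChecksumAlgorithm;
import org.apache.archiva.checksum.ChecksummedFile;

public class ValidateArtifactExample
{
    public static void main( String[] args )
        throws Exception
    {
        // Hypothetical artifact location; any file with .sha1 / .md5 siblings works.
        File jarFile = new File( "target/test-repo/artifact.jar" );

        ChecksummedFile checksummedFile = new ChecksummedFile( jarFile );

        // False when the checksum files are missing, unreadable, or do not match.
        boolean valid = checksummedFile.isValidChecksums( new ChecksumAlgorithm[] {
            ChecksumAlgorithm.SHA1,
            ChecksumAlgorithm.MD5 } );

        System.out.println( "artifact.jar checksums valid: " + valid );
    }
}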

View File

@@ -158,7 +158,7 @@ public void testIsValidChecksumNoChecksumFiles()
File jarFile = createTestableJar( "examples/redback-authz-open.jar", false, false );
ChecksummedFile checksummedFile = new ChecksummedFile( jarFile );
assertTrue( "ChecksummedFile.isValid(SHA1,MD5)", checksummedFile.isValidChecksums( new ChecksumAlgorithm[] {
assertFalse( "ChecksummedFile.isValid(SHA1,MD5)", checksummedFile.isValidChecksums( new ChecksumAlgorithm[] {
ChecksumAlgorithm.SHA1,
ChecksumAlgorithm.MD5 } ) );

View File

@@ -46,16 +46,6 @@
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</dependency>
<dependency>
<groupId>org.codehaus.plexus</groupId>
<artifactId>plexus-digest</artifactId>
<exclusions>
<exclusion>
<groupId>org.codehaus.plexus</groupId>
<artifactId>plexus-container-default</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.codehaus.plexus</groupId>
<artifactId>plexus-component-api</artifactId>

View File

@@ -1,240 +0,0 @@
package org.apache.maven.archiva.common.utils;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import org.codehaus.plexus.digest.ChecksumFile;
import org.codehaus.plexus.digest.Digester;
import org.codehaus.plexus.digest.DigesterException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Checksums utility component to validate or update checksums on Files.
*
* @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id$
*
* @plexus.component role="org.apache.maven.archiva.common.utils.Checksums"
*/
public class Checksums
{
private Logger log = LoggerFactory.getLogger(Checksums.class);
/**
* @plexus.requirement role-hint="sha1"
*/
private Digester digestSha1;
/**
* @plexus.requirement role-hint="md5"
*/
private Digester digestMd5;
/**
* @plexus.requirement
*/
private ChecksumFile checksumFile;
public boolean check( File file )
{
boolean checksPass = true;
File sha1File = getSha1File( file );
File md5File = getMd5File( file );
// Both files missing is a failure.
if ( !sha1File.exists() && !md5File.exists() )
{
log.error( "File " + file.getPath() + " has no checksum files (sha1 or md5)." );
checksPass = false;
}
if ( sha1File.exists() )
{
// Bad sha1 checksum is a failure.
if ( !validateChecksum( sha1File, "sha1" ) )
{
log.warn( "SHA1 is incorrect for " + file.getPath() );
checksPass = false;
}
}
if ( md5File.exists() )
{
// Bad md5 checksum is a failure.
if ( !validateChecksum( md5File, "md5" ) )
{
log.warn( "MD5 is incorrect for " + file.getPath() );
checksPass = false;
}
}
// TODO: eek!
if ( !checksPass )
{
// On failure, delete files.
if ( sha1File.exists() )
{
sha1File.delete();
}
if ( md5File.exists() )
{
md5File.delete();
}
file.delete();
}
return checksPass;
}
public boolean update( File file )
{
boolean checksPass = true;
File sha1File = getSha1File( file );
File md5File = getMd5File( file );
if ( !fixChecksum( file, sha1File, digestSha1 ) )
{
checksPass = false;
}
if ( !fixChecksum( file, md5File, digestMd5 ) )
{
checksPass = false;
}
return checksPass;
}
private boolean createChecksum( File localFile, Digester digester )
{
try
{
checksumFile.createChecksum( localFile, digester );
return true;
}
catch ( DigesterException e )
{
log.warn( "Unable to create " + digester.getFilenameExtension() + " file: " + e.getMessage(), e );
return false;
}
catch ( IOException e )
{
log.warn( "Unable to create " + digester.getFilenameExtension() + " file: " + e.getMessage(), e );
return false;
}
}
private boolean fixChecksum( File localFile, File hashFile, Digester digester )
{
String ext = digester.getFilenameExtension();
if ( !hashFile.getPath().endsWith( ext ) )
{
throw new IllegalArgumentException( "Cannot fix " + hashFile.getPath() + " using " + ext + " digester." );
}
// If hashfile doesn't exist, create it.
if ( !hashFile.exists() )
{
return createChecksum( localFile, digester );
}
// Validate checksum, if bad, recreate it.
try
{
if ( checksumFile.isValidChecksum( hashFile ) )
{
log.debug( "Valid checksum: " + hashFile.getPath() );
return true;
}
else
{
log.debug( "Not valid checksum: " + hashFile.getPath() );
return createChecksum( localFile, digester );
}
}
catch ( FileNotFoundException e )
{
log.warn( "Unable to find " + ext + " file: " + hashFile.getAbsolutePath(), e );
return false;
}
catch ( DigesterException e )
{
log.warn( "Unable to process " + ext + " file: " + hashFile.getAbsolutePath(), e );
return false;
}
catch ( IOException e )
{
log.warn( "Unable to process " + ext + " file: " + hashFile.getAbsolutePath(), e );
return false;
}
}
private File getMd5File( File file )
{
return new File( file.getAbsolutePath() + ".md5" );
}
private File getSha1File( File file )
{
return new File( file.getAbsolutePath() + ".sha1" );
}
private boolean validateChecksum( File hashFile, String type )
{
try
{
boolean validity = checksumFile.isValidChecksum( hashFile );
if ( validity )
{
log.debug( "Valid checksum: " + hashFile.getPath() );
}
else
{
log.debug( "Not valid checksum: " + hashFile.getPath() );
}
return validity;
}
catch ( FileNotFoundException e )
{
log.warn( "Unable to find " + type + " file: " + hashFile.getAbsolutePath(), e );
return false;
}
catch ( DigesterException e )
{
log.warn( "Unable to process " + type + " file: " + hashFile.getAbsolutePath(), e );
return false;
}
catch ( IOException e )
{
log.warn( "Unable to process " + type + " file: " + hashFile.getAbsolutePath(), e );
return false;
}
}
}
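The two public operations of the deleted component, check( File ) and update( File ), map onto the archiva-checksum API used in the rest of this commit. A hedged migration sketch (the wrapper class and its throws clauses are illustrative, not part of the commit):

import java.io.File;
import java.io.IOException;

import org.apache.archiva.checksum.ChecksumAlgorithm;
import org.apache.archiva.checksum.ChecksummedFile;

public class ChecksumsMigrationSketch
{
    private static final ChecksumAlgorithm[] ALGORITHMS =
        new ChecksumAlgorithm[] { ChecksumAlgorithm.SHA1, ChecksumAlgorithm.MD5 };

    // Roughly what Checksums.check( file ) did: validate the .sha1 / .md5 files.
    public static boolean check( File file )
        throws IOException
    {
        return new ChecksummedFile( file ).isValidChecksums( ALGORITHMS );
    }

    // Roughly what Checksums.update( file ) did: create or correct the checksum files.
    // fixChecksums( algorithms ) is assumed to signal success via its boolean return,
    // as the bare call sites added elsewhere in this commit suggest.
    public static boolean update( File file )
        throws IOException
    {
        return new ChecksummedFile( file ).fixChecksums( ALGORITHMS );
    }
}

One behavioural difference: the old check( File ) also deleted the artifact and its checksum files on failure; after this commit that cleanup lives in ChecksumPolicy's FAIL branch (see the ChecksumPolicy diff below) rather than in the checksum component.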

View File

@@ -1,291 +0,0 @@
package org.apache.maven.archiva.common.utils;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.commons.io.FileUtils;
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
/**
* ChecksumsTest
*
* @author <a href="mailto:joakime@apache.org">Joakim Erdfelt</a>
* @version $Id$
*/
public class ChecksumsTest
extends PlexusInSpringTestCase
{
private static final String GOOD = "good";
private static final String BAD = "bad";
public void testCheckOnFileOnly()
throws Exception
{
assertCheck( false, null, null );
}
public void testCheckOnFileWithBadMd5AndBadSha1()
throws Exception
{
assertCheck( false, BAD, BAD );
}
public void testCheckOnFileWithBadMd5AndGoodSha1()
throws Exception
{
assertCheck( false, BAD, GOOD );
}
public void testCheckOnFileWithBadMd5Only()
throws Exception
{
assertCheck( false, BAD, null );
}
public void testCheckOnFileWithBadSha1Only()
throws Exception
{
assertCheck( false, null, BAD );
}
public void testCheckOnFileWithGoodMd5AndBadSha1()
throws Exception
{
assertCheck( false, GOOD, BAD );
}
public void testCheckOnFileWithGoodMd5AndGoodSha1()
throws Exception
{
assertCheck( true, GOOD, GOOD );
}
public void testCheckOnFileWithGoodMd5Only()
throws Exception
{
assertCheck( true, GOOD, null );
}
public void testCheckOnFileWithGoodSha1Only()
throws Exception
{
assertCheck( true, null, GOOD );
}
public void testUpdateOnFileOnly()
throws Exception
{
assertUpdate( true, null, null );
}
public void testUpdateOnFileWithBadMd5AndBadSha1()
throws Exception
{
assertUpdate( true, BAD, BAD );
}
public void testUpdateOnFileWithBadMd5AndGoodSha1()
throws Exception
{
assertUpdate( true, BAD, GOOD );
}
public void testUpdateOnFileWithBadMd5Only()
throws Exception
{
assertUpdate( true, BAD, null );
}
public void testUpdateOnFileWithBadSha1Only()
throws Exception
{
assertUpdate( true, null, BAD );
}
public void testUpdateOnFileWithGoodMd5AndBadSha1()
throws Exception
{
assertUpdate( true, GOOD, BAD );
}
public void testUpdateOnFileWithGoodMd5AndGoodSha1()
throws Exception
{
assertUpdate( true, GOOD, GOOD );
}
public void testUpdateOnFileWithGoodMd5Only()
throws Exception
{
assertUpdate( true, GOOD, null );
}
public void testUpdateOnFileWithGoodSha1Only()
throws Exception
{
assertUpdate( true, null, GOOD );
}
private void assertCheck( boolean expectedResult, String md5State, String sha1State )
throws Exception
{
Checksums checksums = lookupChecksums();
File localFile = createTestableFiles( md5State, sha1State );
boolean actualResult = checksums.check( localFile );
String msg = createMessage( "check", md5State, sha1State );
if ( actualResult == false )
{
assertFalse( msg + " local file should not exist:", localFile.exists() );
File md5File = new File( localFile.getAbsolutePath() + ".md5" );
File sha1File = new File( localFile.getAbsolutePath() + ".sha1" );
assertFalse( msg + " local md5 file should not exist:", md5File.exists() );
assertFalse( msg + " local sha1 file should not exist:", sha1File.exists() );
}
assertEquals( msg, expectedResult, actualResult );
}
private void assertUpdate( boolean expectedResult, String md5State, String sha1State )
throws Exception
{
Checksums checksums = lookupChecksums();
File localFile = createTestableFiles( md5State, sha1State );
boolean actualResult = checksums.update( localFile );
String msg = createMessage( "update", md5State, sha1State );
assertEquals( msg, expectedResult, actualResult );
// End result should be legitimate SHA1 and MD5 files.
File md5File = new File( localFile.getAbsolutePath() + ".md5" );
File sha1File = new File( localFile.getAbsolutePath() + ".sha1" );
assertTrue( "ChecksumPolicy.apply(FIX) md5 should exist.", md5File.exists() && md5File.isFile() );
assertTrue( "ChecksumPolicy.apply(FIX) sha1 should exist.", sha1File.exists() && sha1File.isFile() );
String actualMd5Contents = readChecksumFile( md5File );
String actualSha1Contents = readChecksumFile( sha1File );
String expectedMd5Contents = "360ccd01d8a0a2d94b86f9802c2fc548 artifact.jar";
String expectedSha1Contents = "7dd8929150664f182db60ad15f20359d875f059f artifact.jar";
assertEquals( msg + ": md5 contents:", expectedMd5Contents, actualMd5Contents );
assertEquals( msg + ": sha1 contents:", expectedSha1Contents, actualSha1Contents );
}
/**
* Read the first line from the checksum file, and return it (trimmed).
*/
private String readChecksumFile( File checksumFile )
throws Exception
{
FileReader freader = null;
BufferedReader buf = null;
try
{
freader = new FileReader( checksumFile );
buf = new BufferedReader( freader );
return buf.readLine();
}
finally
{
if ( buf != null )
{
buf.close();
}
if ( freader != null )
{
freader.close();
}
}
}
private String createMessage( String method, String md5State, String sha1State )
{
StringBuffer msg = new StringBuffer();
msg.append( "Expected result of Checksums." ).append( method );
msg.append( "() when working with " );
if ( md5State == null )
{
msg.append( "NO" );
}
else
{
msg.append( "a " ).append( md5State.toUpperCase() );
}
msg.append( " MD5 and " );
if ( sha1State == null )
{
msg.append( "NO" );
}
else
{
msg.append( "a " ).append( sha1State.toUpperCase() );
}
msg.append( " SHA1:" );
return msg.toString();
}
private File createTestableFiles( String md5State, String sha1State )
throws Exception
{
File destDir = new File( "target/checksum-tests/" + getName() + "/" );
FileUtils.copyFileToDirectory( ResourceUtils.getResource( "/checksums/artifact.jar" ), destDir );
if ( md5State != null )
{
File md5File = ResourceUtils.getResource( "/checksums/artifact.jar.md5-" + md5State );
assertTrue( "Testable file exists: " + md5File.getName() + ":", md5File.exists() && md5File.isFile() );
File destFile = new File( destDir, "artifact.jar.md5" );
FileUtils.copyFile( md5File, destFile );
}
if ( sha1State != null )
{
File sha1File = ResourceUtils.getResource( "/checksums/artifact.jar.sha1-" + sha1State );
assertTrue( "Testable file exists: " + sha1File.getName() + ":", sha1File.exists() && sha1File.isFile() );
File destFile = new File( destDir, "artifact.jar.sha1" );
FileUtils.copyFile( sha1File, destFile );
}
File localFile = new File( destDir, "artifact.jar" );
return localFile;
}
private Checksums lookupChecksums()
throws Exception
{
Checksums checksums = (Checksums) lookup( Checksums.class );
assertNotNull( checksums );
return checksums;
}
}
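The deleted test above drove the component through a Plexus/Spring container lookup; the replacement module declares plain JUnit 3.8.1, so an equivalent positive case needs no container at all. A sketch, assuming a fixture jar with matching .sha1 and .md5 files under src/test/resources (the path and class name are illustrative):

import java.io.File;

import junit.framework.TestCase;

import org.apache.archiva.checksum.ChecksumAlgorithm;
import org.apache.archiva.checksum.ChecksummedFile;

public class GoodChecksumsSketchTest
    extends TestCase
{
    public void testGoodSha1AndGoodMd5()
        throws Exception
    {
        // Illustrative fixture: artifact.jar with correct artifact.jar.sha1 / .md5 beside it.
        File jarFile = new File( "src/test/resources/checksums/artifact.jar" );

        ChecksummedFile checksummedFile = new ChecksummedFile( jarFile );

        assertTrue( "good sha1 + good md5 should validate",
                    checksummedFile.isValidChecksums( new ChecksumAlgorithm[] {
                        ChecksumAlgorithm.SHA1,
                        ChecksumAlgorithm.MD5 } ) );
    }
}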

View File

@@ -32,6 +32,10 @@
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-common</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-checksum</artifactId>
</dependency>
<dependency>
<groupId>org.codehaus.plexus</groupId>
<artifactId>plexus-spring</artifactId>

View File

@@ -24,8 +24,9 @@
import java.util.List;
import java.util.Properties;
import org.apache.archiva.checksum.ChecksumAlgorithm;
import org.apache.archiva.checksum.ChecksummedFile;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.common.utils.Checksums;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -65,10 +66,7 @@ public class ChecksumPolicy
*/
public static final String FIX = "fix";
/**
* @plexus.requirement
*/
private Checksums checksums;
private ChecksumAlgorithm[] algorithms = new ChecksumAlgorithm[] { ChecksumAlgorithm.SHA1, ChecksumAlgorithm.MD5 };
private List<String> options = new ArrayList<String>();
@@ -105,25 +103,21 @@ public void applyPolicy( String policySetting, Properties request, File localFil
if ( FAIL.equals( policySetting ) )
{
if( checksums.check( localFile ) )
ChecksummedFile checksum = new ChecksummedFile( localFile );
if ( checksum.isValidChecksums( algorithms ) )
{
return;
}
for ( ChecksumAlgorithm algorithm : algorithms )
{
File file = new File( localFile.getAbsolutePath() + "." + algorithm.getExt() );
if ( file.exists() )
{
file.delete();
}
}
File sha1File = new File( localFile.getAbsolutePath() + ".sha1" );
File md5File = new File( localFile.getAbsolutePath() + ".md5" );
// On failure, delete files.
if ( sha1File.exists() )
{
sha1File.delete();
}
if ( md5File.exists() )
{
md5File.delete();
}
localFile.delete();
throw new PolicyViolationException( "Checksums do not match, policy set to FAIL, "
+ "deleting checksum files and local file " + localFile.getAbsolutePath() + "." );
@@ -131,7 +125,8 @@ public void applyPolicy( String policySetting, Properties request, File localFil
if ( FIX.equals( policySetting ) )
{
if( checksums.update( localFile ) )
ChecksummedFile checksum = new ChecksummedFile( localFile );
if( checksum.fixChecksums( algorithms ) )
{
log.debug( "Checksum policy set to FIX, checksum files have been updated." );
return;
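Because the FAIL branch now iterates the configured algorithms and builds each checksum file name from ChecksumAlgorithm.getExt(), the cleanup no longer hard-codes .sha1 and .md5. A small helper capturing that pattern (the helper itself is illustrative, not part of the commit):

import java.io.File;

import org.apache.archiva.checksum.ChecksumAlgorithm;

public class ChecksumFileCleanup
{
    // Delete any <file>.<ext> checksum file for the given algorithms, mirroring
    // the FAIL branch above; returns the number of files actually removed.
    public static int deleteChecksumFiles( File localFile, ChecksumAlgorithm[] algorithms )
    {
        int deleted = 0;
        for ( ChecksumAlgorithm algorithm : algorithms )
        {
            File checksumFile = new File( localFile.getAbsolutePath() + "." + algorithm.getExt() );
            if ( checksumFile.exists() && checksumFile.delete() )
            {
                deleted++;
            }
        }
        return deleted;
    }
}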

View File

@@ -44,6 +44,10 @@
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-dependency-graph</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-checksum</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-configuration</artifactId>

View File

@@ -19,11 +19,12 @@
* under the License.
*/
import org.apache.archiva.checksum.ChecksumAlgorithm;
import org.apache.archiva.checksum.ChecksummedFile;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.math.NumberUtils;
import org.apache.commons.lang.time.DateUtils;
import org.apache.maven.archiva.common.utils.Checksums;
import org.apache.maven.archiva.common.utils.PathUtil;
import org.apache.maven.archiva.common.utils.VersionComparator;
import org.apache.maven.archiva.common.utils.VersionUtil;
@@ -94,11 +95,8 @@ public class MetadataTools
*/
private FileTypes filetypes;
/**
* @plexus.requirement
*/
private Checksums checksums;
private ChecksumAlgorithm[] algorithms = new ChecksumAlgorithm[] { ChecksumAlgorithm.SHA1, ChecksumAlgorithm.MD5 };
private List<String> artifactPatterns;
private Map<String, Set<String>> proxies;
@@ -519,7 +517,8 @@ public void updateMetadata( ManagedRepositoryContent managedRepository, ProjectR
// Save the metadata model to disk.
RepositoryMetadataWriter.write( metadata, metadataFile );
checksums.update( metadataFile );
ChecksummedFile checksum = new ChecksummedFile( metadataFile );
checksum.fixChecksums( algorithms );
}
private Date toLastUpdatedDate( long lastUpdated )
@@ -717,7 +716,8 @@ else if ( VersionUtil.isGenericSnapshot( latestVersion ) )
// Save the metadata model to disk.
RepositoryMetadataWriter.write( metadata, metadataFile );
checksums.update( metadataFile );
ChecksummedFile checksum = new ChecksummedFile( metadataFile );
checksum.fixChecksums( algorithms );
}
private void initConfigVariables()

View File

@@ -29,6 +29,7 @@
<packaging>pom</packaging>
<modules>
<module>archiva-checksum</module>
<module>archiva-common</module>
<module>archiva-policies</module>
<module>archiva-configuration</module>

View File

@@ -28,7 +28,8 @@
import java.util.Collections;
import java.util.List;
import org.apache.maven.archiva.common.utils.Checksums;
import org.apache.archiva.checksum.ChecksumAlgorithm;
import org.apache.archiva.checksum.ChecksummedFile;
import org.apache.maven.archiva.common.utils.VersionComparator;
import org.apache.maven.archiva.common.utils.VersionUtil;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
@@ -137,14 +138,11 @@ public class UploadAction
* @plexus.requirement
*/
private RepositoryContentFactory repositoryFactory;
private ChecksumAlgorithm[] algorithms = new ChecksumAlgorithm[] { ChecksumAlgorithm.SHA1, ChecksumAlgorithm.MD5 };
private ProjectModelWriter pomWriter = new ProjectModel400Writer();
/**
* @plexus.requirement
*/
private Checksums checksums;
public void setUpload( File file )
{
this.file = file;
@@ -422,8 +420,8 @@ private void updateMetadata( File metadataFile )
}
RepositoryMetadataWriter.write( metadata, metadataFile );
checksums.update( metadataFile );
ChecksummedFile checksum = new ChecksummedFile( metadataFile );
checksum.fixChecksums( algorithms );
}
public void validate()

View File

@@ -212,6 +212,11 @@
<artifactId>archiva-artifact-reports</artifactId>
<version>1.1-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-checksum</artifactId>
<version>1.1-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>archiva-common</artifactId>