Worked on MNG-309 and subtasks...

o Added digest creation/verification for POMs

o Added separate logging of kickouts and exclusions for ArtifactDiscoverer to keep it from polluting the repository.report.txt

o Added transactional behavior for each artifact conversion: if the artifact, its POM, or any of the myriad supporting files cannot be converted/created, then all files associated with that artifact are backed out of the target repo.

  NOTE: This WILL NOT happen if "--force" is used.

o Translating '+' characters to '-' in legacy artifact paths (not in v4 source repo paths, since we'll assume those are valid for use in m2). This is taking place inside the V3PomRewriter currently.

Validated all of this on localhost using a test repo. Next step is to validate on beaver with production source and test target repos, and fix anything that's outstanding (also add test cases to the test repo as needed).



git-svn-id: https://svn.apache.org/repos/asf/maven/components/trunk@164116 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
John Dennis Casey 2005-04-21 20:34:10 +00:00
parent 570d67e98c
commit a02dd8e9d9
16 changed files with 699 additions and 412 deletions

View File

@ -1,8 +1,8 @@
package org.apache.maven.tools.repoclean;
/*
* ==================================================================== Copyright 2001-2004 The
* Apache Software Foundation.
* ====================================================================
* Copyright 2001-2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
@ -21,13 +21,16 @@ import org.apache.maven.artifact.metadata.ArtifactMetadata;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
import org.apache.maven.tools.repoclean.artifact.metadata.ProjectMetadata;
import org.apache.maven.tools.repoclean.digest.ArtifactDigestVerifier;
import org.apache.maven.tools.repoclean.digest.DigestVerifier;
import org.apache.maven.tools.repoclean.discover.ArtifactDiscoverer;
import org.apache.maven.tools.repoclean.index.ArtifactIndexer;
import org.apache.maven.tools.repoclean.report.FileReporter;
import org.apache.maven.tools.repoclean.report.PathLister;
import org.apache.maven.tools.repoclean.report.ReportWriteException;
import org.apache.maven.tools.repoclean.report.Reporter;
import org.apache.maven.tools.repoclean.rewrite.ArtifactPomRewriter;
import org.apache.maven.tools.repoclean.transaction.RewriteTransaction;
import org.apache.maven.tools.repoclean.transaction.RollbackException;
import org.codehaus.plexus.PlexusConstants;
import org.codehaus.plexus.PlexusContainer;
import org.codehaus.plexus.context.Context;
@ -48,6 +51,7 @@ import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
@ -62,7 +66,9 @@ public class RepositoryCleaner
public static final String ROLE = RepositoryCleaner.class.getName();
private ArtifactDigestVerifier artifactDigestVerifier;
private static final String REPORTS_DIR_DATE_FORMAT = "dd-MMM-yyyy_hh.mm.ss";
private DigestVerifier digestVerifier;
private ArtifactRepositoryLayout bridgingLayout;
@ -77,7 +83,7 @@ public class RepositoryCleaner
public void cleanRepository( RepositoryCleanerConfiguration configuration )
throws Exception
{
File reportsBase = normalizeReportsBase( configuration.getReportsPath() );
File reportsBase = formatReportsBase( configuration.getReportsPath() );
File sourceRepositoryBase = normalizeSourceRepositoryBase( configuration.getSourceRepositoryPath() );
@ -99,6 +105,10 @@ public class RepositoryCleaner
ArtifactDiscoverer artifactDiscoverer = null;
List artifacts = null;
PathLister kickoutLister = null;
PathLister excludeLister = null;
try
{
artifactDiscoverer = (ArtifactDiscoverer) container.lookup( ArtifactDiscoverer.ROLE, configuration
@ -111,8 +121,14 @@ public class RepositoryCleaner
try
{
File kickoutsList = new File(reportsBase, "kickouts.txt");
File excludesList = new File(reportsBase, "excludes.txt");
kickoutLister = new PathLister(kickoutsList);
excludeLister = new PathLister(excludesList);
artifacts = artifactDiscoverer.discoverArtifacts( sourceRepositoryBase, repoReporter,
configuration.getBlacklistedPatterns() );
configuration.getBlacklistedPatterns(), excludeLister, kickoutLister );
}
catch ( Exception e )
{
@ -126,6 +142,9 @@ public class RepositoryCleaner
{
container.release( artifactDiscoverer );
}
excludeLister.close();
kickoutLister.close();
}
if ( artifacts != null )
@ -257,6 +276,8 @@ public class RepositoryCleaner
{
Artifact artifact = (Artifact) it.next();
RewriteTransaction transaction = new RewriteTransaction( artifact );
String artifactReportPath = buildArtifactReportPath( artifact );
FileReporter artifactReporter = null;
@ -269,6 +290,8 @@ public class RepositoryCleaner
File artifactSource = new File( sourceRepo.getBasedir(), sourceRepo.pathOf( artifact ) );
File artifactTarget = new File( targetRepo.getBasedir(), targetRepo.pathOf( artifact ) );
transaction.addFile( artifactTarget );
artifact.setFile( artifactSource );
boolean targetMissingOrOlder = !artifactTarget.exists()
@ -278,6 +301,8 @@ public class RepositoryCleaner
{
actualRewriteCount++;
transaction.addFile( artifactTarget );
try
{
if ( !configuration.reportOnly() )
@ -291,6 +316,8 @@ public class RepositoryCleaner
File targetParent = artifactTarget.getParentFile();
if ( !targetParent.exists() )
{
transaction.addFile( targetParent );
targetParent.mkdirs();
}
@ -308,55 +335,64 @@ public class RepositoryCleaner
repoReporter.error( "Error transferring artifact[" + artifact.getId()
+ "] to the target repository.", e );
// if we can't copy the jar over, then skip the rest.
errorOccurred = true;
throw e;
}
if ( !errorOccurred )
if ( logger.isDebugEnabled() )
{
if ( logger.isDebugEnabled() )
{
logger.debug( "working on digest for artifact[" + artifact.getId()
+ "] with groupId: \'" + artifact.getGroupId() + "\'" );
}
try
{
artifactDigestVerifier.verifyDigest( artifact, artifactTarget, artifactReporter,
configuration.reportOnly() );
}
catch ( Exception e )
{
repoReporter.error( "Error verifying digest for artifact[" + artifact.getId() + "]", e );
}
logger.debug( "working on digest for artifact[" + artifact.getId() + "] with groupId: \'"
+ artifact.getGroupId() + "\'" );
}
if ( !errorOccurred )
try
{
ArtifactMetadata pom = new ProjectMetadata( artifact );
digestVerifier.verifyDigest( artifactSource, artifactTarget, transaction,
artifactReporter, configuration.reportOnly() );
}
catch ( Exception e )
{
repoReporter.error( "Error verifying digest for artifact[" + artifact.getId() + "]", e );
artifactPomRewriter = (ArtifactPomRewriter) container.lookup( ArtifactPomRewriter.ROLE,
configuration
.getSourcePomVersion() );
throw e;
}
File sourcePom = new File( sourceRepositoryBase, sourceRepo.pathOfMetadata( pom ) );
ArtifactMetadata pom = new ProjectMetadata( artifact );
File targetPom = new File( targetRepositoryBase, targetRepo.pathOfMetadata( pom ) );
artifactPomRewriter = (ArtifactPomRewriter) container.lookup( ArtifactPomRewriter.ROLE,
configuration
.getSourcePomVersion() );
File bridgedTargetPom = new File( targetRepositoryBase, bridgingLayout.pathOfMetadata( pom ) );
File sourcePom = new File( sourceRepositoryBase, sourceRepo.pathOfMetadata( pom ) );
try
File targetPom = new File( targetRepositoryBase, targetRepo.pathOfMetadata( pom ) );
transaction.addFile( targetPom );
File bridgedTargetPom = new File( targetRepositoryBase, bridgingLayout.pathOfMetadata( pom ) );
transaction.addFile( bridgedTargetPom );
try
{
artifactPomRewriter.rewrite( artifact, sourcePom, targetPom, artifactReporter,
configuration.reportOnly() );
boolean wroteBridge = bridgePomLocations( targetPom, bridgedTargetPom, artifactReporter );
digestVerifier.verifyDigest( sourcePom, targetPom, transaction,
artifactReporter, configuration.reportOnly() );
if(wroteBridge)
{
artifactPomRewriter.rewrite( artifact, sourcePom, targetPom, artifactReporter,
configuration.reportOnly() );
bridgePomLocations( targetPom, bridgedTargetPom, artifactReporter );
}
catch ( Exception e )
{
repoReporter.error( "Error rewriting POM for artifact[" + artifact.getId()
+ "] into the target repository.", e );
digestVerifier.verifyDigest( sourcePom, bridgedTargetPom, transaction,
artifactReporter, configuration.reportOnly() );
}
}
catch ( Exception e )
{
repoReporter.error( "Error rewriting POM for artifact[" + artifact.getId()
+ "] into the target repository.\n Error message: " + e.getMessage() );
}
}
@ -380,6 +416,18 @@ public class RepositoryCleaner
}
catch ( Exception e )
{
if ( !configuration.force() )
{
try
{
transaction.rollback();
}
catch ( RollbackException re )
{
repoReporter.error( "Error rolling back conversion transaction.", re );
}
}
artifactReporter.error( "Error while rewriting file or POM for artifact: \'" + artifact.getId()
+ "\'. See report at: \'" + artifactReportPath + "\'.", e );
}
@ -404,13 +452,15 @@ public class RepositoryCleaner
}
}
private void bridgePomLocations( File targetPom, File bridgedTargetPom, Reporter reporter )
private boolean bridgePomLocations( File targetPom, File bridgedTargetPom, Reporter reporter )
throws IOException, ReportWriteException
{
if ( targetPom.equals( bridgedTargetPom ) )
{
reporter.warn( "Cannot create legacy-compatible copy of POM at: " + targetPom
+ "; legacy-compatible path is the same as the converted POM itself." );
return false;
}
FileInputStream in = null;
@ -428,6 +478,8 @@ public class RepositoryCleaner
IOUtil.close( in );
IOUtil.close( out );
}
return true;
}
private String buildArtifactReportPath( Artifact artifact )
@ -522,23 +574,30 @@ public class RepositoryCleaner
return sourceRepositoryBase;
}
private File normalizeReportsBase( String reportsPath )
private File formatReportsBase( String reportsPath )
{
Logger logger = getLogger();
File reportsBase = new File( reportsPath );
if ( !reportsBase.exists() )
{
logger.info( "Creating reports directory: \'" + reportsBase + "\'" );
SimpleDateFormat dateFormat = new SimpleDateFormat( REPORTS_DIR_DATE_FORMAT );
reportsBase.mkdirs();
}
else if ( !reportsBase.isDirectory() )
String subdir = dateFormat.format( new Date() );
File allReportsBase = new File( reportsPath );
File reportsBase = new File( allReportsBase, subdir );
if ( reportsBase.exists() && !reportsBase.isDirectory() )
{
logger.error( "Cannot write reports to \'" + reportsBase + "\' because it is not a directory." );
reportsBase = null;
}
else
{
logger.info( "Creating reports directory: \'" + reportsBase + "\'" );
reportsBase.mkdirs();
}
return reportsBase;
}

View File

@ -20,16 +20,16 @@ package org.apache.maven.tools.repoclean.digest;
/**
* @author jdcasey
*/
public class ArtifactDigestException
public class DigestException
extends Exception
{
public ArtifactDigestException( String message )
public DigestException( String message )
{
super( message );
}
public ArtifactDigestException( String message, Throwable cause )
public DigestException( String message, Throwable cause )
{
super( message, cause );
}

View File

@ -20,25 +20,25 @@ package org.apache.maven.tools.repoclean.digest;
/**
* @author jdcasey
*/
public class ArtifactDigestVerificationException
public class DigestVerificationException
extends Exception
{
public ArtifactDigestVerificationException()
public DigestVerificationException()
{
}
public ArtifactDigestVerificationException( String message )
public DigestVerificationException( String message )
{
super( message );
}
public ArtifactDigestVerificationException( String message, Throwable cause )
public DigestVerificationException( String message, Throwable cause )
{
super( message, cause );
}
public ArtifactDigestVerificationException( Throwable cause )
public DigestVerificationException( Throwable cause )
{
super( cause );
}

View File

@ -17,9 +17,9 @@ package org.apache.maven.tools.repoclean.digest;
* ====================================================================
*/
import org.apache.maven.artifact.Artifact;
import org.apache.maven.tools.repoclean.report.ReportWriteException;
import org.apache.maven.tools.repoclean.report.Reporter;
import org.apache.maven.tools.repoclean.transaction.RewriteTransaction;
import org.codehaus.plexus.util.FileUtils;
import java.io.File;
@ -28,35 +28,37 @@ import java.io.IOException;
/**
* @author jdcasey
*/
public class ArtifactDigestVerifier
public class DigestVerifier
{
public static final String ROLE = ArtifactDigestVerifier.class.getName();
public static final String ROLE = DigestVerifier.class.getName();
private ArtifactDigestor artifactDigestor;
private Digestor artifactDigestor;
public void setArtifactDigestor(ArtifactDigestor artifactDigestor)
public void setArtifactDigestor(Digestor artifactDigestor)
{
this.artifactDigestor = artifactDigestor;
}
public void verifyDigest( Artifact artifact, File artifactTarget, Reporter reporter, boolean reportOnly )
throws ArtifactDigestException, ReportWriteException, IOException
public void verifyDigest( File source, File target, RewriteTransaction transaction, Reporter reporter, boolean reportOnly )
throws DigestException, ReportWriteException, IOException
{
verifyDigestFile( artifact, artifactTarget, reporter, reportOnly, ".md5", ArtifactDigestor.MD5 );
verifyDigestFile( source, target, transaction, reporter, reportOnly, ".md5", Digestor.MD5 );
verifyDigestFile( artifact, artifactTarget, reporter, reportOnly, ".sha1", ArtifactDigestor.SHA );
verifyDigestFile( source, target, transaction, reporter, reportOnly, ".sha1", Digestor.SHA );
}
private void verifyDigestFile( Artifact artifact, File artifactTarget, Reporter reporter, boolean reportOnly,
private void verifyDigestFile( File artifactSource, File artifactTarget, RewriteTransaction transaction, Reporter reporter, boolean reportOnly,
String digestExt, String digestAlgorithm )
throws ArtifactDigestException, ReportWriteException, IOException
throws DigestException, ReportWriteException, IOException
{
// create the digest source file from which to copy/verify.
File digestSourceFile = new File( artifact.getFile() + digestExt );
File digestSourceFile = new File( artifactSource + digestExt );
// create the digest target file from which to copy/create.
File digestTargetFile = new File( artifactTarget + digestExt );
transaction.addFile( digestTargetFile );
boolean verified = false;
@ -75,7 +77,7 @@ public class ArtifactDigestVerifier
}
catch ( IOException e )
{
reporter.error( "Cannot copy digest file for artifact[" + artifact.getId()
reporter.error( "Cannot copy digest file for path [" + artifactSource
+ "] from source to target for digest algorithm: \'" + digestAlgorithm + "\'.", e );
throw e;
@ -84,12 +86,12 @@ public class ArtifactDigestVerifier
}
else
{
reporter.warn( digestExt + " for artifact[" + artifact.getId() + "] in target repository is wrong." );
reporter.warn( digestExt + " for path [" + artifactSource + "] in target repository is wrong." );
}
}
else
{
reporter.warn( digestExt + " for artifact[" + artifact.getId() + "] is missing in source repository." );
reporter.warn( digestExt + " for path [" + artifactSource + "] is missing in source repository." );
}
// if the .md5 was missing or did not verify correctly, create a new one

View File

@ -16,17 +16,17 @@ import java.security.NoSuchAlgorithmException;
/**
* @author jdcasey
*/
public class ArtifactDigestor
public class Digestor
{
public static final String ROLE = ArtifactDigestor.class.getName();
public static final String ROLE = Digestor.class.getName();
public static final String MD5 = "MD5";
public static final String SHA = "SHA";
public void createArtifactDigest( File artifactFile, File digestFile, String algorithm )
throws ArtifactDigestException
throws DigestException
{
byte[] digestData = generateArtifactDigest( artifactFile, algorithm );
@ -36,12 +36,12 @@ public class ArtifactDigestor
}
catch ( IOException e )
{
throw new ArtifactDigestException( "Cannot write digest to file: \'" + digestFile + "\'", e );
throw new DigestException( "Cannot write digest to file: \'" + digestFile + "\'", e );
}
}
public boolean verifyArtifactDigest( File artifactFile, File digestFile, String algorithm )
throws ArtifactDigestException
throws DigestException
{
if ( artifactFile.exists() && digestFile.exists() )
{
@ -68,7 +68,7 @@ public class ArtifactDigestor
}
catch ( IOException e )
{
throw new ArtifactDigestException( "Cannot verify digest for artifact file: \'" + artifactFile
throw new DigestException( "Cannot verify digest for artifact file: \'" + artifactFile
+ "\' against digest file: \'" + digestFile + "\' using algorithm: \'" + algorithm + "\'", e );
}
finally
@ -86,7 +86,7 @@ public class ArtifactDigestor
}
public byte[] generateArtifactDigest( File artifactFile, String algorithm )
throws ArtifactDigestException
throws DigestException
{
MessageDigest digest = null;
try
@ -95,7 +95,7 @@ public class ArtifactDigestor
}
catch ( NoSuchAlgorithmException e )
{
throw new ArtifactDigestException( "Cannot load digest algoritm provider.", e );
throw new DigestException( "Cannot load digest algoritm provider.", e );
}
InputStream in = null;
@ -112,7 +112,7 @@ public class ArtifactDigestor
}
catch ( IOException e )
{
throw new ArtifactDigestException( "Error reading artifact data from: \'" + artifactFile + "\'", e );
throw new DigestException( "Error reading artifact data from: \'" + artifactFile + "\'", e );
}
finally
{

View File

@ -0,0 +1,68 @@
package org.apache.maven.tools.repoclean.discover;
import org.apache.maven.tools.repoclean.report.PathLister;
import org.apache.maven.tools.repoclean.report.ReportWriteException;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.util.DirectoryScanner;
import java.io.File;
/*
* Copyright 2001-2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * Base class for artifact discoverers. Provides the shared directory-scanning
 * step: it merges the standard discovery exclusions with any caller-supplied
 * blacklist patterns, scans the repository, records every excluded path via
 * the supplied {@link PathLister}, and returns the surviving candidate paths.
 *
 * @author jdcasey
 */
public abstract class AbstractArtifactDiscoverer
    extends AbstractLogEnabled
    implements ArtifactDiscoverer
{

    /**
     * Scan the repository base directory for candidate artifact paths.
     *
     * @param repositoryBase      root directory of the repository to scan
     * @param blacklistedPatterns comma-separated additional exclusion patterns; may be null or empty
     * @param excludesLister      receives each path the scan excluded, for separate reporting
     * @return the relative paths of files that survived the exclusion filters
     * @throws ReportWriteException if an excluded path cannot be recorded
     */
    protected String[] scanForArtifactPaths( File repositoryBase, String blacklistedPatterns, PathLister excludesLister )
        throws ReportWriteException
    {
        // Split the optional comma-separated blacklist into individual patterns.
        String[] blacklistPatterns = ( blacklistedPatterns != null && blacklistedPatterns.length() > 0 )
            ? blacklistedPatterns.split( "," )
            : new String[0];

        // Standard exclusions first, then the caller's blacklist appended after them.
        String[] excludePatterns = new String[STANDARD_DISCOVERY_EXCLUDES.length + blacklistPatterns.length];
        System.arraycopy( STANDARD_DISCOVERY_EXCLUDES, 0, excludePatterns, 0, STANDARD_DISCOVERY_EXCLUDES.length );
        System.arraycopy( blacklistPatterns, 0, excludePatterns, STANDARD_DISCOVERY_EXCLUDES.length,
                          blacklistPatterns.length );

        DirectoryScanner directoryScanner = new DirectoryScanner();
        directoryScanner.setBasedir( repositoryBase );
        directoryScanner.setExcludes( excludePatterns );
        directoryScanner.scan();

        // Record every excluded path so exclusions can be reviewed separately
        // from the main repository report.
        String[] excluded = directoryScanner.getExcludedFiles();
        for ( int i = 0; i < excluded.length; i++ )
        {
            excludesLister.addPath( excluded[i] );
        }

        return directoryScanner.getIncludedFiles();
    }

}

View File

@ -1,6 +1,7 @@
package org.apache.maven.tools.repoclean.discover;
import org.apache.maven.tools.repoclean.report.FileReporter;
import org.apache.maven.tools.repoclean.report.PathLister;
import org.apache.maven.tools.repoclean.report.Reporter;
import java.io.File;
import java.util.List;
@ -32,6 +33,9 @@ public interface ArtifactDiscoverer
".maven/**",
"**/poms/*.pom",
"**/*.md5",
"**/*.MD5",
"**/*.sha1",
"**/*.SHA1",
"**/*snapshot-version",
"*/website/**",
"*/licenses/**",
@ -39,9 +43,14 @@ public interface ArtifactDiscoverer
"**/.htaccess",
"**/*.html",
"**/*.asc",
"**/*.txt" };
"**/*.txt",
"**/*.xml",
"**/README*",
"**/CHANGELOG*",
"**/KEYS*" };
List discoverArtifacts( File repositoryBase, FileReporter reporter, String blacklistedPatterns )
List discoverArtifacts( File repositoryBase, Reporter reporter, String blacklistedPatterns,
PathLister excludeLister, PathLister kickoutLister )
throws Exception;
}

View File

@ -4,8 +4,8 @@ import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.construction.ArtifactConstructionSupport;
import org.apache.maven.model.Model;
import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
import org.apache.maven.tools.repoclean.report.FileReporter;
import org.codehaus.plexus.util.DirectoryScanner;
import org.apache.maven.tools.repoclean.report.PathLister;
import org.apache.maven.tools.repoclean.report.Reporter;
import org.codehaus.plexus.util.IOUtil;
import java.io.File;
@ -31,44 +31,23 @@ import java.util.List;
* @author jdcasey
*/
public class DefaultArtifactDiscoverer
implements ArtifactDiscoverer
extends AbstractArtifactDiscoverer
{
private ArtifactConstructionSupport artifactConstructionSupport = new ArtifactConstructionSupport();
public List discoverArtifacts( File repositoryBase, FileReporter reporter, String blacklistedPatterns )
public List discoverArtifacts( File repositoryBase, Reporter reporter, String blacklistedPatterns, PathLister excludeLister, PathLister kickoutLister )
throws Exception
{
List artifacts = new ArrayList();
String[] blacklisted = null;
if ( blacklistedPatterns != null && blacklistedPatterns.length() > 0 )
{
blacklisted = blacklistedPatterns.split( "," );
}
else
{
blacklisted = new String[0];
}
String[] allExcludes = new String[STANDARD_DISCOVERY_EXCLUDES.length + blacklisted.length];
System.arraycopy( STANDARD_DISCOVERY_EXCLUDES, 0, allExcludes, 0, STANDARD_DISCOVERY_EXCLUDES.length );
System.arraycopy( blacklisted, 0, allExcludes, 0, blacklisted.length );
DirectoryScanner scanner = new DirectoryScanner();
scanner.setBasedir( repositoryBase );
scanner.setExcludes( allExcludes );
scanner.scan();
String[] artifactPaths = scanner.getIncludedFiles();
String[] artifactPaths = scanForArtifactPaths( repositoryBase, blacklistedPatterns, excludeLister );
for ( int i = 0; i < artifactPaths.length; i++ )
{
String path = artifactPaths[i];
Artifact artifact = buildArtifact( repositoryBase, path, reporter );
Artifact artifact = buildArtifact( repositoryBase, path, kickoutLister );
if ( artifact != null )
{
@ -79,7 +58,7 @@ public class DefaultArtifactDiscoverer
return artifacts;
}
private Artifact buildArtifact( File repositoryBase, String path, FileReporter reporter )
private Artifact buildArtifact( File repositoryBase, String path, PathLister kickoutLister )
throws Exception
{
Artifact result = null;
@ -88,8 +67,7 @@ public class DefaultArtifactDiscoverer
if ( lastDot < 0 )
{
reporter.error( "Found potential artifact file with invalid name. Path: \'" + path
+ "\' doesn't seem to contain a file extension." );
kickoutLister.addPath(path);
}
else
{
@ -117,8 +95,7 @@ public class DefaultArtifactDiscoverer
}
else
{
reporter.error( "POM not found for potential artifact at \'" + path
+ "\'. Cannot create Artifact instance." );
kickoutLister.addPath(path);
}
}

View File

@ -16,9 +16,8 @@ package org.apache.maven.tools.repoclean.discover;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.construction.ArtifactConstructionSupport;
import org.apache.maven.tools.repoclean.report.FileReporter;
import org.codehaus.plexus.logging.AbstractLogEnabled;
import org.codehaus.plexus.util.DirectoryScanner;
import org.apache.maven.tools.repoclean.report.PathLister;
import org.apache.maven.tools.repoclean.report.Reporter;
import java.io.File;
import java.util.ArrayList;
@ -32,54 +31,24 @@ import java.util.StringTokenizer;
* @author jdcasey
*/
public class LegacyArtifactDiscoverer
extends AbstractLogEnabled
implements ArtifactDiscoverer
extends AbstractArtifactDiscoverer
{
private ArtifactConstructionSupport artifactConstructionSupport = new ArtifactConstructionSupport();
public List discoverArtifacts( File repositoryBase, FileReporter reporter, String blacklistedPatterns )
public List discoverArtifacts( File repositoryBase, Reporter reporter, String blacklistedPatterns,
PathLister excludeLister, PathLister kickoutLister )
throws Exception
{
List artifacts = new ArrayList();
String[] blacklisted = null;
if ( blacklistedPatterns != null && blacklistedPatterns.length() > 0 )
{
blacklisted = blacklistedPatterns.split( "," );
}
else
{
blacklisted = new String[0];
}
String[] allExcludes = null;
if ( blacklisted != null && blacklisted.length > 0 )
{
allExcludes = new String[STANDARD_DISCOVERY_EXCLUDES.length + blacklisted.length ];
System.arraycopy( STANDARD_DISCOVERY_EXCLUDES, 0, allExcludes, 0, STANDARD_DISCOVERY_EXCLUDES.length );
System.arraycopy( blacklisted, 0, allExcludes, STANDARD_DISCOVERY_EXCLUDES.length, blacklisted.length );
}
else
{
allExcludes = STANDARD_DISCOVERY_EXCLUDES;
}
DirectoryScanner scanner = new DirectoryScanner();
scanner.setBasedir( repositoryBase );
scanner.setExcludes( allExcludes );
scanner.scan();
String[] artifactPaths = scanner.getIncludedFiles();
String[] artifactPaths = scanForArtifactPaths( repositoryBase, blacklistedPatterns, excludeLister );
for ( int i = 0; i < artifactPaths.length; i++ )
{
String path = artifactPaths[i];
Artifact artifact = buildArtifact( path, reporter );
Artifact artifact = buildArtifact( path, kickoutLister );
if ( artifact != null )
{
artifacts.add( artifact );
@ -89,241 +58,257 @@ public class LegacyArtifactDiscoverer
return artifacts;
}
private Artifact buildArtifact( String path, FileReporter reporter )
private Artifact buildArtifact( String path, PathLister kickoutLister )
throws Exception
{
StringTokenizer tokens = new StringTokenizer( path, "/\\" );
int numberOfTokens = tokens.countTokens();
if ( numberOfTokens != 3 )
try
{
reporter.warn( "Artifact path: \'" + path
+ "\' does not match naming convention. Cannot reliably extract artifact information from path." );
StringTokenizer tokens = new StringTokenizer( path, "/\\" );
return null;
}
int numberOfTokens = tokens.countTokens();
String groupId = tokens.nextToken();
String type = tokens.nextToken();
if ( type.endsWith( "s" ) )
{
type = type.substring( 0, type.length() - 1 );
}
// contains artifactId, version, classifier, and extension.
String avceGlob = tokens.nextToken();
LinkedList avceTokenList = new LinkedList();
StringTokenizer avceTokenizer = new StringTokenizer( avceGlob, "-" );
while ( avceTokenizer.hasMoreTokens() )
{
avceTokenList.addLast( avceTokenizer.nextToken() );
}
String lastAvceToken = (String) avceTokenList.removeLast();
if ( lastAvceToken.endsWith( ".tar.gz" ) )
{
type = "distribution-tgz";
lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".tar.gz".length() );
avceTokenList.addLast( lastAvceToken );
}
else if ( lastAvceToken.endsWith( ".zip" ) )
{
type = "distribution-zip";
lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".zip".length() );
avceTokenList.addLast( lastAvceToken );
}
else
{
int extPos = lastAvceToken.lastIndexOf( '.' );
if ( extPos > 0 )
if ( numberOfTokens != 3 )
{
String ext = lastAvceToken.substring( extPos + 1 );
if ( type.equals( ext ) )
{
lastAvceToken = lastAvceToken.substring( 0, extPos );
kickoutLister.addPath(path);
avceTokenList.addLast( lastAvceToken );
return null;
}
String groupId = tokens.nextToken();
String type = tokens.nextToken();
if ( type.endsWith( "s" ) )
{
type = type.substring( 0, type.length() - 1 );
}
// contains artifactId, version, classifier, and extension.
String avceGlob = tokens.nextToken();
LinkedList avceTokenList = new LinkedList();
StringTokenizer avceTokenizer = new StringTokenizer( avceGlob, "-" );
while ( avceTokenizer.hasMoreTokens() )
{
avceTokenList.addLast( avceTokenizer.nextToken() );
}
String lastAvceToken = (String) avceTokenList.removeLast();
if ( lastAvceToken.endsWith( ".tar.gz" ) )
{
type = "distribution-tgz";
lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".tar.gz".length() );
avceTokenList.addLast( lastAvceToken );
}
else if ( lastAvceToken.endsWith( ".zip" ) )
{
type = "distribution-zip";
lastAvceToken = lastAvceToken.substring( 0, lastAvceToken.length() - ".zip".length() );
avceTokenList.addLast( lastAvceToken );
}
else
{
int extPos = lastAvceToken.lastIndexOf( '.' );
if ( extPos > 0 )
{
String ext = lastAvceToken.substring( extPos + 1 );
if ( type.equals( ext ) )
{
lastAvceToken = lastAvceToken.substring( 0, extPos );
avceTokenList.addLast( lastAvceToken );
}
else
{
kickoutLister.addPath(path);
return null;
}
}
}
String validVersionParts = "([Dd][Ee][Vv][_.0-9]*)|" + "([Ss][Nn][Aa][Pp][Ss][Hh][Oo][Tt])|"
+ "([0-9][_.0-9a-zA-Z]*)|" + "([Gg]?[_.0-9ab]*([Pp][Rr][Ee]|[Rr][Cc]|[Gg]|[Mm])[_.0-9]*)|"
+ "([Aa][Ll][Pp][Hh][Aa][_.0-9]*)|" + "([Bb][Ee][Tt][Aa][_.0-9]*)|" + "([Rr][Cc][_.0-9]*)|"
+ "([Tt][Ee][Ss][Tt][_.0-9]*)|" + "([Dd][Ee][Bb][Uu][Gg][_.0-9]*)|"
+ "([Uu][Nn][Oo][Ff][Ff][Ii][Cc][Ii][Aa][Ll][_.0-9]*)|" + "([Cc][Uu][Rr][Rr][Ee][Nn][Tt])|"
+ "([Ll][Aa][Tt][Ee][Ss][Tt])|" + "([Ff][Cc][Ss])|" + "([Rr][Ee][Ll][Ee][Aa][Ss][Ee][_.0-9]*)|"
+ "([Nn][Ii][Gg][Hh][Tt][Ll][Yy])";
// let's discover the version, and whatever's leftover will be either
// a classifier, or part of the artifactId, depending on position.
// Since version is at the end, we have to move in from the back.
Collections.reverse( avceTokenList );
StringBuffer classifierBuffer = new StringBuffer();
StringBuffer versionBuffer = new StringBuffer();
boolean firstVersionTokenEncountered = false;
boolean firstToken = true;
int tokensIterated = 0;
for ( Iterator it = avceTokenList.iterator(); it.hasNext(); )
{
String token = (String) it.next();
boolean tokenIsVersionPart = token.matches( validVersionParts );
StringBuffer bufferToUpdate = null;
// NOTE: logic in code is reversed, since we're peeling off the back
// Any token after the last versionPart will be in the classifier.
// Any token UP TO first non-versionPart is part of the version.
if ( !tokenIsVersionPart )
{
if ( firstVersionTokenEncountered )
{
break;
}
else
{
bufferToUpdate = classifierBuffer;
}
}
else
{
reporter
.warn( "Artifact path: \'"
+ path
+ "\' does not match naming convention. Cannot reliably extract artifact information from path." );
firstVersionTokenEncountered = true;
return null;
bufferToUpdate = versionBuffer;
}
}
}
String validVersionParts = "([Dd][Ee][Vv][_.0-9]*)|" + "([Ss][Nn][Aa][Pp][Ss][Hh][Oo][Tt])|"
+ "([0-9][_.0-9a-zA-Z]*)|" + "([Gg]?[_.0-9ab]*([Pp][Rr][Ee]|[Rr][Cc]|[Gg]|[Mm])[_.0-9]*)|"
+ "([Aa][Ll][Pp][Hh][Aa][_.0-9]*)|" + "([Bb][Ee][Tt][Aa][_.0-9]*)|" + "([Rr][Cc][_.0-9]*)|"
+ "([Tt][Ee][Ss][Tt][_.0-9]*)|" + "([Dd][Ee][Bb][Uu][Gg][_.0-9]*)|"
+ "([Uu][Nn][Oo][Ff][Ff][Ii][Cc][Ii][Aa][Ll][_.0-9]*)|" + "([Cc][Uu][Rr][Rr][Ee][Nn][Tt])|"
+ "([Ll][Aa][Tt][Ee][Ss][Tt])|" + "([Ff][Cc][Ss])|" + "([Rr][Ee][Ll][Ee][Aa][Ss][Ee][_.0-9]*)|"
+ "([Nn][Ii][Gg][Hh][Tt][Ll][Yy])";
// let's discover the version, and whatever's leftover will be either
// a classifier, or part of the artifactId, depending on position.
// Since version is at the end, we have to move in from the back.
Collections.reverse( avceTokenList );
StringBuffer classifierBuffer = new StringBuffer();
StringBuffer versionBuffer = new StringBuffer();
boolean firstVersionTokenEncountered = false;
boolean firstToken = true;
int tokensIterated = 0;
for ( Iterator it = avceTokenList.iterator(); it.hasNext(); )
{
String token = (String) it.next();
boolean tokenIsVersionPart = token.matches( validVersionParts );
StringBuffer bufferToUpdate = null;
// NOTE: logic in code is reversed, since we're peeling off the back
// Any token after the last versionPart will be in the classifier.
// Any token UP TO first non-versionPart is part of the version.
if ( !tokenIsVersionPart )
{
if ( firstVersionTokenEncountered )
if ( firstToken )
{
break;
firstToken = false;
}
else
{
bufferToUpdate = classifierBuffer;
bufferToUpdate.insert( 0, '-' );
}
bufferToUpdate.insert( 0, token );
tokensIterated++;
}
getLogger().debug(
"After parsing loop, state of buffers:\no Version Buffer: \'" + versionBuffer
+ "\'\no Classifier Buffer: \'" + classifierBuffer
+ "\'\no Number of Tokens Iterated: " + tokensIterated );
// Now, restore the proper ordering so we can build the artifactId.
Collections.reverse( avceTokenList );
getLogger().debug(
"Before repairing bad version and/or cleaning up used tokens, avce token list is:\n"
+ avceTokenList );
// if we didn't find a version, then punt. Use the last token
// as the version, and set the classifier empty.
if ( versionBuffer.length() < 1 )
{
if ( avceTokenList.size() > 1 )
{
int lastIdx = avceTokenList.size() - 1;
versionBuffer.append( avceTokenList.get( lastIdx ) );
avceTokenList.remove( lastIdx );
}
else
{
getLogger().warn( "Cannot parse version from artifact path: \'" + path + "\'." );
getLogger().info(
"artifact-version-classifier-extension remaining tokens is: \'" + avceTokenList
+ "\'" );
}
classifierBuffer.setLength( 0 );
}
else
{
firstVersionTokenEncountered = true;
getLogger().debug( "Removing " + tokensIterated + " tokens from avce token list." );
bufferToUpdate = versionBuffer;
// if everything is kosher, then pop off all the classifier and
// version tokens, leaving the naked artifact id in the list.
avceTokenList = new LinkedList( avceTokenList.subList( 0, avceTokenList.size() - ( tokensIterated ) ) );
}
if ( firstToken )
getLogger().debug( "Now, remainder of avce token list is:\n" + avceTokenList );
StringBuffer artifactIdBuffer = new StringBuffer();
firstToken = true;
for ( Iterator it = avceTokenList.iterator(); it.hasNext(); )
{
firstToken = false;
String token = (String) it.next();
if ( firstToken )
{
firstToken = false;
}
else
{
artifactIdBuffer.append( '-' );
}
artifactIdBuffer.append( token );
}
String artifactId = artifactIdBuffer.toString();
int lastVersionCharIdx = versionBuffer.length() - 1;
if ( lastVersionCharIdx > -1 && versionBuffer.charAt( lastVersionCharIdx ) == '-' )
{
versionBuffer.setLength( lastVersionCharIdx );
}
String version = versionBuffer.toString();
if ( version.length() < 1 )
{
version = null;
}
getLogger().debug(
"Extracted artifact information from path:\n" + "groupId: \'" + groupId + "\'\n"
+ "artifactId: \'" + artifactId + "\'\n" + "type: \'" + type + "\'\n"
+ "version: \'" + version + "\'\n" + "classifier: \'" + classifierBuffer.toString()
+ "\'" );
Artifact result = null;
if ( classifierBuffer.length() > 0 )
{
getLogger().debug( "Creating artifact with classifier." );
result = artifactConstructionSupport.createArtifactWithClassifier( groupId, artifactId, version,
Artifact.SCOPE_RUNTIME, type,
classifierBuffer.toString() );
}
else
{
bufferToUpdate.insert( 0, '-' );
result = artifactConstructionSupport.createArtifact( groupId, artifactId, version,
Artifact.SCOPE_RUNTIME, type );
}
bufferToUpdate.insert( 0, token );
getLogger().debug(
"Resulting artifact is: " + result.getId() + " and has classifier of: "
+ result.getClassifier() + "\n\n" );
tokensIterated++;
return result;
}
getLogger().debug(
"After parsing loop, state of buffers:\no Version Buffer: \'" + versionBuffer
+ "\'\no Classifier Buffer: \'" + classifierBuffer
+ "\'\no Number of Tokens Iterated: " + tokensIterated );
// Now, restore the proper ordering so we can build the artifactId.
Collections.reverse( avceTokenList );
getLogger().debug(
"Before repairing bad version and/or cleaning up used tokens, avce token list is:\n"
+ avceTokenList );
// if we didn't find a version, then punt. Use the last token
// as the version, and set the classifier empty.
if ( versionBuffer.length() < 1 )
catch ( RuntimeException e )
{
int lastIdx = avceTokenList.size() - 1;
getLogger().error( "While parsing artifact path: \'" + path + "\'...\n\n", e );
versionBuffer.append( avceTokenList.get( lastIdx ) );
avceTokenList.remove( lastIdx );
classifierBuffer.setLength( 0 );
throw e;
}
else
{
getLogger().debug( "Removing " + tokensIterated + " tokens from avce token list." );
// if everything is kosher, then pop off all the classifier and
// version tokens, leaving the naked artifact id in the list.
avceTokenList = new LinkedList( avceTokenList.subList( 0, avceTokenList.size() - ( tokensIterated ) ) );
}
getLogger().debug( "Now, remainder of avce token list is:\n" + avceTokenList );
StringBuffer artifactIdBuffer = new StringBuffer();
firstToken = true;
for ( Iterator it = avceTokenList.iterator(); it.hasNext(); )
{
String token = (String) it.next();
if ( firstToken )
{
firstToken = false;
}
else
{
artifactIdBuffer.append( '-' );
}
artifactIdBuffer.append( token );
}
String artifactId = artifactIdBuffer.toString();
int lastVersionCharIdx = versionBuffer.length() - 1;
if ( lastVersionCharIdx > -1 && versionBuffer.charAt( lastVersionCharIdx ) == '-' )
{
versionBuffer.setLength( lastVersionCharIdx );
}
String version = versionBuffer.toString();
if ( version.length() < 1 )
{
version = null;
}
getLogger().debug(
"Extracted artifact information from path:\n" + "groupId: \'" + groupId + "\'\n"
+ "artifactId: \'" + artifactId + "\'\n" + "type: \'" + type + "\'\n" + "version: \'"
+ version + "\'\n" + "classifier: \'" + classifierBuffer.toString() + "\'" );
Artifact result = null;
if ( classifierBuffer.length() > 0 )
{
getLogger().debug( "Creating artifact with classifier." );
result = artifactConstructionSupport.createArtifactWithClassifier( groupId, artifactId, version,
Artifact.SCOPE_RUNTIME, type,
classifierBuffer.toString() );
}
else
{
result = artifactConstructionSupport.createArtifact( groupId, artifactId, version, Artifact.SCOPE_RUNTIME,
type );
}
getLogger().debug(
"Resulting artifact is: " + result.getId() + " and has classifier of: "
+ result.getClassifier() + "\n\n" );
return result;
}
}

View File

@ -0,0 +1,86 @@
package org.apache.maven.tools.repoclean.report;
import org.codehaus.plexus.util.IOUtil;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
/*
* Copyright 2001-2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * Appends one path per line to a backing list file, opening the file lazily
 * on first write. Used to log kickouts/exclusions separately so they do not
 * pollute the main repository report.
 */
public class PathLister
{
    private final File listFile;

    private Writer writer;

    public PathLister( File listFile )
    {
        this.listFile = listFile;
    }

    /**
     * Lazily open the underlying writer; safe to call repeatedly.
     *
     * @throws ReportWriteException if the list file cannot be opened for writing.
     */
    private synchronized void checkOpen() throws ReportWriteException
    {
        if ( writer == null )
        {
            try
            {
                writer = new FileWriter( listFile );
            }
            catch ( IOException e )
            {
                throw new ReportWriteException( "Cannot open listFile for writing: " + listFile, e );
            }
        }
    }

    /** Close the underlying writer, if it was ever opened (flushes on close). */
    public void close()
    {
        IOUtil.close( writer );
    }

    /**
     * Append a single path, followed by a newline, to the list file.
     *
     * @throws ReportWriteException if the file cannot be opened or written.
     */
    public void addPath( String path ) throws ReportWriteException
    {
        checkOpen();

        try
        {
            writer.write( path + "\n" );
        }
        catch ( IOException e )
        {
            throw new ReportWriteException( "Cannot write path: " + path + " to listFile: " + listFile, e );
        }
    }

    /**
     * Append a single file path to the list file. Delegates to
     * {@link #addPath(String)} to avoid duplicating the open/write/error-handling
     * logic; output is identical since {@code path + "\n"} used the File's
     * string form anyway.
     *
     * @throws ReportWriteException if the file cannot be opened or written.
     */
    public void addPath( File path ) throws ReportWriteException
    {
        addPath( String.valueOf( path ) );
    }
}

View File

@ -41,6 +41,13 @@ public class V3PomRewriter
public void rewrite( Artifact artifact, File from, File to, FileReporter reporter, boolean reportOnly )
throws Exception
{
// should only have to handle this here...v4 repos shouldn't have this
// problem...
String toPath = to.getPath();
toPath = toPath.replace( '+', '-' );
File target = new File( toPath );
Model v4Model = null;
if ( from.exists() )
@ -60,6 +67,8 @@ public class V3PomRewriter
catch ( Exception e )
{
reporter.error( "Invalid v3 POM at: \'" + from + "\'. Cannot read.", e );
throw e;
}
if(v3Model != null)
@ -95,7 +104,7 @@ public class V3PomRewriter
FileWriter toWriter = null;
try
{
toWriter = new FileWriter( to );
toWriter = new FileWriter( target );
MavenXpp3Writer v4Writer = new MavenXpp3Writer();
v4Writer.write( toWriter, v4Model );
}

View File

@ -0,0 +1,57 @@
package org.apache.maven.tools.repoclean.transaction;
import org.apache.maven.artifact.Artifact;
import java.io.File;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
/*
* Copyright 2001-2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * Records the files created while converting a single artifact so that, on
 * failure, all of them can be backed out of the target repository in one
 * rollback operation.
 */
public class RewriteTransaction
{
    private final Artifact artifact;

    private List files = new ArrayList();

    public RewriteTransaction( Artifact artifact )
    {
        this.artifact = artifact;
    }

    /** Register a file to be removed if this transaction is rolled back. */
    public void addFile( File file )
    {
        this.files.add( file );
    }

    /**
     * Delete every file registered with this transaction. Attempts ALL
     * deletions before failing, so a single undeletable file does not leave
     * the artifact's remaining files stranded in the target repository
     * (the original aborted on the first failure).
     *
     * @throws RollbackException if one or more files could not be deleted.
     */
    public void rollback()
        throws RollbackException
    {
        List failed = new ArrayList();

        for ( Iterator it = files.iterator(); it.hasNext(); )
        {
            File file = (File) it.next();

            if ( file.exists() && !file.delete() )
            {
                failed.add( file );
            }
        }

        if ( !failed.isEmpty() )
        {
            throw new RollbackException( "[rollback] Cannot delete file(s): " + failed
                + "\nPart of transaction for artifact: {" + artifact.getId() + "}." );
        }
    }
}

View File

@ -0,0 +1,33 @@
package org.apache.maven.tools.repoclean.transaction;
/*
* Copyright 2001-2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * Signals that one or more files created during an artifact conversion could
 * not be removed when the conversion was rolled back.
 */
public class RollbackException
    extends Exception
{
    /**
     * @param message description of the rollback failure.
     */
    public RollbackException( String message )
    {
        super( message );
    }

    /**
     * @param message description of the rollback failure.
     * @param cause underlying error that prevented the rollback.
     */
    public RollbackException( String message, Throwable cause )
    {
        super( message, cause );
    }
}

View File

@ -1,5 +1,30 @@
<component-set>
<components>
<component>
<role>org.apache.maven.tools.repoclean.RepositoryCleaner</role>
<implementation>org.apache.maven.tools.repoclean.RepositoryCleaner</implementation>
<requirements>
<requirement>
<role>org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout</role>
<role-hint>alpha-bridging</role-hint>
<field-name>bridgingLayout</field-name>
</requirement>
<requirement>
<role>org.apache.maven.tools.repoclean.digest.DigestVerifier</role>
</requirement>
<requirement>
<role>org.apache.maven.tools.repoclean.index.ArtifactIndexer</role>
</requirement>
<requirement>
<role>org.codehaus.plexus.mailsender.MailSender</role>
</requirement>
</requirements>
</component>
<!--
|
|
|
-->
<component>
<role>org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout</role>
<role-hint>alpha-bridging</role-hint>
@ -100,31 +125,6 @@
<role-hint>sar</role-hint>
<implementation>org.apache.maven.tools.repoclean.artifact.handler.SarHandler</implementation>
</component>
<!--
|
|
|
-->
<component>
<role>org.apache.maven.tools.repoclean.RepositoryCleaner</role>
<implementation>org.apache.maven.tools.repoclean.RepositoryCleaner</implementation>
<requirements>
<requirement>
<role>org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout</role>
<role-hint>alpha-bridging</role-hint>
<field-name>bridgingLayout</field-name>
</requirement>
<requirement>
<role>org.apache.maven.tools.repoclean.digest.ArtifactDigestVerifier</role>
</requirement>
<requirement>
<role>org.apache.maven.tools.repoclean.index.ArtifactIndexer</role>
</requirement>
<requirement>
<role>org.codehaus.plexus.mailsender.MailSender</role>
</requirement>
</requirements>
</component>
<!--
|
|
@ -160,11 +160,11 @@
|
-->
<component>
<role>org.apache.maven.tools.repoclean.digest.ArtifactDigestVerifier</role>
<implementation>org.apache.maven.tools.repoclean.digest.ArtifactDigestVerifier</implementation>
<role>org.apache.maven.tools.repoclean.digest.DigestVerifier</role>
<implementation>org.apache.maven.tools.repoclean.digest.DigestVerifier</implementation>
<requirements>
<requirement>
<role>org.apache.maven.tools.repoclean.digest.ArtifactDigestor</role>
<role>org.apache.maven.tools.repoclean.digest.Digestor</role>
</requirement>
</requirements>
</component>
@ -174,8 +174,8 @@
|
-->
<component>
<role>org.apache.maven.tools.repoclean.digest.ArtifactDigestor</role>
<implementation>org.apache.maven.tools.repoclean.digest.ArtifactDigestor</implementation>
<role>org.apache.maven.tools.repoclean.digest.Digestor</role>
<implementation>org.apache.maven.tools.repoclean.digest.Digestor</implementation>
</component>
<!--
|

View File

@ -4,6 +4,7 @@ import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.DefaultArtifact;
import org.apache.maven.tools.repoclean.TestSupport;
import org.apache.maven.tools.repoclean.report.DummyReporter;
import org.apache.maven.tools.repoclean.transaction.RewriteTransaction;
import org.codehaus.plexus.PlexusTestCase;
import java.io.File;
@ -27,28 +28,29 @@ import java.io.File;
public class ArtifactDigestVerifierTest
extends PlexusTestCase
{
public void testShouldWriteBothMD5AndSHA1DigestFiles() throws Exception
public void testShouldWriteBothMD5AndSHA1DigestFiles()
throws Exception
{
ArtifactDigestVerifier verifier = (ArtifactDigestVerifier) lookup( ArtifactDigestVerifier.ROLE );
Artifact artifact = new DefaultArtifact("testGroup", "testArtifact", "1.0", "jar");
File artifactFile = TestSupport.getResource("digest/ArtifactDigestorTest/digestFormatVerifyArtifact.jar");
artifact.setFile(artifactFile);
File tempFile = File.createTempFile("artifactDigestFileVerifyBase", "jar");
File md5 = new File(tempFile + ".md5");
File sha1 = new File(tempFile + ".sha1");
System.out.println("[INFO] We expect warnings for missing source digest files here:");
verifier.verifyDigest(artifact, tempFile, new DummyReporter(), false);
System.out.println("[INFO] Target digest files should have been created.");
assertTrue(md5.exists());
assertTrue(sha1.exists());
DigestVerifier verifier = (DigestVerifier) lookup( DigestVerifier.ROLE );
Artifact artifact = new DefaultArtifact( "testGroup", "testArtifact", "1.0", "jar" );
File artifactFile = TestSupport.getResource( "digest/ArtifactDigestorTest/digestFormatVerifyArtifact.jar" );
artifact.setFile( artifactFile );
File tempFile = File.createTempFile( "artifactDigestFileVerifyBase", "jar" );
File md5 = new File( tempFile + ".md5" );
File sha1 = new File( tempFile + ".sha1" );
System.out.println( "[INFO] We expect warnings for missing source digest files here:" );
verifier.verifyDigest( artifactFile, tempFile, new RewriteTransaction( artifact ), new DummyReporter(), false );
System.out.println( "[INFO] Target digest files should have been created." );
assertTrue( md5.exists() );
assertTrue( sha1.exists() );
}
}

View File

@ -32,11 +32,11 @@ public class ArtifactDigestorTest
public void testShouldWriteDigestFileInHexNotBinary() throws Exception
{
ArtifactDigestor digestor = new ArtifactDigestor();
Digestor digestor = new Digestor();
File artifact = TestSupport.getMyResource(this, DIGEST_FORMAT_VERIFY_ARTIFACT);
byte[] rawDigest = digestor.generateArtifactDigest( artifact, ArtifactDigestor.MD5 );
byte[] rawDigest = digestor.generateArtifactDigest( artifact, Digestor.MD5 );
StringBuffer rawConverted = new StringBuffer(rawDigest.length * 2);
for ( int i = 0; i < rawDigest.length; i++ )
@ -46,7 +46,7 @@ public class ArtifactDigestorTest
File digestFile = File.createTempFile("repoclean-artifactDigest-formatTest", ".md5");
digestor.createArtifactDigest( artifact, digestFile, ArtifactDigestor.MD5 );
digestor.createArtifactDigest( artifact, digestFile, Digestor.MD5 );
FileReader reader = new FileReader(digestFile);
StringBuffer written = new StringBuffer(rawDigest.length * 2);