mirror of https://github.com/apache/archiva.git
remove some duplicates
git-svn-id: https://svn.apache.org/repos/asf/maven/repository-manager/trunk@412303 13f79535-47bb-0310-9956-ffa450edef68
parent 8aad68cfe4, commit 6f8e0cea47
@@ -480,33 +480,17 @@ public class DefaultRepositoryConverter
         if ( properties.containsKey( "relocated.groupId" ) || properties.containsKey( "relocated.artifactId" ) ||
             properties.containsKey( "relocated.version" ) )
         {
-            String newGroupId = v3Model.getGroupId();
-            if ( properties.containsKey( "relocated.groupId" ) )
-            {
-                newGroupId = properties.getProperty( "relocated.groupId" );
-                properties.remove( "relocated.groupId" );
-            }
+            String newGroupId = properties.getProperty( "relocated.groupId", v3Model.getGroupId() );
+            properties.remove( "relocated.groupId" );

-            String newArtifactId = v3Model.getArtifactId();
-            if ( properties.containsKey( "relocated.artifactId" ) )
-            {
-                newArtifactId = properties.getProperty( "relocated.artifactId" );
-                properties.remove( "relocated.artifactId" );
-            }
+            String newArtifactId = properties.getProperty( "relocated.artifactId", v3Model.getArtifactId() );
+            properties.remove( "relocated.artifactId" );

-            String newVersion = v3Model.getVersion();
-            if ( properties.containsKey( "relocated.version" ) )
-            {
-                newVersion = properties.getProperty( "relocated.version" );
-                properties.remove( "relocated.version" );
-            }
+            String newVersion = properties.getProperty( "relocated.version", v3Model.getVersion() );
+            properties.remove( "relocated.version" );

-            String message = "";
-            if ( properties.containsKey( "relocated.message" ) )
-            {
-                message = properties.getProperty( "relocated.message" );
-                properties.remove( "relocated.message" );
-            }
+            String message = properties.getProperty( "relocated.message", "" );
+            properties.remove( "relocated.message" );

             if ( properties.isEmpty() )
             {
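The hunk above collapses each containsKey()/getProperty()/remove() block into a single getProperty( key, default ) call followed by an unconditional remove(). A minimal, self-contained sketch of that java.util.Properties pattern (the class name and property values below are illustrative, not taken from the Archiva sources):

import java.util.Properties;

public class RelocationPropertySketch
{
    public static void main( String[] args )
    {
        Properties properties = new Properties();
        properties.setProperty( "relocated.groupId", "org.example.new" );

        // getProperty( key, default ) falls back to the default when the key is
        // absent, so no containsKey() guard is needed.
        String newGroupId = properties.getProperty( "relocated.groupId", "org.example.old" );

        // remove() quietly returns null when the key is absent; afterwards only
        // unrecognised properties remain for the isEmpty() check that follows.
        properties.remove( "relocated.groupId" );

        System.out.println( newGroupId );           // org.example.new
        System.out.println( properties.isEmpty() ); // true
    }
}

Because remove() is harmless when the key is missing, the old containsKey() guards add nothing, which is what lets each four-line block shrink to two lines.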
@@ -580,31 +564,14 @@ public class DefaultRepositoryConverter
     private boolean testChecksums( Artifact artifact, File file, ArtifactReporter reporter )
         throws IOException, RepositoryConversionException
     {
-        boolean result = true;
+        boolean result;

         try
         {
-            File md5 = new File( file.getParentFile(), file.getName() + ".md5" );
-            if ( md5.exists() )
-            {
-                String checksum = FileUtils.fileRead( md5 );
-                if ( !digester.verifyChecksum( file, checksum, Digester.MD5 ) )
-                {
-                    reporter.addFailure( artifact, getI18NString( "failure.incorrect.md5" ) );
-                    result = false;
-                }
-            }
-
-            File sha1 = new File( file.getParentFile(), file.getName() + ".sha1" );
-            if ( sha1.exists() )
-            {
-                String checksum = FileUtils.fileRead( sha1 );
-                if ( !digester.verifyChecksum( file, checksum, Digester.SHA1 ) )
-                {
-                    reporter.addFailure( artifact, getI18NString( "failure.incorrect.sha1" ) );
-                    result = false;
-                }
-            }
+            result = verifyChecksum( file, file.getName() + ".md5", Digester.MD5, reporter, artifact,
+                                     "failure.incorrect.md5" );
+            result = result && verifyChecksum( file, file.getName() + ".sha1", Digester.SHA1, reporter, artifact,
+                                               "failure.incorrect.sha1" );
         }
         catch ( NoSuchAlgorithmException e )
         {
@@ -613,6 +580,25 @@ public class DefaultRepositoryConverter
         return result;
     }

+    private boolean verifyChecksum( File file, String fileName, String algorithm, ArtifactReporter reporter,
+                                    Artifact artifact, String key )
+        throws IOException, NoSuchAlgorithmException
+    {
+        boolean result = true;
+
+        File md5 = new File( file.getParentFile(), fileName );
+        if ( md5.exists() )
+        {
+            String checksum = FileUtils.fileRead( md5 );
+            if ( !digester.verifyChecksum( file, checksum, algorithm ) )
+            {
+                reporter.addFailure( artifact, getI18NString( key ) );
+                result = false;
+            }
+        }
+        return result;
+    }
+
     private boolean copyArtifact( Artifact artifact, ArtifactRepository targetRepository, ArtifactReporter reporter,
                                   FileTransaction transaction )
         throws RepositoryConversionException
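The new private verifyChecksum method factors the duplicated MD5 and SHA-1 blocks into one helper parameterised by checksum file name, digest algorithm and i18n failure key. The sketch below shows the same idea as a standalone program using only java.security.MessageDigest; it does not reproduce the Digester, ArtifactReporter or getI18NString APIs from the surrounding code, and every name in it is illustrative.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class ChecksumSketch
{
    // One helper covers both .md5 and .sha1: only the algorithm name and the
    // checksum file passed in differ between the two call sites.
    static boolean verifyChecksum( Path file, Path checksumFile, String algorithm )
        throws IOException, NoSuchAlgorithmException
    {
        if ( !Files.exists( checksumFile ) )
        {
            return true; // nothing to check, mirroring the "skip if missing" behaviour above
        }
        // checksum files usually contain "<hex digest> <filename>"; keep the first token
        String expected = new String( Files.readAllBytes( checksumFile ) ).trim().split( "\\s+" )[0];

        byte[] digest = MessageDigest.getInstance( algorithm ).digest( Files.readAllBytes( file ) );
        StringBuilder actual = new StringBuilder();
        for ( byte b : digest )
        {
            actual.append( String.format( "%02x", b ) );
        }
        return actual.toString().equalsIgnoreCase( expected );
    }

    public static void main( String[] args ) throws Exception
    {
        Path artifact = Paths.get( args[0] );
        boolean ok = verifyChecksum( artifact, Paths.get( args[0] + ".md5" ), "MD5" )
            && verifyChecksum( artifact, Paths.get( args[0] + ".sha1" ), "SHA-1" );
        System.out.println( ok ? "checksums ok" : "checksum mismatch" );
    }
}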
@@ -253,12 +253,12 @@ public class RepositoryConverterTest
         // TODO: check 2 warnings (extend and versions) matched on i18n key
     }

-    public void testV4SnapshotPomConvert()
-        throws IOException, RepositoryConversionException
+    private void doTestV4SnapshotPomConvert( String version, String expectedMetadataFileName )
+        throws RepositoryConversionException, IOException
     {
         // test that it is copied as is

-        Artifact artifact = createArtifact( "test", "v4artifact", "1.0.0-SNAPSHOT" );
+        Artifact artifact = createArtifact( "test", "v4artifact", version );
         ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
         File artifactMetadataFile = new File( targetRepository.getBasedir(),
             targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
@@ -291,7 +291,7 @@ public class RepositoryConverterTest

         assertTrue( "Check snapshot metadata created", snapshotMetadataFile.exists() );

-        expectedMetadataFile = getTestFile( "src/test/expected-files/v4-snapshot-metadata.xml" );
+        expectedMetadataFile = getTestFile( expectedMetadataFileName );

         compareFiles( expectedMetadataFile, snapshotMetadataFile );
     }
@@ -339,47 +339,21 @@ public class RepositoryConverterTest
         compareFiles( expectedMetadataFile, snapshotMetadataFile );
     }

+    public void testV4SnapshotPomConvert()
+        throws IOException, RepositoryConversionException
+    {
+        doTestV4SnapshotPomConvert( "1.0.0-SNAPSHOT", "src/test/expected-files/v4-snapshot-metadata.xml" );
+
+        assertTrue( true );
+    }
+
     public void testV4TimestampedSnapshotPomConvert()
         throws IOException, RepositoryConversionException
     {
-        // test that it is copied as is
-
-        Artifact artifact = createArtifact( "test", "v4artifact", "1.0.0-20060111.120115-1" );
-        ArtifactMetadata artifactMetadata = new ArtifactRepositoryMetadata( artifact );
-        File artifactMetadataFile = new File( targetRepository.getBasedir(),
-            targetRepository.pathOfRemoteRepositoryMetadata( artifactMetadata ) );
-        artifactMetadataFile.delete();
-
-        ArtifactMetadata snapshotMetadata = new SnapshotArtifactRepositoryMetadata( artifact );
-        File snapshotMetadataFile = new File( targetRepository.getBasedir(),
-            targetRepository.pathOfRemoteRepositoryMetadata( snapshotMetadata ) );
-        snapshotMetadataFile.delete();
-
-        repositoryConverter.convert( artifact, targetRepository, reporter );
-        checkSuccess();
-
-        File artifactFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
-        assertTrue( "Check artifact created", artifactFile.exists() );
-        assertTrue( "Check artifact matches", FileUtils.contentEquals( artifactFile, artifact.getFile() ) );
-
-        artifact = createPomArtifact( artifact );
-        File pomFile = new File( targetRepository.getBasedir(), targetRepository.pathOf( artifact ) );
-        File sourcePomFile = new File( sourceRepository.getBasedir(), sourceRepository.pathOf( artifact ) );
-        assertTrue( "Check POM created", pomFile.exists() );
-
-        compareFiles( sourcePomFile, pomFile );
-
-        assertTrue( "Check artifact metadata created", artifactMetadataFile.exists() );
-
-        File expectedMetadataFile = getTestFile( "src/test/expected-files/v4-snapshot-artifact-metadata.xml" );
-
-        compareFiles( expectedMetadataFile, artifactMetadataFile );
-
-        assertTrue( "Check snapshot metadata created", snapshotMetadataFile.exists() );
-
-        expectedMetadataFile = getTestFile( "src/test/expected-files/v4-timestamped-snapshot-metadata.xml" );
-
-        compareFiles( expectedMetadataFile, snapshotMetadataFile );
+        doTestV4SnapshotPomConvert( "1.0.0-20060111.120115-1",
+                                    "src/test/expected-files/v4-timestamped-snapshot-metadata.xml" );
+
+        assertTrue( true );
     }

     public void testV3TimestampedSnapshotPomConvert()
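The test hunks above replace two copy-pasted snapshot tests with a single private helper that takes the version and the expected metadata file as parameters. A minimal JUnit 3-style sketch of that shape (class, method and file names below are hypothetical, not the Archiva test class):

import junit.framework.TestCase;

public class SnapshotConvertSketchTest
    extends TestCase
{
    public void testPlainSnapshot()
    {
        doTestSnapshotConvert( "1.0.0-SNAPSHOT", "src/test/expected-files/v4-snapshot-metadata.xml" );
    }

    public void testTimestampedSnapshot()
    {
        doTestSnapshotConvert( "1.0.0-20060111.120115-1",
                               "src/test/expected-files/v4-timestamped-snapshot-metadata.xml" );
    }

    // Everything the two tests used to duplicate lives here; only the values
    // that actually differ are passed in as parameters.
    private void doTestSnapshotConvert( String version, String expectedMetadataFileName )
    {
        assertTrue( "version should look like a snapshot", version.startsWith( "1.0.0-" ) );
        assertTrue( "expected file should be an XML file", expectedMetadataFileName.endsWith( ".xml" ) );
        // ... convert the artifact and compare the generated metadata against the expected file ...
    }
}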
@@ -276,6 +276,7 @@ public class DefaultRepositoryIndexSearcher
         //group metadata
         if ( metadataType.equals( MetadataRepositoryIndex.GROUP_METADATA ) )
         {
+            // TODO! use pathOfMetadata
             is = new FileInputStream(
                 new File( index.getRepository().getBasedir() + groupId.replace( '.', '/' ) + "/" + filename ) );
             repoMetadata = new GroupRepositoryMetadata( groupId );
@@ -284,6 +285,7 @@ public class DefaultRepositoryIndexSearcher
         //artifact metadata
         else if ( metadataType.equals( MetadataRepositoryIndex.ARTIFACT_METADATA ) )
         {
+            // TODO! use pathOfMetadata
             is = new FileInputStream( new File( index.getRepository().getBasedir() + groupId.replace( '.', '/' ) + "/" +
                 artifactId + "/" + filename ) );
             repoMetadata =
@@ -293,6 +295,7 @@ public class DefaultRepositoryIndexSearcher
         //snapshot/version metadata
         else if ( metadataType.equals( MetadataRepositoryIndex.SNAPSHOT_METADATA ) )
         {
+            // TODO! use pathOfMetadata
             is = new FileInputStream( new File( index.getRepository().getBasedir() + groupId.replace( '.', '/' ) + "/" +
                 artifactId + "/" + version + "/" + filename ) );
             repoMetadata = new SnapshotArtifactRepositoryMetadata(
@@ -153,6 +153,7 @@ public class MetadataRepositoryIndex
         {
             doc.add( Field.Text( FLD_VERSION, "" ) );
         }
+        // TODO! do we need to add all these empty fields?
         doc.add( Field.Text( FLD_DOCTYPE, METADATA ) );
         doc.add( Field.Keyword( FLD_PACKAGING, "" ) );
         doc.add( Field.Text( FLD_SHA1, "" ) );
@@ -140,6 +140,7 @@ public class PomRepositoryIndex
             doc.add( Field.Text( FLD_PLUGINS_ALL, "" ) );
         }
         doc.add( Field.UnIndexed( FLD_DOCTYPE, POM ) );
+        // TODO! do we need to add all these empty fields?
         doc.add( Field.Text( FLD_PLUGINPREFIX, "" ) );
         doc.add( Field.Text( FLD_LASTUPDATE, "" ) );
         doc.add( Field.Text( FLD_NAME, "" ) );
@@ -35,7 +35,9 @@ import java.security.NoSuchAlgorithmException;
 public class ChecksumArtifactReporter
     implements ArtifactReportProcessor
 {
-    /** @plexus.requirement */
+    /**
+     * @plexus.requirement
+     */
     private Digester digester;

     /**
@@ -60,60 +62,39 @@ public class ChecksumArtifactReporter
         String path = repository.pathOf( artifact );
         File file = new File( repository.getBasedir(), path );

-        File md5File = new File( repository.getBasedir(), path + ".md5" );
-        if ( md5File.exists() )
-        {
-            try
-            {
-                if ( digester.verifyChecksum( file, FileUtils.fileRead( md5File ), Digester.MD5 ) )
-                {
-                    reporter.addSuccess( artifact );
-                }
-                else
-                {
-                    reporter.addFailure( artifact, "MD5 checksum does not match." );
-                }
-            }
-            catch ( NoSuchAlgorithmException e )
-            {
-                reporter.addFailure( artifact, "Unable to read MD5: " + e.getMessage() );
-            }
-            catch ( IOException e )
-            {
-                reporter.addFailure( artifact, "Unable to read MD5: " + e.getMessage() );
-            }
-        }
-        else
-        {
-            reporter.addFailure( artifact, "MD5 checksum file does not exist." );
-        }
+        verifyChecksum( repository, path + ".md5", file, Digester.MD5, reporter, artifact );
+        verifyChecksum( repository, path + ".sha1", file, Digester.SHA1, reporter, artifact );
+    }

-        File sha1File = new File( repository.getBasedir(), path + ".sha1" );
-        if ( sha1File.exists() )
+    private void verifyChecksum( ArtifactRepository repository, String path, File file, String checksumAlgorithm,
+                                 ArtifactReporter reporter, Artifact artifact )
+    {
+        File checksumFile = new File( repository.getBasedir(), path );
+        if ( checksumFile.exists() )
         {
             try
             {
-                if ( digester.verifyChecksum( file, FileUtils.fileRead( sha1File ), Digester.SHA1 ) )
+                if ( digester.verifyChecksum( file, FileUtils.fileRead( checksumFile ), checksumAlgorithm ) )
                 {
                     reporter.addSuccess( artifact );
                 }
                 else
                 {
-                    reporter.addFailure( artifact, "SHA-1 checksum does not match." );
+                    reporter.addFailure( artifact, checksumAlgorithm + " checksum does not match." );
                 }
             }
             catch ( NoSuchAlgorithmException e )
             {
-                reporter.addFailure( artifact, "Unable to read SHA-1: " + e.getMessage() );
+                reporter.addFailure( artifact, "Unable to read " + checksumAlgorithm + ": " + e.getMessage() );
             }
             catch ( IOException e )
             {
-                reporter.addFailure( artifact, "Unable to read SHA-1: " + e.getMessage() );
+                reporter.addFailure( artifact, "Unable to read " + checksumAlgorithm + ": " + e.getMessage() );
             }
         }
         else
         {
-            reporter.addFailure( artifact, "SHA-1 checksum file does not exist." );
+            reporter.addFailure( artifact, checksumAlgorithm + " checksum file does not exist." );
         }
     }
 }
@@ -34,7 +34,9 @@ import java.security.NoSuchAlgorithmException;
 public class ChecksumMetadataReporter
     implements MetadataReportProcessor
 {
-    /** @plexus.requirement */
+    /**
+     * @plexus.requirement
+     */
     private Digester digester;

     /**
@@ -54,62 +56,41 @@ public class ChecksumMetadataReporter
         String path = repository.pathOfRemoteRepositoryMetadata( metadata );
         File file = new File( repository.getBasedir(), path );

-        File md5File = new File( repository.getBasedir(), path + ".md5" );
-        if ( md5File.exists() )
-        {
-            try
-            {
-                if ( digester.verifyChecksum( file, FileUtils.fileRead( md5File ), Digester.MD5 ) )
-                {
-                    reporter.addSuccess( metadata );
-                }
-                else
-                {
-                    reporter.addFailure( metadata, "MD5 checksum does not match." );
-                }
-            }
-            catch ( NoSuchAlgorithmException e )
-            {
-                reporter.addFailure( metadata, "Unable to read MD5: " + e.getMessage() );
-            }
-            catch ( IOException e )
-            {
-                reporter.addFailure( metadata, "Unable to read MD5: " + e.getMessage() );
-            }
-        }
-        else
-        {
-            reporter.addFailure( metadata, "MD5 checksum file does not exist." );
-        }
-
-        File sha1File = new File( repository.getBasedir(), path + ".sha1" );
-        if ( sha1File.exists() )
-        {
-            try
-            {
-                if ( digester.verifyChecksum( file, FileUtils.fileRead( sha1File ), Digester.SHA1 ) )
-                {
-                    reporter.addSuccess( metadata );
-                }
-                else
-                {
-                    reporter.addFailure( metadata, "SHA-1 checksum does not match." );
-                }
-            }
-            catch ( NoSuchAlgorithmException e )
-            {
-                reporter.addFailure( metadata, "Unable to read SHA1: " + e.getMessage() );
-            }
-            catch ( IOException e )
-            {
-                reporter.addFailure( metadata, "Unable to read SHA1: " + e.getMessage() );
-            }
-        }
-        else
-        {
-            reporter.addFailure( metadata, "SHA-1 checksum file does not exist." );
-        }
+        verifyChecksum( repository, path + ".md5", file, Digester.MD5, reporter, metadata );
+        verifyChecksum( repository, path + ".sha1", file, Digester.SHA1, reporter, metadata );

     }

+    private void verifyChecksum( ArtifactRepository repository, String path, File file, String checksumAlgorithm,
+                                 ArtifactReporter reporter, RepositoryMetadata metadata )
+    {
+        File checksumFile = new File( repository.getBasedir(), path );
+        if ( checksumFile.exists() )
+        {
+            try
+            {
+                if ( digester.verifyChecksum( file, FileUtils.fileRead( checksumFile ), checksumAlgorithm ) )
+                {
+                    reporter.addSuccess( metadata );
+                }
+                else
+                {
+                    reporter.addFailure( metadata, checksumAlgorithm + " checksum does not match." );
+                }
+            }
+            catch ( NoSuchAlgorithmException e )
+            {
+                reporter.addFailure( metadata, "Unable to read " + checksumAlgorithm + ": " + e.getMessage() );
+            }
+            catch ( IOException e )
+            {
+                reporter.addFailure( metadata, "Unable to read " + checksumAlgorithm + ": " + e.getMessage() );
+            }
+        }
+        else
+        {
+            reporter.addFailure( metadata, checksumAlgorithm + " checksum file does not exist." );
+        }
+    }
+
 }
@@ -152,10 +152,11 @@ public class DiscovererExecution

     /**
      * Method that discovers and indexes artifacts, poms and metadata in a default
-     * m2 repository structure
+     * m2 repository structure.
      *
      * @throws MalformedURLException
      * @throws RepositoryIndexException
+     * @todo why is this any different from legacy?
      */
     protected void executeDiscovererInDefaultRepo()
         throws MalformedURLException, RepositoryIndexException