[MRM-1196]

o fix or generate checksums when artifacts are deployed from the UI
o create unit tests for UploadAction


git-svn-id: https://svn.apache.org/repos/asf/archiva/trunk@783687 13f79535-47bb-0310-9956-ffa450edef68
Maria Odea B. Ching 2009-06-11 08:16:06 +00:00
parent 03eab14b77
commit ea8c1fa4c9
5 changed files with 483 additions and 16 deletions
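The heart of this change is a small helper that (re)generates the checksum files sitting next to a deployed file, built on ChecksummedFile from archiva-checksum. A minimal standalone sketch of that pattern follows; the class name, the List field type, and the SHA-1/MD5 pair are assumptions inferred from the .sha1/.md5 files the new tests check for, since the diff only shows the call fixChecksums( algorithms ).

import java.io.File;
import java.util.Arrays;
import java.util.List;

import org.apache.archiva.checksum.ChecksumAlgorithm;
import org.apache.archiva.checksum.ChecksummedFile;

// Illustration only -- not the real UploadAction. The collection type and the
// SHA-1/MD5 pair are assumptions; the diff itself only shows fixChecksums( algorithms ).
public class ChecksumFixSketch
{
    private final List<ChecksumAlgorithm> algorithms =
        Arrays.asList( ChecksumAlgorithm.SHA1, ChecksumAlgorithm.MD5 );

    // Write (or correct) file.sha1 and file.md5 next to the given file.
    public void fixChecksums( File file )
    {
        ChecksummedFile checksum = new ChecksummedFile( file );
        checksum.fixChecksums( algorithms );
    }
}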

UploadAction.java

@@ -35,7 +35,7 @@ import java.util.Date;
import java.util.List;
import java.util.TimeZone;
import org.apache.archiva.checksum.ChecksumAlgorithm;
import org.apache.archiva.checksum.ChecksumAlgorithm;
import org.apache.archiva.checksum.ChecksummedFile;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.lang.StringUtils;
@@ -60,7 +60,6 @@ import org.apache.maven.archiva.repository.metadata.RepositoryMetadataWriter;
import org.apache.maven.archiva.repository.project.ProjectModelException;
import org.apache.maven.archiva.repository.project.ProjectModelWriter;
import org.apache.maven.archiva.repository.project.writers.ProjectModel400Writer;
import org.apache.maven.archiva.repository.scanner.RepositoryContentConsumers;
import org.apache.maven.archiva.scheduled.ArchivaTaskScheduler;
import org.apache.maven.archiva.scheduled.tasks.RepositoryTask;
import org.apache.maven.archiva.scheduled.tasks.TaskCreator;
@@ -80,11 +79,6 @@ public class UploadAction
extends PlexusActionSupport
implements Validateable, Preparable, Auditable
{
/**
* @plexus.requirement
*/
private RepositoryContentConsumers consumers;
/**
* The groupId of the artifact to be deployed.
*/
@@ -352,10 +346,9 @@ public class UploadAction
}
try
{
{
copyFile( artifactFile, targetPath, filename );
queueRepositoryTask( repository.getId(), repository.toFile( artifactReference ) );
//consumers.executeConsumers( repoConfig, repository.toFile( artifactReference ) );
}
catch ( IOException ie )
{
@@ -375,8 +368,8 @@ public class UploadAction
try
{
File generatedPomFile = createPom( targetPath, pomFilename );
queueRepositoryTask( repoConfig.getId(), generatedPomFile );
//consumers.executeConsumers( repoConfig, generatedPomFile );
fixChecksums( generatedPomFile );
queueRepositoryTask( repoConfig.getId(), generatedPomFile );
}
catch ( IOException ie )
{
@@ -393,10 +386,9 @@ public class UploadAction
if ( pomFile != null && pomFile.length() > 0 )
{
try
{
{
copyFile( pomFile, targetPath, pomFilename );
queueRepositoryTask( repoConfig.getId(), new File( targetPath, pomFilename ) );
//consumers.executeConsumers( repoConfig, new File( targetPath, pomFilename ) );
}
catch ( IOException ie )
{
@@ -429,7 +421,13 @@ public class UploadAction
return ERROR;
}
}
private void fixChecksums( File file )
{
ChecksummedFile checksum = new ChecksummedFile( file );
checksum.fixChecksums( algorithms );
}
private void copyFile( File sourceFile, File targetPath, String targetFilename )
throws IOException
{
@@ -450,6 +448,8 @@ public class UploadAction
out.close();
input.close();
}
fixChecksums( new File( targetPath, targetFilename ) );
}
private File createPom( File targetPath, String filename )
@@ -543,8 +543,7 @@ public class UploadAction
}
RepositoryMetadataWriter.write( metadata, metadataFile );
ChecksummedFile checksum = new ChecksummedFile( metadataFile );
checksum.fixChecksums( algorithms );
fixChecksums( metadataFile );
}
public void validate()
@@ -614,4 +613,19 @@ public class UploadAction
"']." );
}
}
public void setScheduler( ArchivaTaskScheduler scheduler )
{
this.scheduler = scheduler;
}
public void setRepositoryFactory( RepositoryContentFactory repositoryFactory )
{
this.repositoryFactory = repositoryFactory;
}
public void setConfiguration( ArchivaConfiguration configuration )
{
this.configuration = configuration;
}
}

UploadActionTest.java

@@ -0,0 +1,447 @@
package org.apache.maven.archiva.web.action;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.File;
import java.io.IOException;
import org.apache.archiva.checksum.ChecksumAlgorithm;
import org.apache.archiva.checksum.ChecksummedFile;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.Configuration;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
import org.apache.maven.archiva.repository.RepositoryContentFactory;
import org.apache.maven.archiva.repository.RepositoryNotFoundException;
import org.apache.maven.archiva.repository.content.ManagedDefaultRepositoryContent;
import org.apache.maven.archiva.repository.metadata.MetadataTools;
import org.apache.maven.archiva.scheduled.ArchivaTaskScheduler;
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
import org.easymock.MockControl;
import org.easymock.classextension.MockClassControl;
import com.opensymphony.xwork2.Action;
/**
* UploadActionTest
*/
public class UploadActionTest
extends PlexusInSpringTestCase
{
private ArchivaTaskScheduler scheduler;
private MockControl schedulerControl;
private UploadAction uploadAction;
private ArchivaConfiguration archivaConfig;
private MockControl archivaConfigControl;
private RepositoryContentFactory repoFactory;
private MockControl repoFactoryControl;
private static final String REPOSITORY_ID = "test-repo";
private Configuration config;
public void setUp()
throws Exception
{
super.setUp();
schedulerControl = MockControl.createControl( ArchivaTaskScheduler.class );
scheduler = (ArchivaTaskScheduler) schedulerControl.getMock();
archivaConfigControl = MockControl.createControl( ArchivaConfiguration.class );
archivaConfig = (ArchivaConfiguration) archivaConfigControl.getMock();
repoFactoryControl = MockClassControl.createControl( RepositoryContentFactory.class );
repoFactory = (RepositoryContentFactory) repoFactoryControl.getMock();
uploadAction = new UploadAction();
uploadAction.setScheduler( scheduler );
uploadAction.setConfiguration( archivaConfig );
uploadAction.setRepositoryFactory( repoFactory );
File testRepo = new File( getBasedir(), "target/test-classes/test-repo" );
testRepo.mkdirs();
assertTrue( testRepo.exists() );
config = new Configuration();
ManagedRepositoryConfiguration repoConfig = new ManagedRepositoryConfiguration();
repoConfig.setId( REPOSITORY_ID );
repoConfig.setLayout( "default" );
repoConfig.setLocation( testRepo.getPath() );
repoConfig.setName( REPOSITORY_ID );
config.addManagedRepository( repoConfig );
}
public void tearDown()
throws Exception
{
File testRepo = new File( config.findManagedRepositoryById( REPOSITORY_ID ).getLocation() );
FileUtils.deleteDirectory( testRepo );
assertFalse( testRepo.exists() );
super.tearDown();
}
private void setUploadParameters( String version, String classifier, File artifact, File pomFile,
boolean generatePom )
{
uploadAction.setRepositoryId( REPOSITORY_ID );
uploadAction.setGroupId( "org.apache.archiva" );
uploadAction.setArtifactId( "artifact-upload" );
uploadAction.setVersion( version );
uploadAction.setPackaging( "jar" );
uploadAction.setClassifier( classifier );
uploadAction.setArtifact( artifact );
uploadAction.setPom( pomFile );
uploadAction.setGeneratePom( generatePom );
}
private void assertAllArtifactsIncludingSupportArtifactsArePresent( String repoLocation )
{
assertTrue( new File( repoLocation, "/org/apache/archiva/artifact-upload/1.0/artifact-upload-1.0.jar" ).exists() );
assertTrue( new File( repoLocation, "/org/apache/archiva/artifact-upload/1.0/artifact-upload-1.0.jar.sha1" ).exists() );
assertTrue( new File( repoLocation, "/org/apache/archiva/artifact-upload/1.0/artifact-upload-1.0.jar.md5" ).exists() );
assertTrue( new File( repoLocation, "/org/apache/archiva/artifact-upload/1.0/artifact-upload-1.0.pom" ).exists() );
assertTrue( new File( repoLocation, "/org/apache/archiva/artifact-upload/1.0/artifact-upload-1.0.pom.sha1" ).exists() );
assertTrue( new File( repoLocation, "/org/apache/archiva/artifact-upload/1.0/artifact-upload-1.0.pom.md5" ).exists() );
assertTrue( new File( repoLocation, "/org/apache/archiva/artifact-upload/" + MetadataTools.MAVEN_METADATA ).exists() );
assertTrue( new File( repoLocation, "/org/apache/archiva/artifact-upload/" + MetadataTools.MAVEN_METADATA +
".sha1" ).exists() );
assertTrue( new File( repoLocation, "/org/apache/archiva/artifact-upload/" + MetadataTools.MAVEN_METADATA +
".md5" ).exists() );
}
private void verifyChecksums( String repoLocation )
throws IOException
{
// verify checksums of jar file
ChecksummedFile checksum =
new ChecksummedFile( new File( repoLocation,
"/org/apache/archiva/artifact-upload/1.0/artifact-upload-1.0.jar" ) );
String sha1 = checksum.calculateChecksum( ChecksumAlgorithm.SHA1 );
String md5 = checksum.calculateChecksum( ChecksumAlgorithm.MD5 );
String contents =
FileUtils.readFileToString( new File( repoLocation,
"/org/apache/archiva/artifact-upload/1.0/artifact-upload-1.0.jar.sha1" ) );
assertTrue( StringUtils.contains( contents, sha1 ) );
contents =
FileUtils.readFileToString( new File( repoLocation,
"/org/apache/archiva/artifact-upload/1.0/artifact-upload-1.0.jar.md5" ) );
assertTrue( StringUtils.contains( contents, md5 ) );
// verify checksums of pom file
checksum =
new ChecksummedFile( new File( repoLocation,
"/org/apache/archiva/artifact-upload/1.0/artifact-upload-1.0.pom" ) );
sha1 = checksum.calculateChecksum( ChecksumAlgorithm.SHA1 );
md5 = checksum.calculateChecksum( ChecksumAlgorithm.MD5 );
contents =
FileUtils.readFileToString( new File( repoLocation,
"/org/apache/archiva/artifact-upload/1.0/artifact-upload-1.0.pom.sha1" ) );
assertTrue( StringUtils.contains( contents, sha1 ) );
contents =
FileUtils.readFileToString( new File( repoLocation,
"/org/apache/archiva/artifact-upload/1.0/artifact-upload-1.0.pom.md5" ) );
assertTrue( StringUtils.contains( contents, md5 ) );
// verify checksums of metadata file
checksum =
new ChecksummedFile( new File( repoLocation, "/org/apache/archiva/artifact-upload/" +
MetadataTools.MAVEN_METADATA ) );
sha1 = checksum.calculateChecksum( ChecksumAlgorithm.SHA1 );
md5 = checksum.calculateChecksum( ChecksumAlgorithm.MD5 );
contents =
FileUtils.readFileToString( new File( repoLocation, "/org/apache/archiva/artifact-upload/" +
MetadataTools.MAVEN_METADATA + ".sha1" ) );
assertTrue( StringUtils.contains( contents, sha1 ) );
contents =
FileUtils.readFileToString( new File( repoLocation, "/org/apache/archiva/artifact-upload/" +
MetadataTools.MAVEN_METADATA + ".md5" ) );
assertTrue( StringUtils.contains( contents, md5 ) );
}
public void testArtifactUploadWithPomSuccessful()
throws Exception
{
setUploadParameters( "1.0", null,
new File( getBasedir(),
"target/test-classes/upload-artifact-test/artifact-to-be-uploaded.jar" ),
new File( getBasedir(), "target/test-classes/upload-artifact-test/pom.xml" ), false );
ManagedRepositoryContent content = new ManagedDefaultRepositoryContent();
content.setRepository( config.findManagedRepositoryById( REPOSITORY_ID ) );
archivaConfigControl.expectAndReturn( archivaConfig.getConfiguration(), config );
repoFactoryControl.expectAndReturn( repoFactory.getManagedRepositoryContent( REPOSITORY_ID ), content );
archivaConfigControl.replay();
repoFactoryControl.replay();
String returnString = uploadAction.doUpload();
assertEquals( Action.SUCCESS, returnString );
archivaConfigControl.verify();
repoFactoryControl.verify();
String repoLocation = config.findManagedRepositoryById( REPOSITORY_ID ).getLocation();
assertAllArtifactsIncludingSupportArtifactsArePresent( repoLocation );
verifyChecksums( repoLocation );
}
public void testArtifactUploadWithClassifier()
throws Exception
{
setUploadParameters( "1.0", "tests",
new File( getBasedir(),
"target/test-classes/upload-artifact-test/artifact-to-be-uploaded.jar" ), null,
false );
ManagedRepositoryContent content = new ManagedDefaultRepositoryContent();
content.setRepository( config.findManagedRepositoryById( REPOSITORY_ID ) );
archivaConfigControl.expectAndReturn( archivaConfig.getConfiguration(), config );
repoFactoryControl.expectAndReturn( repoFactory.getManagedRepositoryContent( REPOSITORY_ID ), content );
archivaConfigControl.replay();
repoFactoryControl.replay();
String returnString = uploadAction.doUpload();
assertEquals( Action.SUCCESS, returnString );
archivaConfigControl.verify();
repoFactoryControl.verify();
String repoLocation = config.findManagedRepositoryById( REPOSITORY_ID ).getLocation();
assertTrue( new File( repoLocation, "/org/apache/archiva/artifact-upload/1.0/artifact-upload-1.0-tests.jar" ).exists() );
assertTrue( new File( repoLocation,
"/org/apache/archiva/artifact-upload/1.0/artifact-upload-1.0-tests.jar.sha1" ).exists() );
assertTrue( new File( repoLocation, "/org/apache/archiva/artifact-upload/1.0/artifact-upload-1.0-tests.jar.md5" ).exists() );
assertFalse( new File( repoLocation, "/org/apache/archiva/artifact-upload/1.0/artifact-upload-1.0.pom" ).exists() );
assertFalse( new File( repoLocation, "/org/apache/archiva/artifact-upload/1.0/artifact-upload-1.0.pom.sha1" ).exists() );
assertFalse( new File( repoLocation, "/org/apache/archiva/artifact-upload/1.0/artifact-upload-1.0.pom.md5" ).exists() );
assertTrue( new File( repoLocation, "/org/apache/archiva/artifact-upload/" + MetadataTools.MAVEN_METADATA ).exists() );
assertTrue( new File( repoLocation, "/org/apache/archiva/artifact-upload/" + MetadataTools.MAVEN_METADATA +
".sha1" ).exists() );
assertTrue( new File( repoLocation, "/org/apache/archiva/artifact-upload/" + MetadataTools.MAVEN_METADATA +
".md5" ).exists() );
// verify checksums of jar file
ChecksummedFile checksum =
new ChecksummedFile( new File( repoLocation,
"/org/apache/archiva/artifact-upload/1.0/artifact-upload-1.0-tests.jar" ) );
String sha1 = checksum.calculateChecksum( ChecksumAlgorithm.SHA1 );
String md5 = checksum.calculateChecksum( ChecksumAlgorithm.MD5 );
String contents =
FileUtils.readFileToString( new File( repoLocation,
"/org/apache/archiva/artifact-upload/1.0/artifact-upload-1.0-tests.jar.sha1" ) );
assertTrue( StringUtils.contains( contents, sha1 ) );
contents =
FileUtils.readFileToString( new File( repoLocation,
"/org/apache/archiva/artifact-upload/1.0/artifact-upload-1.0-tests.jar.md5" ) );
assertTrue( StringUtils.contains( contents, md5 ) );
// verify checksums of metadata file
checksum =
new ChecksummedFile( new File( repoLocation, "/org/apache/archiva/artifact-upload/" +
MetadataTools.MAVEN_METADATA ) );
sha1 = checksum.calculateChecksum( ChecksumAlgorithm.SHA1 );
md5 = checksum.calculateChecksum( ChecksumAlgorithm.MD5 );
contents =
FileUtils.readFileToString( new File( repoLocation, "/org/apache/archiva/artifact-upload/" +
MetadataTools.MAVEN_METADATA + ".sha1" ) );
assertTrue( StringUtils.contains( contents, sha1 ) );
contents =
FileUtils.readFileToString( new File( repoLocation, "/org/apache/archiva/artifact-upload/" +
MetadataTools.MAVEN_METADATA + ".md5" ) );
assertTrue( StringUtils.contains( contents, md5 ) );
}
public void testArtifactUploadGeneratePomSuccessful()
throws Exception
{
setUploadParameters( "1.0", null,
new File( getBasedir(),
"target/test-classes/upload-artifact-test/artifact-to-be-uploaded.jar" ), null,
true );
ManagedRepositoryContent content = new ManagedDefaultRepositoryContent();
content.setRepository( config.findManagedRepositoryById( REPOSITORY_ID ) );
archivaConfigControl.expectAndReturn( archivaConfig.getConfiguration(), config );
repoFactoryControl.expectAndReturn( repoFactory.getManagedRepositoryContent( REPOSITORY_ID ), content );
archivaConfigControl.replay();
repoFactoryControl.replay();
String returnString = uploadAction.doUpload();
assertEquals( Action.SUCCESS, returnString );
archivaConfigControl.verify();
repoFactoryControl.verify();
String repoLocation = config.findManagedRepositoryById( REPOSITORY_ID ).getLocation();
assertAllArtifactsIncludingSupportArtifactsArePresent( repoLocation );
verifyChecksums( repoLocation );
}
public void testArtifactUploadFailedRepositoryNotFound()
throws Exception
{
setUploadParameters( "1.0", null,
new File( getBasedir(),
"target/test-classes/upload-artifact-test/artifact-to-be-uploaded.jar" ), null,
false );
archivaConfigControl.expectAndReturn( archivaConfig.getConfiguration(), config );
repoFactoryControl.expectAndThrow( repoFactory.getManagedRepositoryContent( REPOSITORY_ID ),
new RepositoryNotFoundException() );
archivaConfigControl.replay();
repoFactoryControl.replay();
String returnString = uploadAction.doUpload();
assertEquals( Action.ERROR, returnString );
archivaConfigControl.verify();
repoFactoryControl.verify();
String repoLocation = config.findManagedRepositoryById( REPOSITORY_ID ).getLocation();
assertFalse( new File( repoLocation, "/org/apache/archiva/artifact-upload/1.0/artifact-upload-1.0.jar" ).exists() );
assertFalse( new File( repoLocation, "/org/apache/archiva/artifact-upload/1.0/artifact-upload-1.0.pom" ).exists() );
assertFalse( new File( repoLocation, "/org/apache/archiva/artifact-upload/" + MetadataTools.MAVEN_METADATA ).exists() );
}
public void testArtifactUploadSnapshots()
throws Exception
{
setUploadParameters( "1.0-SNAPSHOT", null,
new File( getBasedir(),
"target/test-classes/upload-artifact-test/artifact-to-be-uploaded.jar" ), null,
true );
ManagedRepositoryContent content = new ManagedDefaultRepositoryContent();
content.setRepository( config.findManagedRepositoryById( REPOSITORY_ID ) );
archivaConfigControl.expectAndReturn( archivaConfig.getConfiguration(), config );
repoFactoryControl.expectAndReturn( repoFactory.getManagedRepositoryContent( REPOSITORY_ID ), content );
archivaConfigControl.replay();
repoFactoryControl.replay();
String returnString = uploadAction.doUpload();
assertEquals( Action.SUCCESS, returnString );
archivaConfigControl.verify();
repoFactoryControl.verify();
String repoLocation = config.findManagedRepositoryById( REPOSITORY_ID ).getLocation();
assertEquals( 6, new File( repoLocation, "/org/apache/archiva/artifact-upload/1.0-SNAPSHOT/" ).list().length );
assertTrue( new File( repoLocation, "/org/apache/archiva/artifact-upload/" + MetadataTools.MAVEN_METADATA ).exists() );
assertTrue( new File( repoLocation, "/org/apache/archiva/artifact-upload/" + MetadataTools.MAVEN_METADATA +
".sha1" ).exists() );
assertTrue( new File( repoLocation, "/org/apache/archiva/artifact-upload/" + MetadataTools.MAVEN_METADATA +
".md5" ).exists() );
}
public void testChecksumIsCorrectWhenArtifactIsReUploaded()
throws Exception
{
setUploadParameters( "1.0", null,
new File( getBasedir(),
"target/test-classes/upload-artifact-test/artifact-to-be-uploaded.jar" ), null,
true );
ManagedRepositoryContent content = new ManagedDefaultRepositoryContent();
content.setRepository( config.findManagedRepositoryById( REPOSITORY_ID ) );
archivaConfigControl.expectAndReturn( archivaConfig.getConfiguration(), config );
repoFactoryControl.expectAndReturn( repoFactory.getManagedRepositoryContent( REPOSITORY_ID ), content );
archivaConfigControl.replay();
repoFactoryControl.replay();
String returnString = uploadAction.doUpload();
assertEquals( Action.SUCCESS, returnString );
archivaConfigControl.verify();
repoFactoryControl.verify();
archivaConfigControl.reset();
repoFactoryControl.reset();
String repoLocation = config.findManagedRepositoryById( REPOSITORY_ID ).getLocation();
assertAllArtifactsIncludingSupportArtifactsArePresent( repoLocation );
verifyChecksums( repoLocation );
// RE-upload artifact
setUploadParameters( "1.0", null,
new File( getBasedir(),
"target/test-classes/upload-artifact-test/artifact-to-be-reuploaded.jar" ),
null, true );
archivaConfigControl.expectAndReturn( archivaConfig.getConfiguration(), config );
repoFactoryControl.expectAndReturn( repoFactory.getManagedRepositoryContent( REPOSITORY_ID ), content );
archivaConfigControl.replay();
repoFactoryControl.replay();
returnString = uploadAction.doUpload();
assertEquals( Action.SUCCESS, returnString );
archivaConfigControl.verify();
repoFactoryControl.verify();
repoLocation = config.findManagedRepositoryById( REPOSITORY_ID ).getLocation();
assertAllArtifactsIncludingSupportArtifactsArePresent( repoLocation );
verifyChecksums( repoLocation );
}
}
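For reference, the check performed by verifyChecksums() above can be reproduced outside the test as a small standalone program. The sketch below is hedged: the artifact path is a placeholder, and everything else reuses the same ChecksummedFile, FileUtils, and StringUtils calls the test already relies on.

import java.io.File;
import java.io.IOException;

import org.apache.archiva.checksum.ChecksumAlgorithm;
import org.apache.archiva.checksum.ChecksummedFile;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;

public class ChecksumVerifySketch
{
    public static void main( String[] args )
        throws IOException
    {
        // Placeholder location; point this at a deployed artifact in a managed repository.
        File artifact =
            new File( "path/to/repo/org/apache/archiva/artifact-upload/1.0/artifact-upload-1.0.jar" );

        // Re-compute the digests from the artifact itself.
        ChecksummedFile checksum = new ChecksummedFile( artifact );
        String sha1 = checksum.calculateChecksum( ChecksumAlgorithm.SHA1 );
        String md5 = checksum.calculateChecksum( ChecksumAlgorithm.MD5 );

        // Checksum files typically contain "<digest>  <filename>", so a contains()
        // comparison is used, mirroring the assertions in UploadActionTest.
        String sha1File = FileUtils.readFileToString( new File( artifact.getPath() + ".sha1" ) );
        String md5File = FileUtils.readFileToString( new File( artifact.getPath() + ".md5" ) );

        System.out.println( "sha1 matches: " + StringUtils.contains( sha1File, sha1 ) );
        System.out.println( "md5 matches: " + StringUtils.contains( md5File, md5 ) );
    }
}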

pom.xml (new test resource)

@@ -0,0 +1,6 @@
<project>
<groupId>org.apache.archiva</groupId>
<artifactId>artifact-to-be-uploaded</artifactId>
<version>1.0</version>
<packaging>jar</packaging>
</project>