mirror of https://github.com/apache/archiva.git
[MRM-1330] Refactor access around the concept of a "repository session"; see the Javadoc for notes. The session should be the single entry point for application code using the metadata repository, metadata resolution, or storage access. A session must be explicitly closed (and its modifications saved) to accommodate certain storage mechanisms.
git-svn-id: https://svn.apache.org/repos/asf/archiva/trunk@1053542 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
parent
a44b2ac9dd
commit
f56609efad
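For reference, a minimal sketch of the session lifecycle the commit message describes, written against the types this change uses (RepositorySessionFactory, RepositorySession, MetadataRepository, MetadataRepositoryException); the surrounding caller code and the injected factory field are assumptions for illustration, not part of this commit:

    // Hypothetical caller; the factory would be injected (e.g. via the @plexus.requirement shown in the diff below).
    RepositorySession repositorySession = repositorySessionFactory.createSession();
    try
    {
        MetadataRepository metadataRepository = repositorySession.getRepository();
        // ... read or update metadata through metadataRepository ...
        repositorySession.save();    // persist modifications made during the session
    }
    catch ( MetadataRepositoryException e )
    {
        repositorySession.revert();  // discard unsaved changes on failure
    }
    finally
    {
        repositorySession.close();   // the session must be closed explicitly
    }

The same create/save/revert/close pattern appears in ArchivaMetadataCreationConsumer further down in this diff.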
@@ -78,5 +78,10 @@
<artifactId>xmlunit</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-all</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
</project>
@@ -20,6 +20,8 @@ package org.apache.maven.archiva.consumers.core.repository;
*/
import org.apache.archiva.audit.AuditEvent;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.repository.events.RepositoryListener;
import org.apache.maven.archiva.model.ArtifactReference;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
@@ -41,15 +43,19 @@ public abstract class AbstractRepositoryPurge
protected final ManagedRepositoryContent repository;
protected final RepositorySession repositorySession;
protected final List<RepositoryListener> listeners;
private Logger logger = LoggerFactory.getLogger( "org.apache.archiva.AuditLog" );
private static final char DELIM = ' ';
public AbstractRepositoryPurge( ManagedRepositoryContent repository, List<RepositoryListener> listeners )
public AbstractRepositoryPurge( ManagedRepositoryContent repository, RepositorySession repositorySession,
List<RepositoryListener> listeners )
{
this.repository = repository;
this.repositorySession = repositorySession;
this.listeners = listeners;
}
@@ -62,6 +68,7 @@ public abstract class AbstractRepositoryPurge
{
if ( references != null && !references.isEmpty() )
{
MetadataRepository metadataRepository = repositorySession.getRepository();
for ( ArtifactReference reference : references )
{
File artifactFile = repository.toFile( reference );
@@ -69,12 +76,15 @@ public abstract class AbstractRepositoryPurge
// FIXME: looks incomplete, might not delete related metadata?
for ( RepositoryListener listener : listeners )
{
listener.deleteArtifact( repository.getId(), reference.getGroupId(), reference.getArtifactId(),
reference.getVersion(), artifactFile.getName() );
listener.deleteArtifact( metadataRepository, repository.getId(), reference.getGroupId(),
reference.getArtifactId(), reference.getVersion(),
artifactFile.getName() );
}
// TODO: this needs to be logged
artifactFile.delete();
repositorySession.save();
triggerAuditEvent( repository.getRepository().getId(), ArtifactReference.toKey( reference ),
AuditEvent.PURGE_ARTIFACT );
purgeSupportFiles( artifactFile );
@@ -19,6 +19,8 @@ package org.apache.maven.archiva.consumers.core.repository;
* under the License.
*/
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.repository.events.RepositoryListener;
import org.apache.maven.archiva.common.utils.VersionComparator;
import org.apache.maven.archiva.common.utils.VersionUtil;
@@ -79,9 +81,10 @@ public class CleanupReleasedSnapshotsRepositoryPurge
public CleanupReleasedSnapshotsRepositoryPurge( ManagedRepositoryContent repository, MetadataTools metadataTools,
ArchivaConfiguration archivaConfig,
RepositoryContentFactory repoContentFactory,
RepositorySession repositorySession,
List<RepositoryListener> listeners )
{
super( repository, listeners );
super( repository, repositorySession, listeners );
this.metadataTools = metadataTools;
this.archivaConfig = archivaConfig;
this.repoContentFactory = repoContentFactory;
@@ -168,6 +171,7 @@ public class CleanupReleasedSnapshotsRepositoryPurge
artifactRef.getVersion(), artifactRef.getClassifier(),
artifactRef.getType(), repository.getId() );
MetadataRepository metadataRepository = repositorySession.getRepository();
for ( String version : snapshotVersions )
{
if ( releasedVersions.contains( VersionUtil.getReleaseVersion( version ) ) )
@@ -178,8 +182,9 @@ public class CleanupReleasedSnapshotsRepositoryPurge
// FIXME: looks incomplete, might not delete related metadata?
for ( RepositoryListener listener : listeners )
{
listener.deleteArtifact( repository.getId(), artifact.getGroupId(), artifact.getArtifactId(),
artifact.getVersion(), artifactFile.getName() );
listener.deleteArtifact( metadataRepository, repository.getId(), artifact.getGroupId(),
artifact.getArtifactId(), artifact.getVersion(),
artifactFile.getName() );
}
needsMetadataUpdate = true;
@@ -19,6 +19,7 @@ package org.apache.maven.archiva.consumers.core.repository;
* under the License.
*/
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.repository.events.RepositoryListener;
import org.apache.commons.lang.time.DateUtils;
import org.apache.maven.archiva.common.utils.VersionComparator;
@@ -42,7 +43,6 @@ import java.util.regex.Matcher;
/**
* Purge from repository all snapshots older than the specified days in the repository configuration.
*
*/
public class DaysOldRepositoryPurge
extends AbstractRepositoryPurge
@@ -53,10 +53,10 @@ public class DaysOldRepositoryPurge
private int retentionCount;
public DaysOldRepositoryPurge( ManagedRepositoryContent repository, int daysOlder,
int retentionCount, List<RepositoryListener> listeners )
public DaysOldRepositoryPurge( ManagedRepositoryContent repository, int daysOlder, int retentionCount,
RepositorySession repositorySession, List<RepositoryListener> listeners )
{
super( repository, listeners );
super( repository, repositorySession, listeners );
this.daysOlder = daysOlder;
this.retentionCount = retentionCount;
timestampParser = new SimpleDateFormat( "yyyyMMdd.HHmmss" );
@@ -105,8 +105,8 @@ public class DaysOldRepositoryPurge
break;
}
ArtifactReference newArtifactReference =
repository.toArtifactReference( artifactFile.getAbsolutePath() );
ArtifactReference newArtifactReference = repository.toArtifactReference(
artifactFile.getAbsolutePath() );
newArtifactReference.setVersion( version );
File newArtifactFile = repository.toFile( newArtifactReference );
@@ -19,6 +19,8 @@ package org.apache.maven.archiva.consumers.core.repository;
* under the License.
*/
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.repository.events.RepositoryListener;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ConfigurationNames;
@@ -46,11 +48,9 @@ import java.util.List;
* Consumer for removing old snapshots in the repository based on the criteria
* specified by the user.
*
*
* @plexus.component
* role="org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer"
* role-hint="repository-purge"
* instantiation-strategy="per-lookup"
* @plexus.component role="org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer"
* role-hint="repository-purge"
* instantiation-strategy="per-lookup"
*/
public class RepositoryPurgeConsumer
extends AbstractMonitoredConsumer
@@ -94,9 +94,20 @@ public class RepositoryPurgeConsumer
private boolean deleteReleasedSnapshots;
/** @plexus.requirement role="org.apache.archiva.repository.events.RepositoryListener" */
/**
* @plexus.requirement role="org.apache.archiva.repository.events.RepositoryListener"
*/
private List<RepositoryListener> listeners = Collections.emptyList();
/**
* TODO: this could be multiple implementations and needs to be configured.
*
* @plexus.requirement
*/
private RepositorySessionFactory repositorySessionFactory;
private RepositorySession repositorySession;
public String getId()
{
return this.id;
@@ -125,27 +136,10 @@ public class RepositoryPurgeConsumer
public void beginScan( ManagedRepositoryConfiguration repository, Date whenGathered )
throws ConsumerException
{
ManagedRepositoryContent repositoryContent;
try
{
ManagedRepositoryContent repositoryContent = repositoryFactory.getManagedRepositoryContent( repository
.getId() );
if ( repository.getDaysOlder() != 0 )
{
repoPurge = new DaysOldRepositoryPurge( repositoryContent, repository.getDaysOlder(),
repository.getRetentionCount(), listeners );
}
else
{
repoPurge = new RetentionCountRepositoryPurge( repositoryContent, repository.getRetentionCount(),
listeners );
}
cleanUp =
new CleanupReleasedSnapshotsRepositoryPurge( repositoryContent, metadataTools, configuration,
repositoryFactory, listeners );
deleteReleasedSnapshots = repository.isDeleteReleasedSnapshots();
repositoryContent = repositoryFactory.getManagedRepositoryContent( repository.getId() );
}
catch ( RepositoryNotFoundException e )
{
@@ -155,6 +149,24 @@ public class RepositoryPurgeConsumer
{
throw new ConsumerException( "Can't run repository purge: " + e.getMessage(), e );
}
repositorySession = repositorySessionFactory.createSession();
if ( repository.getDaysOlder() != 0 )
{
repoPurge = new DaysOldRepositoryPurge( repositoryContent, repository.getDaysOlder(),
repository.getRetentionCount(), repositorySession, listeners );
}
else
{
repoPurge = new RetentionCountRepositoryPurge( repositoryContent, repository.getRetentionCount(),
repositorySession, listeners );
}
cleanUp = new CleanupReleasedSnapshotsRepositoryPurge( repositoryContent, metadataTools, configuration,
repositoryFactory, repositorySession, listeners );
deleteReleasedSnapshots = repository.isDeleteReleasedSnapshots();
}
public void beginScan( ManagedRepositoryConfiguration repository, Date whenGathered, boolean executeOnEntireRepo )
@@ -189,7 +201,7 @@ public class RepositoryPurgeConsumer
public void completeScan()
{
/* do nothing */
repositorySession.close();
}
public void completeScan( boolean executeOnEntireRepo )
@@ -19,6 +19,7 @@ package org.apache.maven.archiva.consumers.core.repository;
* under the License.
*/
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.repository.events.RepositoryListener;
import org.apache.maven.archiva.common.utils.VersionComparator;
import org.apache.maven.archiva.common.utils.VersionUtil;
@@ -36,17 +37,16 @@ import java.util.Set;
/**
* Purge the repository by retention count. Retain only the specified number of snapshots.
*
*/
public class RetentionCountRepositoryPurge
extends AbstractRepositoryPurge
{
private int retentionCount;
public RetentionCountRepositoryPurge( ManagedRepositoryContent repository,
int retentionCount, List<RepositoryListener> listeners )
public RetentionCountRepositoryPurge( ManagedRepositoryContent repository, int retentionCount,
RepositorySession repositorySession, List<RepositoryListener> listeners )
{
super( repository, listeners );
super( repository, repositorySession, listeners );
this.retentionCount = retentionCount;
}
@@ -61,7 +61,7 @@ public class RetentionCountRepositoryPurge
{
return;
}
ArtifactReference artifact = repository.toArtifactReference( path );
if ( VersionUtil.isSnapshot( artifact.getVersion() ) )
@@ -114,7 +114,7 @@ public class RetentionCountRepositoryPurge
artifact.setVersion( version );
artifact.setClassifier( reference.getClassifier() );
artifact.setType( reference.getType() );
try
{
Set<ArtifactReference> related = repository.getRelatedArtifacts( artifact );
@ -0,0 +1,43 @@
|
|||
package org.apache.archiva.metadata.repository;
|
||||
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
public class TestRepositorySessionFactory
|
||||
implements RepositorySessionFactory
|
||||
{
|
||||
private MetadataRepository repository;
|
||||
|
||||
private MetadataResolver resolver;
|
||||
|
||||
public RepositorySession createSession()
|
||||
{
|
||||
return new RepositorySession( repository, resolver );
|
||||
}
|
||||
|
||||
public void setRepository( MetadataRepository repository )
|
||||
{
|
||||
this.repository = repository;
|
||||
}
|
||||
|
||||
public void setResolver( MetadataResolver resolver )
|
||||
{
|
||||
this.resolver = resolver;
|
||||
}
|
||||
}
|
|
@ -19,10 +19,11 @@ package org.apache.maven.archiva.consumers.core.repository;
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
import org.apache.archiva.metadata.repository.MetadataRepository;
|
||||
import org.apache.archiva.metadata.repository.RepositorySession;
|
||||
import org.apache.archiva.repository.events.RepositoryListener;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
|
||||
import org.apache.maven.archiva.model.ArchivaArtifact;
|
||||
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
|
||||
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
|
||||
import org.easymock.MockControl;
|
||||
|
@ -30,6 +31,9 @@ import org.easymock.MockControl;
|
|||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
/**
|
||||
*/
|
||||
public abstract class AbstractRepositoryPurgeTest
|
||||
|
@ -43,15 +47,20 @@ public abstract class AbstractRepositoryPurgeTest
|
|||
|
||||
public static final int TEST_DAYS_OLDER = 30;
|
||||
|
||||
public static final String PATH_TO_BY_DAYS_OLD_ARTIFACT = "org/apache/maven/plugins/maven-install-plugin/2.2-SNAPSHOT/maven-install-plugin-2.2-20061118.060401-2.jar";
|
||||
public static final String PATH_TO_BY_DAYS_OLD_ARTIFACT =
|
||||
"org/apache/maven/plugins/maven-install-plugin/2.2-SNAPSHOT/maven-install-plugin-2.2-20061118.060401-2.jar";
|
||||
|
||||
public static final String PATH_TO_BY_DAYS_OLD_METADATA_DRIVEN_ARTIFACT = "org/codehaus/plexus/plexus-utils/1.4.3-SNAPSHOT/plexus-utils-1.4.3-20070113.163208-4.jar";
|
||||
public static final String PATH_TO_BY_DAYS_OLD_METADATA_DRIVEN_ARTIFACT =
|
||||
"org/codehaus/plexus/plexus-utils/1.4.3-SNAPSHOT/plexus-utils-1.4.3-20070113.163208-4.jar";
|
||||
|
||||
public static final String PATH_TO_BY_RETENTION_COUNT_ARTIFACT = "org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.jar";
|
||||
public static final String PATH_TO_BY_RETENTION_COUNT_ARTIFACT =
|
||||
"org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT/jruby-rake-plugin-1.0RC1-20070504.153317-1.jar";
|
||||
|
||||
public static final String PATH_TO_BY_RETENTION_COUNT_POM = "org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.pom";
|
||||
public static final String PATH_TO_BY_RETENTION_COUNT_POM =
|
||||
"org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT/castor-anttasks-1.1.2-20070506.163513-2.pom";
|
||||
|
||||
public static final String PATH_TO_TEST_ORDER_OF_DELETION = "org/apache/maven/plugins/maven-assembly-plugin/1.1.2-SNAPSHOT/maven-assembly-plugin-1.1.2-20070615.105019-3.jar";
|
||||
public static final String PATH_TO_TEST_ORDER_OF_DELETION =
|
||||
"org/apache/maven/plugins/maven-assembly-plugin/1.1.2-SNAPSHOT/maven-assembly-plugin-1.1.2-20070615.105019-3.jar";
|
||||
|
||||
protected static final String RELEASES_TEST_REPO_ID = "releases-test-repo-one";
|
||||
|
||||
|
@ -67,17 +76,25 @@ public abstract class AbstractRepositoryPurgeTest
|
|||
|
||||
protected RepositoryListener listener;
|
||||
|
||||
protected RepositorySession repositorySession;
|
||||
|
||||
protected MetadataRepository metadataRepository;
|
||||
|
||||
@Override
|
||||
protected void setUp()
|
||||
throws Exception
|
||||
{
|
||||
super.setUp();
|
||||
|
||||
|
||||
listenerControl = MockControl.createControl( RepositoryListener.class );
|
||||
|
||||
listener = (RepositoryListener) listenerControl.getMock();
|
||||
|
||||
repositorySession = mock( RepositorySession.class );
|
||||
metadataRepository = mock( MetadataRepository.class );
|
||||
when( repositorySession.getRepository() ).thenReturn( metadataRepository );
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
protected void tearDown()
|
||||
throws Exception
|
||||
|
@ -98,7 +115,7 @@ public abstract class AbstractRepositoryPurgeTest
|
|||
config.setSnapshots( true );
|
||||
config.setDeleteReleasedSnapshots( true );
|
||||
config.setRetentionCount( TEST_RETENTION_COUNT );
|
||||
|
||||
|
||||
return config;
|
||||
}
|
||||
|
||||
|
@ -107,7 +124,7 @@ public abstract class AbstractRepositoryPurgeTest
|
|||
{
|
||||
if ( repo == null )
|
||||
{
|
||||
repo = (ManagedRepositoryContent) lookup( ManagedRepositoryContent.class, "default" );
|
||||
repo = (ManagedRepositoryContent) lookup( ManagedRepositoryContent.class, "default" );
|
||||
repo.setRepository( getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ) );
|
||||
}
|
||||
|
||||
|
@ -123,7 +140,7 @@ public abstract class AbstractRepositoryPurgeTest
|
|||
{
|
||||
assertTrue( "File should exist: " + path, new File( path ).exists() );
|
||||
}
|
||||
|
||||
|
||||
protected File getTestRepoRoot()
|
||||
{
|
||||
return getTestFile( "target/test-" + getName() + "/" + TEST_REPO_ID );
|
||||
|
@ -135,16 +152,11 @@ public abstract class AbstractRepositoryPurgeTest
|
|||
File testDir = getTestRepoRoot();
|
||||
FileUtils.deleteDirectory( testDir );
|
||||
FileUtils.copyDirectory( getTestFile( "target/test-classes/" + TEST_REPO_ID ), testDir );
|
||||
|
||||
|
||||
File releasesTestDir = getTestFile( "target/test-" + getName() + "/" + RELEASES_TEST_REPO_ID );
|
||||
FileUtils.deleteDirectory( releasesTestDir );
|
||||
FileUtils.copyDirectory( getTestFile( "target/test-classes/" + RELEASES_TEST_REPO_ID ), releasesTestDir );
|
||||
|
||||
|
||||
return testDir.getAbsolutePath();
|
||||
}
|
||||
|
||||
protected ArchivaArtifact createArtifact( String groupId, String artifactId, String version, String type )
|
||||
{
|
||||
return new ArchivaArtifact( groupId, artifactId, version, null, type, TEST_REPO_ID );
|
||||
}
|
||||
}
|
||||
|
|
|
@ -30,6 +30,7 @@ import org.easymock.MockControl;
|
|||
|
||||
import java.io.File;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
|
||||
/**
|
||||
|
@ -43,29 +44,32 @@ public class CleanupReleasedSnapshotsRepositoryPurgeTest
|
|||
|
||||
public static final String PATH_TO_RELEASED_SNAPSHOT_IN_DIFF_REPO =
|
||||
"org/apache/archiva/released-artifact-in-diff-repo/1.0-SNAPSHOT/released-artifact-in-diff-repo-1.0-SNAPSHOT.jar";
|
||||
|
||||
public static final String PATH_TO_HIGHER_SNAPSHOT_EXISTS_IN_SAME_REPO = "org/apache/maven/plugins/maven-source-plugin/2.0.3-SNAPSHOT/maven-source-plugin-2.0.3-SNAPSHOT.jar";
|
||||
|
||||
public static final String PATH_TO_RELEASED_SNAPSHOT_IN_SAME_REPO = "org/apache/maven/plugins/maven-plugin-plugin/2.3-SNAPSHOT/maven-plugin-plugin-2.3-SNAPSHOT.jar";
|
||||
|
||||
public static final String PATH_TO_HIGHER_SNAPSHOT_EXISTS_IN_SAME_REPO =
|
||||
"org/apache/maven/plugins/maven-source-plugin/2.0.3-SNAPSHOT/maven-source-plugin-2.0.3-SNAPSHOT.jar";
|
||||
|
||||
public static final String PATH_TO_RELEASED_SNAPSHOT_IN_SAME_REPO =
|
||||
"org/apache/maven/plugins/maven-plugin-plugin/2.3-SNAPSHOT/maven-plugin-plugin-2.3-SNAPSHOT.jar";
|
||||
|
||||
@Override
|
||||
protected void setUp()
|
||||
throws Exception
|
||||
{
|
||||
super.setUp();
|
||||
|
||||
|
||||
MetadataTools metadataTools = (MetadataTools) lookup( MetadataTools.class );
|
||||
RepositoryContentFactory factory = (RepositoryContentFactory) lookup( RepositoryContentFactory.class, "cleanup-released-snapshots");
|
||||
|
||||
archivaConfiguration =
|
||||
(ArchivaConfiguration) lookup( ArchivaConfiguration.class, "cleanup-released-snapshots" );
|
||||
RepositoryContentFactory factory = (RepositoryContentFactory) lookup( RepositoryContentFactory.class,
|
||||
"cleanup-released-snapshots" );
|
||||
|
||||
archivaConfiguration = (ArchivaConfiguration) lookup( ArchivaConfiguration.class,
|
||||
"cleanup-released-snapshots" );
|
||||
|
||||
listenerControl = MockControl.createControl( RepositoryListener.class );
|
||||
|
||||
|
||||
listener = (RepositoryListener) listenerControl.getMock();
|
||||
repoPurge =
|
||||
new CleanupReleasedSnapshotsRepositoryPurge( getRepository(), metadataTools, archivaConfiguration, factory,
|
||||
Collections.singletonList( listener ) );
|
||||
List<RepositoryListener> listeners = Collections.singletonList( listener );
|
||||
repoPurge = new CleanupReleasedSnapshotsRepositoryPurge( getRepository(), metadataTools, archivaConfiguration,
|
||||
factory, repositorySession, listeners );
|
||||
}
|
||||
|
||||
public void testReleasedSnapshotsExistsInSameRepo()
|
||||
|
@ -74,20 +78,20 @@ public class CleanupReleasedSnapshotsRepositoryPurgeTest
|
|||
Configuration config = archivaConfiguration.getConfiguration();
|
||||
config.removeManagedRepository( config.findManagedRepositoryById( TEST_REPO_ID ) );
|
||||
config.addManagedRepository( getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ) );
|
||||
|
||||
String repoRoot = prepareTestRepos();
|
||||
|
||||
String repoRoot = prepareTestRepos();
|
||||
|
||||
// test listeners for the correct artifacts
|
||||
listener.deleteArtifact( getRepository().getId(), "org.apache.maven.plugins", "maven-plugin-plugin",
|
||||
"2.3-SNAPSHOT", "maven-plugin-plugin-2.3-SNAPSHOT.jar" );
|
||||
listener.deleteArtifact( metadataRepository, getRepository().getId(), "org.apache.maven.plugins",
|
||||
"maven-plugin-plugin", "2.3-SNAPSHOT", "maven-plugin-plugin-2.3-SNAPSHOT.jar" );
|
||||
listenerControl.replay();
|
||||
|
||||
|
||||
repoPurge.process( PATH_TO_RELEASED_SNAPSHOT_IN_SAME_REPO );
|
||||
|
||||
|
||||
listenerControl.verify();
|
||||
|
||||
String projectRoot = repoRoot + "/org/apache/maven/plugins/maven-plugin-plugin";
|
||||
|
||||
|
||||
// check if the snapshot was removed
|
||||
assertDeleted( projectRoot + "/2.3-SNAPSHOT" );
|
||||
assertDeleted( projectRoot + "/2.3-SNAPSHOT/maven-plugin-plugin-2.3-SNAPSHOT.jar" );
|
||||
|
@ -111,19 +115,19 @@ public class CleanupReleasedSnapshotsRepositoryPurgeTest
|
|||
|
||||
// check if metadata file was updated
|
||||
File artifactMetadataFile = new File( projectRoot + "/maven-metadata.xml" );
|
||||
|
||||
|
||||
String metadataXml = FileUtils.readFileToString( artifactMetadataFile, null );
|
||||
|
||||
String expectedVersions = "<expected><versions><version>2.2</version>" +
|
||||
"<version>2.3</version></versions></expected>";
|
||||
|
||||
|
||||
String expectedVersions =
|
||||
"<expected><versions><version>2.2</version>" + "<version>2.3</version></versions></expected>";
|
||||
|
||||
XMLAssert.assertXpathEvaluatesTo( "2.3", "//metadata/versioning/release", metadataXml );
|
||||
XMLAssert.assertXpathEvaluatesTo( "2.3", "//metadata/versioning/latest", metadataXml );
|
||||
XMLAssert.assertXpathsEqual( "//expected/versions/version", expectedVersions,
|
||||
"//metadata/versioning/versions/version", metadataXml );
|
||||
XMLAssert.assertXpathEvaluatesTo( "20070315032817", "//metadata/versioning/lastUpdated", metadataXml );
|
||||
}
|
||||
|
||||
|
||||
public void testNonArtifactFile()
|
||||
throws Exception
|
||||
{
|
||||
|
@ -149,25 +153,26 @@ public class CleanupReleasedSnapshotsRepositoryPurgeTest
|
|||
|
||||
public void testReleasedSnapshotsExistsInDifferentRepo()
|
||||
throws Exception
|
||||
{
|
||||
{
|
||||
Configuration config = archivaConfiguration.getConfiguration();
|
||||
config.removeManagedRepository( config.findManagedRepositoryById( TEST_REPO_ID ) );
|
||||
config.addManagedRepository( getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ) );
|
||||
config.addManagedRepository( getRepoConfiguration( RELEASES_TEST_REPO_ID, RELEASES_TEST_REPO_NAME ) );
|
||||
|
||||
String repoRoot = prepareTestRepos();
|
||||
|
||||
String repoRoot = prepareTestRepos();
|
||||
|
||||
// test listeners for the correct artifacts
|
||||
listener.deleteArtifact( getRepository().getId(), "org.apache.archiva", "released-artifact-in-diff-repo",
|
||||
"1.0-SNAPSHOT", "released-artifact-in-diff-repo-1.0-SNAPSHOT.jar" );
|
||||
listener.deleteArtifact( metadataRepository, getRepository().getId(), "org.apache.archiva",
|
||||
"released-artifact-in-diff-repo", "1.0-SNAPSHOT",
|
||||
"released-artifact-in-diff-repo-1.0-SNAPSHOT.jar" );
|
||||
listenerControl.replay();
|
||||
|
||||
|
||||
repoPurge.process( PATH_TO_RELEASED_SNAPSHOT_IN_DIFF_REPO );
|
||||
|
||||
listenerControl.verify();
|
||||
|
||||
|
||||
String projectRoot = repoRoot + "/org/apache/archiva/released-artifact-in-diff-repo";
|
||||
|
||||
|
||||
// check if the snapshot was removed
|
||||
assertDeleted( projectRoot + "/1.0-SNAPSHOT" );
|
||||
assertDeleted( projectRoot + "/1.0-SNAPSHOT/released-artifact-in-diff-repo-1.0-SNAPSHOT.jar" );
|
||||
|
@ -177,38 +182,38 @@ public class CleanupReleasedSnapshotsRepositoryPurgeTest
|
|||
assertDeleted( projectRoot + "/1.0-SNAPSHOT/released-artifact-in-diff-repo-1.0-SNAPSHOT.pom.md5" );
|
||||
assertDeleted( projectRoot + "/1.0-SNAPSHOT/released-artifact-in-diff-repo-1.0-SNAPSHOT.pom.sha1" );
|
||||
|
||||
String releasesProjectRoot =
|
||||
getTestFile( "target/test-" + getName() + "/releases-test-repo-one" ).getAbsolutePath() +
|
||||
"/org/apache/archiva/released-artifact-in-diff-repo";
|
||||
|
||||
String releasesProjectRoot = getTestFile(
|
||||
"target/test-" + getName() + "/releases-test-repo-one" ).getAbsolutePath() +
|
||||
"/org/apache/archiva/released-artifact-in-diff-repo";
|
||||
|
||||
// check if the released version was not removed
|
||||
assertExists( releasesProjectRoot + "/1.0" );
|
||||
assertExists( releasesProjectRoot + "/1.0" );
|
||||
assertExists( releasesProjectRoot + "/1.0/released-artifact-in-diff-repo-1.0.jar" );
|
||||
assertExists( releasesProjectRoot + "/1.0/released-artifact-in-diff-repo-1.0.jar.md5" );
|
||||
assertExists( releasesProjectRoot + "/1.0/released-artifact-in-diff-repo-1.0.jar.sha1" );
|
||||
assertExists( releasesProjectRoot + "/1.0/released-artifact-in-diff-repo-1.0.pom" );
|
||||
assertExists( releasesProjectRoot + "/1.0/released-artifact-in-diff-repo-1.0.pom.md5" );
|
||||
assertExists( releasesProjectRoot + "/1.0/released-artifact-in-diff-repo-1.0.pom.sha1" );
|
||||
assertExists( releasesProjectRoot + "/1.0/released-artifact-in-diff-repo-1.0.pom.sha1" );
|
||||
}
|
||||
|
||||
public void testHigherSnapshotExistsInSameRepo()
|
||||
throws Exception
|
||||
{
|
||||
{
|
||||
Configuration config = archivaConfiguration.getConfiguration();
|
||||
config.removeManagedRepository( config.findManagedRepositoryById( TEST_REPO_ID ) );
|
||||
config.addManagedRepository( getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ) );
|
||||
|
||||
|
||||
String repoRoot = prepareTestRepos();
|
||||
|
||||
// test listeners for the correct artifacts - no deletions
|
||||
listenerControl.replay();
|
||||
|
||||
|
||||
repoPurge.process( CleanupReleasedSnapshotsRepositoryPurgeTest.PATH_TO_HIGHER_SNAPSHOT_EXISTS_IN_SAME_REPO );
|
||||
|
||||
listenerControl.verify();
|
||||
|
||||
|
||||
String projectRoot = repoRoot + "/org/apache/maven/plugins/maven-source-plugin";
|
||||
|
||||
|
||||
// check if the snapshot was not removed
|
||||
assertExists( projectRoot + "/2.0.3-SNAPSHOT" );
|
||||
assertExists( projectRoot + "/2.0.3-SNAPSHOT/maven-source-plugin-2.0.3-SNAPSHOT.jar" );
|
||||
|
@ -231,10 +236,10 @@ public class CleanupReleasedSnapshotsRepositoryPurgeTest
|
|||
File artifactMetadataFile = new File( projectRoot + "/maven-metadata.xml" );
|
||||
|
||||
String metadataXml = FileUtils.readFileToString( artifactMetadataFile, null );
|
||||
|
||||
|
||||
String expectedVersions = "<expected><versions><version>2.0.3-SNAPSHOT</version>" +
|
||||
"<version>2.0.4-SNAPSHOT</version></versions></expected>";
|
||||
|
||||
"<version>2.0.4-SNAPSHOT</version></versions></expected>";
|
||||
|
||||
XMLAssert.assertXpathEvaluatesTo( "2.0.4-SNAPSHOT", "//metadata/versioning/latest", metadataXml );
|
||||
XMLAssert.assertXpathsEqual( "//expected/versions/version", expectedVersions,
|
||||
"//metadata/versioning/versions/version", metadataXml );
|
||||
|
|
|
@ -19,12 +19,15 @@ package org.apache.maven.archiva.consumers.core.repository;
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
import org.apache.archiva.repository.events.RepositoryListener;
|
||||
import org.apache.commons.lang.time.DateUtils;
|
||||
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
|
||||
|
||||
import java.io.File;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.Calendar;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
*/
|
||||
|
@ -46,11 +49,10 @@ public class DaysOldRepositoryPurgeTest
|
|||
public void testByLastModified()
|
||||
throws Exception
|
||||
{
|
||||
repoPurge =
|
||||
new DaysOldRepositoryPurge( getRepository(),
|
||||
getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getDaysOlder(),
|
||||
getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getRetentionCount(),
|
||||
Collections.singletonList( listener ) );
|
||||
ManagedRepositoryConfiguration repoConfiguration = getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME );
|
||||
repoPurge = new DaysOldRepositoryPurge( getRepository(), repoConfiguration.getDaysOlder(),
|
||||
repoConfiguration.getRetentionCount(), repositorySession,
|
||||
Collections.singletonList( listener ) );
|
||||
|
||||
String repoRoot = prepareTestRepos();
|
||||
|
||||
|
@ -59,14 +61,16 @@ public class DaysOldRepositoryPurgeTest
|
|||
setLastModified( projectRoot + "/2.2-SNAPSHOT/", OLD_TIMESTAMP );
|
||||
|
||||
// test listeners for the correct artifacts
|
||||
listener.deleteArtifact( getRepository().getId(), "org.apache.maven.plugins", "maven-install-plugin",
|
||||
"2.2-SNAPSHOT", "maven-install-plugin-2.2-SNAPSHOT.jar" );
|
||||
listener.deleteArtifact( getRepository().getId(), "org.apache.maven.plugins", "maven-install-plugin",
|
||||
"2.2-SNAPSHOT", "maven-install-plugin-2.2-SNAPSHOT.pom" );
|
||||
listener.deleteArtifact( getRepository().getId(), "org.apache.maven.plugins", "maven-install-plugin",
|
||||
"2.2-20061118.060401-2", "maven-install-plugin-2.2-20061118.060401-2.jar" );
|
||||
listener.deleteArtifact( getRepository().getId(), "org.apache.maven.plugins", "maven-install-plugin",
|
||||
"2.2-20061118.060401-2", "maven-install-plugin-2.2-20061118.060401-2.pom" );
|
||||
listener.deleteArtifact( metadataRepository, getRepository().getId(), "org.apache.maven.plugins",
|
||||
"maven-install-plugin", "2.2-SNAPSHOT", "maven-install-plugin-2.2-SNAPSHOT.jar" );
|
||||
listener.deleteArtifact( metadataRepository, getRepository().getId(), "org.apache.maven.plugins",
|
||||
"maven-install-plugin", "2.2-SNAPSHOT", "maven-install-plugin-2.2-SNAPSHOT.pom" );
|
||||
listener.deleteArtifact( metadataRepository, getRepository().getId(), "org.apache.maven.plugins",
|
||||
"maven-install-plugin", "2.2-20061118.060401-2",
|
||||
"maven-install-plugin-2.2-20061118.060401-2.jar" );
|
||||
listener.deleteArtifact( metadataRepository, getRepository().getId(), "org.apache.maven.plugins",
|
||||
"maven-install-plugin", "2.2-20061118.060401-2",
|
||||
"maven-install-plugin-2.2-20061118.060401-2.pom" );
|
||||
listenerControl.replay();
|
||||
|
||||
repoPurge.process( PATH_TO_BY_DAYS_OLD_ARTIFACT );
|
||||
|
@ -106,10 +110,10 @@ public class DaysOldRepositoryPurgeTest
|
|||
public void testOrderOfDeletion()
|
||||
throws Exception
|
||||
{
|
||||
repoPurge =
|
||||
new DaysOldRepositoryPurge( getRepository(), getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getDaysOlder(),
|
||||
getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getRetentionCount(),
|
||||
Collections.singletonList( listener ) );
|
||||
ManagedRepositoryConfiguration repoConfiguration = getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME );
|
||||
List<RepositoryListener> listeners = Collections.singletonList( listener );
|
||||
repoPurge = new DaysOldRepositoryPurge( getRepository(), repoConfiguration.getDaysOlder(),
|
||||
repoConfiguration.getRetentionCount(), repositorySession, listeners );
|
||||
|
||||
String repoRoot = prepareTestRepos();
|
||||
|
||||
|
@ -118,10 +122,12 @@ public class DaysOldRepositoryPurgeTest
|
|||
setLastModified( projectRoot + "/1.1.2-SNAPSHOT/", OLD_TIMESTAMP );
|
||||
|
||||
// test listeners for the correct artifacts
|
||||
listener.deleteArtifact( getRepository().getId(), "org.apache.maven.plugins", "maven-assembly-plugin",
|
||||
"1.1.2-20070427.065136-1", "maven-assembly-plugin-1.1.2-20070427.065136-1.jar" );
|
||||
listener.deleteArtifact( getRepository().getId(), "org.apache.maven.plugins", "maven-assembly-plugin",
|
||||
"1.1.2-20070427.065136-1", "maven-assembly-plugin-1.1.2-20070427.065136-1.pom" );
|
||||
listener.deleteArtifact( metadataRepository, getRepository().getId(), "org.apache.maven.plugins",
|
||||
"maven-assembly-plugin", "1.1.2-20070427.065136-1",
|
||||
"maven-assembly-plugin-1.1.2-20070427.065136-1.jar" );
|
||||
listener.deleteArtifact( metadataRepository, getRepository().getId(), "org.apache.maven.plugins",
|
||||
"maven-assembly-plugin", "1.1.2-20070427.065136-1",
|
||||
"maven-assembly-plugin-1.1.2-20070427.065136-1.pom" );
|
||||
listenerControl.replay();
|
||||
|
||||
repoPurge.process( PATH_TO_TEST_ORDER_OF_DELETION );
|
||||
|
@ -154,11 +160,10 @@ public class DaysOldRepositoryPurgeTest
|
|||
public void testMetadataDrivenSnapshots()
|
||||
throws Exception
|
||||
{
|
||||
repoPurge =
|
||||
new DaysOldRepositoryPurge( getRepository(),
|
||||
getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getDaysOlder(),
|
||||
getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getRetentionCount(),
|
||||
Collections.singletonList( listener ) );
|
||||
ManagedRepositoryConfiguration repoConfiguration = getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME );
|
||||
List<RepositoryListener> listeners = Collections.singletonList( listener );
|
||||
repoPurge = new DaysOldRepositoryPurge( getRepository(), repoConfiguration.getDaysOlder(),
|
||||
repoConfiguration.getRetentionCount(), repositorySession, listeners );
|
||||
|
||||
String repoRoot = prepareTestRepos();
|
||||
|
||||
|
@ -185,9 +190,9 @@ public class DaysOldRepositoryPurgeTest
|
|||
}
|
||||
|
||||
// test listeners for the correct artifacts
|
||||
listener.deleteArtifact( getRepository().getId(), "org.codehaus.plexus", "plexus-utils",
|
||||
listener.deleteArtifact( metadataRepository, getRepository().getId(), "org.codehaus.plexus", "plexus-utils",
|
||||
"1.4.3-20070113.163208-4", "plexus-utils-1.4.3-20070113.163208-4.jar" );
|
||||
listener.deleteArtifact( getRepository().getId(), "org.codehaus.plexus", "plexus-utils",
|
||||
listener.deleteArtifact( metadataRepository, getRepository().getId(), "org.codehaus.plexus", "plexus-utils",
|
||||
"1.4.3-20070113.163208-4", "plexus-utils-1.4.3-20070113.163208-4.pom" );
|
||||
listenerControl.replay();
|
||||
|
||||
|
|
|
@ -19,6 +19,8 @@ package org.apache.maven.archiva.consumers.core.repository;
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
|
||||
import org.apache.archiva.metadata.repository.TestRepositorySessionFactory;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.maven.archiva.common.utils.BaseFile;
|
||||
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
|
||||
|
@ -62,13 +64,12 @@ public class RepositoryPurgeConsumerTest
|
|||
FileTypes fileTypes = (FileTypes) lookup( FileTypes.class );
|
||||
fileTypes.afterConfigurationChange( null, "repositoryScanning.fileTypes", null );
|
||||
|
||||
KnownRepositoryContentConsumer repoPurgeConsumer =
|
||||
(KnownRepositoryContentConsumer) lookup( KnownRepositoryContentConsumer.class, "repository-purge" );
|
||||
KnownRepositoryContentConsumer repoPurgeConsumer = (KnownRepositoryContentConsumer) lookup(
|
||||
KnownRepositoryContentConsumer.class, "repository-purge" );
|
||||
|
||||
File repoLocation = getTestFile( "target/test-" + getName() + "/test-repo" );
|
||||
|
||||
File localFile =
|
||||
new File( repoLocation, path );
|
||||
File localFile = new File( repoLocation, path );
|
||||
|
||||
ConsumerWantsFilePredicate predicate = new ConsumerWantsFilePredicate();
|
||||
BaseFile baseFile = new BaseFile( repoLocation, localFile );
|
||||
|
@ -83,16 +84,15 @@ public class RepositoryPurgeConsumerTest
|
|||
File[] contents = dir.listFiles();
|
||||
for ( int i = 0; i < contents.length; i++ )
|
||||
{
|
||||
contents[i].setLastModified( 1179382029 );
|
||||
contents[i].setLastModified( 1179382029 );
|
||||
}
|
||||
}
|
||||
|
||||
public void testConsumerByRetentionCount()
|
||||
throws Exception
|
||||
{
|
||||
KnownRepositoryContentConsumer repoPurgeConsumer =
|
||||
(KnownRepositoryContentConsumer) lookup( KnownRepositoryContentConsumer.class,
|
||||
"repo-purge-consumer-by-retention-count" );
|
||||
KnownRepositoryContentConsumer repoPurgeConsumer = (KnownRepositoryContentConsumer) lookup(
|
||||
KnownRepositoryContentConsumer.class, "repo-purge-consumer-by-retention-count" );
|
||||
|
||||
ManagedRepositoryConfiguration repoConfiguration = getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME );
|
||||
repoConfiguration.setDaysOlder( 0 ); // force days older off to allow retention count purge to execute.
|
||||
|
@ -141,8 +141,8 @@ public class RepositoryPurgeConsumerTest
|
|||
private void addRepoToConfiguration( String configHint, ManagedRepositoryConfiguration repoConfiguration )
|
||||
throws Exception
|
||||
{
|
||||
ArchivaConfiguration archivaConfiguration =
|
||||
(ArchivaConfiguration) lookup( ArchivaConfiguration.class, configHint );
|
||||
ArchivaConfiguration archivaConfiguration = (ArchivaConfiguration) lookup( ArchivaConfiguration.class,
|
||||
configHint );
|
||||
Configuration configuration = archivaConfiguration.getConfiguration();
|
||||
configuration.removeManagedRepository( configuration.findManagedRepositoryById( repoConfiguration.getId() ) );
|
||||
configuration.addManagedRepository( repoConfiguration );
|
||||
|
@ -151,9 +151,8 @@ public class RepositoryPurgeConsumerTest
|
|||
public void testConsumerByDaysOld()
|
||||
throws Exception
|
||||
{
|
||||
KnownRepositoryContentConsumer repoPurgeConsumer =
|
||||
(KnownRepositoryContentConsumer) lookup( KnownRepositoryContentConsumer.class,
|
||||
"repo-purge-consumer-by-days-old" );
|
||||
KnownRepositoryContentConsumer repoPurgeConsumer = (KnownRepositoryContentConsumer) lookup(
|
||||
KnownRepositoryContentConsumer.class, "repo-purge-consumer-by-days-old" );
|
||||
|
||||
ManagedRepositoryConfiguration repoConfiguration = getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME );
|
||||
repoConfiguration.setDaysOlder( TEST_DAYS_OLDER );
|
||||
|
@ -182,7 +181,7 @@ public class RepositoryPurgeConsumerTest
|
|||
assertExists( projectRoot + "/2.2-SNAPSHOT/maven-install-plugin-2.2-20070513.034619-5.pom" );
|
||||
assertExists( projectRoot + "/2.2-SNAPSHOT/maven-install-plugin-2.2-20070513.034619-5.pom.md5" );
|
||||
assertExists( projectRoot + "/2.2-SNAPSHOT/maven-install-plugin-2.2-20070513.034619-5.pom.sha1" );
|
||||
|
||||
|
||||
assertExists( projectRoot + "/2.2-SNAPSHOT/maven-install-plugin-2.2-20070510.010101-4.jar" );
|
||||
assertExists( projectRoot + "/2.2-SNAPSHOT/maven-install-plugin-2.2-20070510.010101-4.jar.md5" );
|
||||
assertExists( projectRoot + "/2.2-SNAPSHOT/maven-install-plugin-2.2-20070510.010101-4.jar.sha1" );
|
||||
|
@ -206,9 +205,8 @@ public class RepositoryPurgeConsumerTest
|
|||
public void testReleasedSnapshotsWereNotCleaned()
|
||||
throws Exception
|
||||
{
|
||||
KnownRepositoryContentConsumer repoPurgeConsumer =
|
||||
(KnownRepositoryContentConsumer) lookup( KnownRepositoryContentConsumer.class,
|
||||
"repo-purge-consumer-by-retention-count" );
|
||||
KnownRepositoryContentConsumer repoPurgeConsumer = (KnownRepositoryContentConsumer) lookup(
|
||||
KnownRepositoryContentConsumer.class, "repo-purge-consumer-by-retention-count" );
|
||||
|
||||
ManagedRepositoryConfiguration repoConfiguration = getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME );
|
||||
repoConfiguration.setDeleteReleasedSnapshots( false ); // Set to NOT delete released snapshots.
|
||||
|
@ -218,7 +216,8 @@ public class RepositoryPurgeConsumerTest
|
|||
|
||||
String repoRoot = prepareTestRepos();
|
||||
|
||||
repoPurgeConsumer.processFile( CleanupReleasedSnapshotsRepositoryPurgeTest.PATH_TO_RELEASED_SNAPSHOT_IN_SAME_REPO );
|
||||
repoPurgeConsumer.processFile(
|
||||
CleanupReleasedSnapshotsRepositoryPurgeTest.PATH_TO_RELEASED_SNAPSHOT_IN_SAME_REPO );
|
||||
|
||||
// check if the snapshot wasn't removed
|
||||
String projectRoot = repoRoot + "/org/apache/maven/plugins/maven-plugin-plugin";
|
||||
|
@ -247,9 +246,8 @@ public class RepositoryPurgeConsumerTest
|
|||
public void testReleasedSnapshotsWereCleaned()
|
||||
throws Exception
|
||||
{
|
||||
KnownRepositoryContentConsumer repoPurgeConsumer =
|
||||
(KnownRepositoryContentConsumer) lookup( KnownRepositoryContentConsumer.class,
|
||||
"repo-purge-consumer-by-days-old" );
|
||||
KnownRepositoryContentConsumer repoPurgeConsumer = (KnownRepositoryContentConsumer) lookup(
|
||||
KnownRepositoryContentConsumer.class, "repo-purge-consumer-by-days-old" );
|
||||
|
||||
ManagedRepositoryConfiguration repoConfiguration = getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME );
|
||||
repoConfiguration.setDeleteReleasedSnapshots( true );
|
||||
|
@ -259,7 +257,8 @@ public class RepositoryPurgeConsumerTest
|
|||
|
||||
String repoRoot = prepareTestRepos();
|
||||
|
||||
repoPurgeConsumer.processFile( CleanupReleasedSnapshotsRepositoryPurgeTest.PATH_TO_RELEASED_SNAPSHOT_IN_SAME_REPO );
|
||||
repoPurgeConsumer.processFile(
|
||||
CleanupReleasedSnapshotsRepositoryPurgeTest.PATH_TO_RELEASED_SNAPSHOT_IN_SAME_REPO );
|
||||
|
||||
String projectRoot = repoRoot + "/org/apache/maven/plugins/maven-plugin-plugin";
|
||||
|
||||
|
@ -285,4 +284,14 @@ public class RepositoryPurgeConsumerTest
|
|||
"//metadata/versioning/versions/version", metadataXml );
|
||||
XMLAssert.assertXpathEvaluatesTo( "20070315032817", "//metadata/versioning/lastUpdated", metadataXml );
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void setUp()
|
||||
throws Exception
|
||||
{
|
||||
super.setUp();
|
||||
|
||||
TestRepositorySessionFactory factory = (TestRepositorySessionFactory) lookup( RepositorySessionFactory.class );
|
||||
factory.setRepository( metadataRepository );
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,7 +1,5 @@
|
|||
package org.apache.maven.archiva.consumers.core.repository;
|
||||
|
||||
import java.util.Collections;
|
||||
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
|
@ -11,7 +9,7 @@ import java.util.Collections;
|
|||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
|
@ -21,24 +19,27 @@ import java.util.Collections;
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
import org.apache.archiva.repository.events.RepositoryListener;
|
||||
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Test RetentionsCountRepositoryPurgeTest
|
||||
*
|
||||
*/
|
||||
public class RetentionCountRepositoryPurgeTest
|
||||
extends AbstractRepositoryPurgeTest
|
||||
{
|
||||
|
||||
protected void setUp()
|
||||
throws Exception
|
||||
{
|
||||
super.setUp();
|
||||
|
||||
repoPurge =
|
||||
new RetentionCountRepositoryPurge(
|
||||
getRepository(),
|
||||
getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME ).getRetentionCount(),
|
||||
Collections.singletonList( listener ) );
|
||||
ManagedRepositoryConfiguration repoConfiguration = getRepoConfiguration( TEST_REPO_ID, TEST_REPO_NAME );
|
||||
List<RepositoryListener> listeners = Collections.singletonList( listener );
|
||||
repoPurge = new RetentionCountRepositoryPurge( getRepository(), repoConfiguration.getRetentionCount(),
|
||||
repositorySession, listeners );
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -50,18 +51,18 @@ public class RetentionCountRepositoryPurgeTest
|
|||
String repoRoot = prepareTestRepos();
|
||||
|
||||
// test listeners for the correct artifacts
|
||||
listener.deleteArtifact( getRepository().getId(), "org.jruby.plugins", "jruby-rake-plugin",
|
||||
listener.deleteArtifact( metadataRepository, getRepository().getId(), "org.jruby.plugins", "jruby-rake-plugin",
|
||||
"1.0RC1-20070504.153317-1", "jruby-rake-plugin-1.0RC1-20070504.153317-1.jar" );
|
||||
listener.deleteArtifact( getRepository().getId(), "org.jruby.plugins", "jruby-rake-plugin",
|
||||
listener.deleteArtifact( metadataRepository, getRepository().getId(), "org.jruby.plugins", "jruby-rake-plugin",
|
||||
"1.0RC1-20070504.153317-1", "jruby-rake-plugin-1.0RC1-20070504.153317-1.pom" );
|
||||
listener.deleteArtifact( getRepository().getId(), "org.jruby.plugins", "jruby-rake-plugin",
|
||||
listener.deleteArtifact( metadataRepository, getRepository().getId(), "org.jruby.plugins", "jruby-rake-plugin",
|
||||
"1.0RC1-20070504.160758-2", "jruby-rake-plugin-1.0RC1-20070504.160758-2.jar" );
|
||||
listener.deleteArtifact( getRepository().getId(), "org.jruby.plugins", "jruby-rake-plugin",
|
||||
listener.deleteArtifact( metadataRepository, getRepository().getId(), "org.jruby.plugins", "jruby-rake-plugin",
|
||||
"1.0RC1-20070504.160758-2", "jruby-rake-plugin-1.0RC1-20070504.160758-2.pom" );
|
||||
listenerControl.replay();
|
||||
|
||||
|
||||
repoPurge.process( PATH_TO_BY_RETENTION_COUNT_ARTIFACT );
|
||||
|
||||
|
||||
listenerControl.verify();
|
||||
|
||||
String versionRoot = repoRoot + "/org/jruby/plugins/jruby-rake-plugin/1.0RC1-SNAPSHOT";
|
||||
|
@ -106,18 +107,18 @@ public class RetentionCountRepositoryPurgeTest
|
|||
String repoRoot = prepareTestRepos();
|
||||
|
||||
// test listeners for the correct artifacts
|
||||
listener.deleteArtifact( getRepository().getId(), "org.codehaus.castor", "castor-anttasks",
|
||||
listener.deleteArtifact( metadataRepository, getRepository().getId(), "org.codehaus.castor", "castor-anttasks",
|
||||
"1.1.2-20070427.065136-1", "castor-anttasks-1.1.2-20070427.065136-1.jar" );
|
||||
listener.deleteArtifact( getRepository().getId(), "org.codehaus.castor", "castor-anttasks",
|
||||
listener.deleteArtifact( metadataRepository, getRepository().getId(), "org.codehaus.castor", "castor-anttasks",
|
||||
"1.1.2-20070427.065136-1", "castor-anttasks-1.1.2-20070427.065136-1.pom" );
|
||||
listenerControl.replay();
|
||||
|
||||
|
||||
repoPurge.process( PATH_TO_BY_RETENTION_COUNT_POM );
|
||||
|
||||
|
||||
listenerControl.verify();
|
||||
|
||||
String versionRoot = repoRoot + "/org/codehaus/castor/castor-anttasks/1.1.2-SNAPSHOT";
|
||||
|
||||
|
||||
// assert if removed from repo
|
||||
assertDeleted( versionRoot + "/castor-anttasks-1.1.2-20070427.065136-1.jar" );
|
||||
assertDeleted( versionRoot + "/castor-anttasks-1.1.2-20070427.065136-1.jar.md5" );
|
||||
|
@ -154,19 +155,20 @@ public class RetentionCountRepositoryPurgeTest
|
|||
String repoRoot = prepareTestRepos();
|
||||
|
||||
// test listeners for the correct artifacts
|
||||
listener.deleteArtifact( getRepository().getId(), "org.apache.maven.plugins", "maven-assembly-plugin",
|
||||
"1.1.2-20070427.065136-1", "maven-assembly-plugin-1.1.2-20070427.065136-1.jar" );
|
||||
listener.deleteArtifact( getRepository().getId(), "org.apache.maven.plugins", "maven-assembly-plugin",
|
||||
"1.1.2-20070427.065136-1", "maven-assembly-plugin-1.1.2-20070427.065136-1.pom" );
|
||||
listener.deleteArtifact( metadataRepository, getRepository().getId(), "org.apache.maven.plugins",
|
||||
"maven-assembly-plugin", "1.1.2-20070427.065136-1",
|
||||
"maven-assembly-plugin-1.1.2-20070427.065136-1.jar" );
|
||||
listener.deleteArtifact( metadataRepository, getRepository().getId(), "org.apache.maven.plugins",
|
||||
"maven-assembly-plugin", "1.1.2-20070427.065136-1",
|
||||
"maven-assembly-plugin-1.1.2-20070427.065136-1.pom" );
|
||||
listenerControl.replay();
|
||||
|
||||
|
||||
repoPurge.process( PATH_TO_TEST_ORDER_OF_DELETION );
|
||||
|
||||
listenerControl.verify();
|
||||
|
||||
String versionRoot = repoRoot +
|
||||
"/org/apache/maven/plugins/maven-assembly-plugin/1.1.2-SNAPSHOT";
|
||||
|
||||
String versionRoot = repoRoot + "/org/apache/maven/plugins/maven-assembly-plugin/1.1.2-SNAPSHOT";
|
||||
|
||||
assertDeleted( versionRoot + "/maven-assembly-plugin-1.1.2-20070427.065136-1.jar" );
|
||||
assertDeleted( versionRoot + "/maven-assembly-plugin-1.1.2-20070427.065136-1.jar.sha1" );
|
||||
assertDeleted( versionRoot + "/maven-assembly-plugin-1.1.2-20070427.065136-1.jar.md5" );
|
||||
|
|
|
@@ -42,6 +42,9 @@
<role>org.apache.maven.archiva.configuration.FileTypes</role>
<role-hint>retention-count</role-hint>
</requirement>
<requirement>
<role>org.apache.archiva.metadata.repository.RepositorySessionFactory</role>
</requirement>
</requirements>
<configuration>
<id>repository-purge</id>
@@ -125,6 +128,9 @@
<role>org.apache.maven.archiva.configuration.FileTypes</role>
<role-hint>days-old</role-hint>
</requirement>
<requirement>
<role>org.apache.archiva.metadata.repository.RepositorySessionFactory</role>
</requirement>
</requirements>
<configuration>
<id>repository-purge</id>
@@ -185,5 +191,9 @@
</requirement>
</requirements>
</component>
<component>
<role>org.apache.archiva.metadata.repository.RepositorySessionFactory</role>
<implementation>org.apache.archiva.metadata.repository.TestRepositorySessionFactory</implementation>
</component>
</components>
</component-set>
@ -24,8 +24,11 @@ import org.apache.archiva.metadata.model.ProjectMetadata;
|
|||
import org.apache.archiva.metadata.model.ProjectVersionMetadata;
|
||||
import org.apache.archiva.metadata.repository.MetadataRepository;
|
||||
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
|
||||
import org.apache.archiva.metadata.repository.MetadataResolutionException;
|
||||
import org.apache.archiva.metadata.repository.RepositorySession;
|
||||
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
|
||||
import org.apache.archiva.metadata.repository.storage.RepositoryStorage;
|
||||
import org.apache.archiva.metadata.repository.storage.RepositoryStorageMetadataInvalidException;
|
||||
import org.apache.archiva.metadata.repository.storage.RepositoryStorageMetadataNotFoundException;
|
||||
import org.apache.maven.archiva.common.utils.VersionUtil;
|
||||
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
|
||||
import org.apache.maven.archiva.configuration.ConfigurationNames;
|
||||
|
@ -81,9 +84,11 @@ public class ArchivaMetadataCreationConsumer
|
|||
private List<String> includes = new ArrayList<String>();
|
||||
|
||||
/**
|
||||
* FIXME: can be of other types
|
||||
*
|
||||
* @plexus.requirement
|
||||
*/
|
||||
private MetadataRepository metadataRepository;
|
||||
private RepositorySessionFactory repositorySessionFactory;
|
||||
|
||||
/**
|
||||
* FIXME: this needs to be configurable based on storage type - and could also be instantiated per repo. Change to a
|
||||
|
@ -149,31 +154,35 @@ public class ArchivaMetadataCreationConsumer
project.setId( artifact.getProject() );

String projectVersion = VersionUtil.getBaseVersion( artifact.getVersion() );
// FIXME: maybe not too efficient since it may have already been read and stored for this artifact
ProjectVersionMetadata versionMetadata = null;

RepositorySession repositorySession = repositorySessionFactory.createSession();
try
{
versionMetadata = repositoryStorage.readProjectVersionMetadata( repoId, artifact.getNamespace(),
artifact.getProject(), projectVersion );
}
catch ( MetadataResolutionException e )
{
log.warn( "Error occurred resolving POM for artifact: " + path + "; message: " + e.getMessage() );
}
MetadataRepository metadataRepository = repositorySession.getRepository();

boolean createVersionMetadata = false;
if ( versionMetadata == null )
{
log.warn( "Missing or invalid POM for artifact: " + path + "; creating empty metadata" );
versionMetadata = new ProjectVersionMetadata();
versionMetadata.setId( projectVersion );
versionMetadata.setIncomplete( true );
createVersionMetadata = true;
}
boolean createVersionMetadata = false;

// FIXME: maybe not too efficient since it may have already been read and stored for this artifact
ProjectVersionMetadata versionMetadata = null;
try
{
versionMetadata = repositoryStorage.readProjectVersionMetadata( repoId, artifact.getNamespace(),
artifact.getProject(), projectVersion );
}
catch ( RepositoryStorageMetadataNotFoundException e )
{
log.warn( "Missing or invalid POM for artifact: " + path + "; creating empty metadata" );

versionMetadata = new ProjectVersionMetadata();
versionMetadata.setId( projectVersion );
versionMetadata.setIncomplete( true );
createVersionMetadata = true;
}
catch ( RepositoryStorageMetadataInvalidException e )
{
log.warn( "Error occurred resolving POM for artifact: " + path + "; message: " + e.getMessage() );
}

try
{
// FIXME: transaction
// read the metadata and update it if it is newer or doesn't exist
artifact.setWhenGathered( whenGathered );
metadataRepository.updateArtifact( repoId, project.getNamespace(), project.getId(), projectVersion,
@ -184,10 +193,16 @@ public class ArchivaMetadataCreationConsumer
versionMetadata );
}
metadataRepository.updateProject( repoId, project );
repositorySession.save();
}
catch ( MetadataRepositoryException e )
{
log.warn( "Error occurred persisting metadata for artifact: " + path + "; message: " + e.getMessage(), e );
repositorySession.revert();
}
finally
{
repositorySession.close();
}
}
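The hunks above establish the session lifecycle this commit applies throughout: obtain a RepositorySession from the injected factory, do all metadata work against the session's MetadataRepository, save on success, revert on a MetadataRepositoryException, and always close. A minimal sketch of that pattern, assuming only an injected repositorySessionFactory field as in the consumer above (repoId and project are illustrative local names, not part of the session API):

    RepositorySession session = repositorySessionFactory.createSession();
    try
    {
        MetadataRepository repo = session.getRepository();
        // metadata writes go through the session's repository
        repo.updateProject( repoId, project );
        // persist the changes made during this session
        session.save();
    }
    catch ( MetadataRepositoryException e )
    {
        // discard uncommitted changes on failure
        session.revert();
    }
    finally
    {
        // sessions must always be closed explicitly
        session.close();
    }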
@ -0,0 +1,36 @@
package org.apache.archiva.metadata.repository;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

public class TestRepositorySessionFactory
    implements RepositorySessionFactory
{
    private MetadataResolver resolver;

    public RepositorySession createSession()
    {
        return new RepositorySession( new TestMetadataRepository(), resolver );
    }

    public void setResolver( MetadataResolver resolver )
    {
        this.resolver = resolver;
    }
}
@ -19,17 +19,6 @@ package org.apache.maven.archiva.proxy;
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.File;
|
||||
import java.io.FileReader;
|
||||
import java.io.IOException;
|
||||
import java.text.ParseException;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.Calendar;
|
||||
import java.util.Collection;
|
||||
import java.util.Date;
|
||||
import java.util.Locale;
|
||||
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.commons.lang.ArrayUtils;
|
||||
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
|
||||
|
@ -48,6 +37,17 @@ import org.codehaus.plexus.spring.PlexusInSpringTestCase;
|
|||
import org.easymock.ArgumentsMatcher;
|
||||
import org.easymock.MockControl;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.File;
|
||||
import java.io.FileReader;
|
||||
import java.io.IOException;
|
||||
import java.text.ParseException;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.Calendar;
|
||||
import java.util.Collection;
|
||||
import java.util.Date;
|
||||
import java.util.Locale;
|
||||
|
||||
/**
|
||||
* AbstractProxyTestCase
|
||||
*
|
||||
|
@ -88,52 +88,57 @@ public abstract class AbstractProxyTestCase
|
|||
|
||||
protected static final String REPOPATH_LEGACY_MANAGED_TARGET = "target/test-repository/legacy-managed";
|
||||
|
||||
protected static final ArgumentsMatcher customWagonGetIfNewerMatcher = new ArgumentsMatcher() {
|
||||
protected static final ArgumentsMatcher customWagonGetIfNewerMatcher = new ArgumentsMatcher()
|
||||
{
|
||||
|
||||
public boolean matches(Object[] expected, Object[] actual) {
|
||||
if (expected.length < 1 || actual.length < 1)
|
||||
public boolean matches( Object[] expected, Object[] actual )
|
||||
{
|
||||
if ( expected.length < 1 || actual.length < 1 )
|
||||
{
|
||||
return false;
|
||||
}
|
||||
return MockControl.ARRAY_MATCHER.matches(ArrayUtils.remove(expected, 1), ArrayUtils.remove(actual, 1));
|
||||
return MockControl.ARRAY_MATCHER.matches( ArrayUtils.remove( expected, 1 ), ArrayUtils.remove( actual,
|
||||
1 ) );
|
||||
}
|
||||
|
||||
public String toString(Object[] arguments) {
|
||||
return ArrayUtils.toString(arguments);
|
||||
public String toString( Object[] arguments )
|
||||
{
|
||||
return ArrayUtils.toString( arguments );
|
||||
}
|
||||
};
|
||||
|
||||
protected static final ArgumentsMatcher customWagonGetMatcher = new ArgumentsMatcher() {
|
||||
protected static final ArgumentsMatcher customWagonGetMatcher = new ArgumentsMatcher()
|
||||
{
|
||||
|
||||
public boolean matches(Object[] expected, Object[] actual)
|
||||
public boolean matches( Object[] expected, Object[] actual )
|
||||
{
|
||||
if ( expected.length == 2 && actual.length == 2 )
|
||||
{
|
||||
if (expected.length == 2 && actual.length == 2)
|
||||
if ( expected[0] == null && actual[0] == null )
|
||||
{
|
||||
if (expected[0] == null && actual[0] == null)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
if (expected[0] == null)
|
||||
{
|
||||
return actual[0] == null;
|
||||
}
|
||||
|
||||
if (actual[0] == null)
|
||||
{
|
||||
return expected[0] == null;
|
||||
}
|
||||
|
||||
return expected[0].equals(actual[0]);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public String toString(Object[] arguments)
|
||||
{
|
||||
return ArrayUtils.toString(arguments);
|
||||
if ( expected[0] == null )
|
||||
{
|
||||
return actual[0] == null;
|
||||
}
|
||||
|
||||
if ( actual[0] == null )
|
||||
{
|
||||
return expected[0] == null;
|
||||
}
|
||||
|
||||
return expected[0].equals( actual[0] );
|
||||
}
|
||||
};
|
||||
return false;
|
||||
}
|
||||
|
||||
public String toString( Object[] arguments )
|
||||
{
|
||||
return ArrayUtils.toString( arguments );
|
||||
}
|
||||
};
|
||||
|
||||
protected MockControl wagonMockControl;
|
||||
|
||||
|
@ -187,7 +192,8 @@ public abstract class AbstractProxyTestCase
|
|||
assertNotNull( "Actual File should not be null.", actualFile );
|
||||
|
||||
assertTrue( "Check actual file exists.", actualFile.exists() );
|
||||
assertEquals( "Check filename path is appropriate.", expectedFile.getCanonicalPath(), actualFile.getCanonicalPath() );
|
||||
assertEquals( "Check filename path is appropriate.", expectedFile.getCanonicalPath(),
|
||||
actualFile.getCanonicalPath() );
|
||||
assertEquals( "Check file path matches.", expectedFile.getAbsolutePath(), actualFile.getAbsolutePath() );
|
||||
|
||||
String expectedContents = FileUtils.readFileToString( sourceFile, null );
|
||||
|
@ -200,7 +206,7 @@ public abstract class AbstractProxyTestCase
|
|||
assertNull( "Found file: " + downloadedFile + "; but was expecting a failure", downloadedFile );
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@SuppressWarnings( "unchecked" )
|
||||
protected void assertNoTempFiles( File expectedFile )
|
||||
{
|
||||
File workingDir = expectedFile.getParentFile();
|
||||
|
@ -209,7 +215,7 @@ public abstract class AbstractProxyTestCase
|
|||
return;
|
||||
}
|
||||
|
||||
Collection<File> tmpFiles = FileUtils.listFiles( workingDir, new String[] { "tmp" }, false );
|
||||
Collection<File> tmpFiles = FileUtils.listFiles( workingDir, new String[]{"tmp"}, false );
|
||||
if ( !tmpFiles.isEmpty() )
|
||||
{
|
||||
StringBuffer emsg = new StringBuffer();
|
||||
|
@ -266,8 +272,8 @@ public abstract class AbstractProxyTestCase
|
|||
{
|
||||
if ( !destination.exists() && !destination.mkdirs() )
|
||||
{
|
||||
throw new IOException( "Could not create destination directory '"
|
||||
+ destination.getAbsolutePath() + "'." );
|
||||
throw new IOException(
|
||||
"Could not create destination directory '" + destination.getAbsolutePath() + "'." );
|
||||
}
|
||||
|
||||
copyDirectoryStructure( file, destination );
|
||||
|
@ -340,8 +346,8 @@ public abstract class AbstractProxyTestCase
|
|||
|
||||
protected void saveConnector( String sourceRepoId, String targetRepoId, boolean disabled )
|
||||
{
|
||||
saveConnector( sourceRepoId, targetRepoId, ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS,
|
||||
SnapshotsPolicy.ALWAYS, CachedFailuresPolicy.NO, disabled );
|
||||
saveConnector( sourceRepoId, targetRepoId, ChecksumPolicy.IGNORE, ReleasesPolicy.ALWAYS, SnapshotsPolicy.ALWAYS,
|
||||
CachedFailuresPolicy.NO, disabled );
|
||||
}
|
||||
|
||||
protected void saveConnector( String sourceRepoId, String targetRepoId, String checksumPolicy, String releasePolicy,
|
||||
|
@ -352,7 +358,8 @@ public abstract class AbstractProxyTestCase
|
|||
}
|
||||
|
||||
protected void saveConnector( String sourceRepoId, String targetRepoId, String checksumPolicy, String releasePolicy,
|
||||
String snapshotPolicy, String cacheFailuresPolicy, String errorPolicy, boolean disabled )
|
||||
String snapshotPolicy, String cacheFailuresPolicy, String errorPolicy,
|
||||
boolean disabled )
|
||||
{
|
||||
saveConnector( sourceRepoId, targetRepoId, checksumPolicy, releasePolicy, snapshotPolicy, cacheFailuresPolicy,
|
||||
errorPolicy, PropagateErrorsOnUpdateDownloadPolicy.NOT_PRESENT, disabled );
|
||||
|
@ -371,7 +378,7 @@ public abstract class AbstractProxyTestCase
|
|||
connectorConfig.addPolicy( ProxyConnectorConfiguration.POLICY_CACHE_FAILURES, cacheFailuresPolicy );
|
||||
connectorConfig.addPolicy( ProxyConnectorConfiguration.POLICY_PROPAGATE_ERRORS, errorPolicy );
|
||||
connectorConfig.addPolicy( ProxyConnectorConfiguration.POLICY_PROPAGATE_ERRORS_ON_UPDATE, errorOnUpdatePolicy );
|
||||
connectorConfig.setDisabled(disabled);
|
||||
connectorConfig.setDisabled( disabled );
|
||||
|
||||
int count = config.getConfiguration().getProxyConnectors().size();
|
||||
config.getConfiguration().addProxyConnector( connectorConfig );
|
||||
|
@ -385,10 +392,10 @@ public abstract class AbstractProxyTestCase
|
|||
config.triggerChange( prefix + ".policies.checksum", connectorConfig.getPolicy( "checksum", "" ) );
|
||||
config.triggerChange( prefix + ".policies.snapshots", connectorConfig.getPolicy( "snapshots", "" ) );
|
||||
config.triggerChange( prefix + ".policies.cache-failures", connectorConfig.getPolicy( "cache-failures", "" ) );
|
||||
config.triggerChange( prefix + ".policies.propagate-errors",
|
||||
connectorConfig.getPolicy( "propagate-errors", "" ) );
|
||||
config.triggerChange( prefix + ".policies.propagate-errors-on-update",
|
||||
connectorConfig.getPolicy( "propagate-errors-on-update", "" ) );
|
||||
config.triggerChange( prefix + ".policies.propagate-errors", connectorConfig.getPolicy( "propagate-errors",
|
||||
"" ) );
|
||||
config.triggerChange( prefix + ".policies.propagate-errors-on-update", connectorConfig.getPolicy(
|
||||
"propagate-errors-on-update", "" ) );
|
||||
}
|
||||
|
||||
protected void saveManagedRepositoryConfig( String id, String name, String path, String layout )
|
||||
|
@ -444,6 +451,7 @@ public abstract class AbstractProxyTestCase
|
|||
|
||||
/**
|
||||
* {@inheritDoc}
|
||||
*
|
||||
* @see org.codehaus.plexus.spring.PlexusInSpringTestCase#getConfigLocation()
|
||||
*/
|
||||
@Override
|
||||
|
@ -488,19 +496,28 @@ public abstract class AbstractProxyTestCase
|
|||
config.getConfiguration().addManagedRepository( repoConfig );
|
||||
|
||||
// Setup target (proxied to) repository.
|
||||
saveRemoteRepositoryConfig( ID_PROXIED1, "Proxied Repository 1", new File( REPOPATH_PROXIED1 ).toURL()
|
||||
.toExternalForm(), "default" );
|
||||
saveRemoteRepositoryConfig( ID_PROXIED1, "Proxied Repository 1", new File(
|
||||
REPOPATH_PROXIED1 ).toURL().toExternalForm(), "default" );
|
||||
|
||||
// Setup target (proxied to) repository.
|
||||
saveRemoteRepositoryConfig( ID_PROXIED2, "Proxied Repository 2", new File( REPOPATH_PROXIED2 ).toURL()
|
||||
.toExternalForm(), "default" );
|
||||
saveRemoteRepositoryConfig( ID_PROXIED2, "Proxied Repository 2", new File(
|
||||
REPOPATH_PROXIED2 ).toURL().toExternalForm(), "default" );
|
||||
|
||||
// Setup target (proxied to) repository using legacy layout.
|
||||
saveRemoteRepositoryConfig( ID_LEGACY_PROXIED, "Proxied Legacy Repository", new File( REPOPATH_PROXIED_LEGACY )
|
||||
.toURL().toExternalForm(), "legacy" );
|
||||
saveRemoteRepositoryConfig( ID_LEGACY_PROXIED, "Proxied Legacy Repository", new File(
|
||||
REPOPATH_PROXIED_LEGACY ).toURL().toExternalForm(), "legacy" );
|
||||
|
||||
// Setup the proxy handler.
|
||||
proxyHandler = (RepositoryProxyConnectors) lookup( RepositoryProxyConnectors.class.getName() );
|
||||
try
|
||||
{
|
||||
proxyHandler = (RepositoryProxyConnectors) lookup( RepositoryProxyConnectors.class.getName() );
|
||||
}
|
||||
catch ( Exception e )
|
||||
{
|
||||
// TODO: handle in plexus-spring instead
|
||||
applicationContext.close();
|
||||
throw e;
|
||||
}
|
||||
|
||||
// Setup the wagon mock.
|
||||
wagonMockControl = MockControl.createNiceControl( Wagon.class );
|
||||
|
@ -548,8 +565,8 @@ public abstract class AbstractProxyTestCase
|
|||
if ( !sourceDir.exists() )
|
||||
{
|
||||
// This is just a warning.
|
||||
System.err.println( "[WARN] Skipping setup of testable managed repository, source dir does not exist: "
|
||||
+ sourceDir );
|
||||
System.err.println(
|
||||
"[WARN] Skipping setup of testable managed repository, source dir does not exist: " + sourceDir );
|
||||
}
|
||||
else
|
||||
{
|
||||
|
@ -583,8 +600,8 @@ public abstract class AbstractProxyTestCase
|
|||
|
||||
protected void assertNotModified( File file, long expectedModificationTime )
|
||||
{
|
||||
assertEquals( "File <" + file.getAbsolutePath() + "> not have been modified.",
|
||||
expectedModificationTime, file.lastModified() );
|
||||
assertEquals( "File <" + file.getAbsolutePath() + "> not have been modified.", expectedModificationTime,
|
||||
file.lastModified() );
|
||||
}
|
||||
|
||||
protected void assertNotExistsInManagedLegacyRepo( File file )
|
||||
|
@ -593,9 +610,9 @@ public abstract class AbstractProxyTestCase
|
|||
String managedLegacyPath = managedLegacyDir.getCanonicalPath();
|
||||
String testFile = file.getCanonicalPath();
|
||||
|
||||
assertTrue( "Unit Test Failure: File <" + testFile
|
||||
+ "> should be have been defined within the legacy managed path of <" + managedLegacyPath + ">", testFile
|
||||
.startsWith( managedLegacyPath ) );
|
||||
assertTrue( "Unit Test Failure: File <" + testFile +
|
||||
"> should be have been defined within the legacy managed path of <" + managedLegacyPath + ">",
|
||||
testFile.startsWith( managedLegacyPath ) );
|
||||
|
||||
assertFalse( "File < " + testFile + "> should not exist in managed legacy repository.", file.exists() );
|
||||
}
|
||||
|
@ -606,9 +623,9 @@ public abstract class AbstractProxyTestCase
|
|||
String managedDefaultPath = managedDefaultDir.getCanonicalPath();
|
||||
String testFile = file.getCanonicalPath();
|
||||
|
||||
assertTrue( "Unit Test Failure: File <" + testFile
|
||||
+ "> should be have been defined within the managed default path of <" + managedDefaultPath + ">", testFile
|
||||
.startsWith( managedDefaultPath ) );
|
||||
assertTrue( "Unit Test Failure: File <" + testFile +
|
||||
"> should be have been defined within the managed default path of <" + managedDefaultPath + ">",
|
||||
testFile.startsWith( managedDefaultPath ) );
|
||||
|
||||
assertFalse( "File < " + testFile + "> should not exist in managed default repository.", file.exists() );
|
||||
}
|
||||
|
|
|
@ -19,13 +19,6 @@ package org.apache.maven.archiva.proxy;
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
|
||||
import javax.servlet.ServletException;
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
|
||||
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
|
||||
|
@ -46,9 +39,15 @@ import org.mortbay.jetty.Request;
|
|||
import org.mortbay.jetty.Server;
|
||||
import org.mortbay.jetty.handler.AbstractHandler;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import javax.servlet.ServletException;
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
|
||||
/**
|
||||
* Integration test for connecting over a HTTP proxy.
|
||||
*
|
||||
*
|
||||
* @version $Id: ManagedDefaultTransferTest.java 677852 2008-07-18 08:16:24Z brett $
|
||||
*/
|
||||
public class HttpProxyTransferTest
|
||||
|
@ -75,7 +74,7 @@ public class HttpProxyTransferTest
|
|||
throws Exception
|
||||
{
|
||||
super.setUp();
|
||||
|
||||
|
||||
// Setup source repository (using default layout)
|
||||
String repoPath = "target/test-repository/managed/" + getName();
|
||||
|
||||
|
@ -96,8 +95,8 @@ public class HttpProxyTransferTest
|
|||
repo.setLocation( repoPath );
|
||||
repo.setLayout( "default" );
|
||||
|
||||
ManagedRepositoryContent repoContent =
|
||||
(ManagedRepositoryContent) lookup( ManagedRepositoryContent.class, "default" );
|
||||
ManagedRepositoryContent repoContent = (ManagedRepositoryContent) lookup( ManagedRepositoryContent.class,
|
||||
"default" );
|
||||
repoContent.setRepository( repo );
|
||||
managedDefaultRepository = repoContent;
|
||||
|
||||
|
@ -113,7 +112,7 @@ public class HttpProxyTransferTest
|
|||
response.setStatus( HttpServletResponse.SC_OK );
|
||||
response.getWriter().print( "get-default-layout-1.0.jar\n\n" );
|
||||
assertNotNull( request.getHeader( "Proxy-Connection" ) );
|
||||
|
||||
|
||||
( (Request) request ).setHandled( true );
|
||||
}
|
||||
};
|
||||
|
@ -130,7 +129,7 @@ public class HttpProxyTransferTest
|
|||
proxyConfig.setProtocol( "http" );
|
||||
proxyConfig.setId( PROXY_ID );
|
||||
config.getConfiguration().addNetworkProxy( proxyConfig );
|
||||
|
||||
|
||||
// Setup target (proxied to) repository.
|
||||
RemoteRepositoryConfiguration repoConfig = new RemoteRepositoryConfiguration();
|
||||
|
||||
|
@ -142,7 +141,16 @@ public class HttpProxyTransferTest
|
|||
config.getConfiguration().addRemoteRepository( repoConfig );
|
||||
|
||||
// Setup the proxy handler.
|
||||
proxyHandler = (RepositoryProxyConnectors) lookup( RepositoryProxyConnectors.class.getName() );
|
||||
try
|
||||
{
|
||||
proxyHandler = (RepositoryProxyConnectors) lookup( RepositoryProxyConnectors.class.getName() );
|
||||
}
|
||||
catch ( Exception e )
|
||||
{
|
||||
server.stop();
|
||||
applicationContext.close();
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -150,7 +158,7 @@ public class HttpProxyTransferTest
|
|||
throws Exception
|
||||
{
|
||||
super.tearDown();
|
||||
|
||||
|
||||
server.stop();
|
||||
}
|
||||
|
||||
|
@ -159,7 +167,7 @@ public class HttpProxyTransferTest
|
|||
{
|
||||
assertNull( System.getProperty( "http.proxyHost" ) );
|
||||
assertNull( System.getProperty( "http.proxyPort" ) );
|
||||
|
||||
|
||||
String path = "org/apache/maven/test/get-default-layout/1.0/get-default-layout-1.0.jar";
|
||||
|
||||
// Configure Connector (usually done within archiva.xml configuration)
|
||||
|
@ -183,7 +191,7 @@ public class HttpProxyTransferTest
|
|||
String expectedContents = FileUtils.readFileToString( sourceFile, null );
|
||||
String actualContents = FileUtils.readFileToString( downloadedFile, null );
|
||||
assertEquals( "Check file contents.", expectedContents, actualContents );
|
||||
|
||||
|
||||
assertNull( System.getProperty( "http.proxyHost" ) );
|
||||
assertNull( System.getProperty( "http.proxyPort" ) );
|
||||
}
|
||||
|
|
|
@ -0,0 +1,26 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
  ~ Licensed to the Apache Software Foundation (ASF) under one
  ~ or more contributor license agreements. See the NOTICE file
  ~ distributed with this work for additional information
  ~ regarding copyright ownership. The ASF licenses this file
  ~ to you under the Apache License, Version 2.0 (the
  ~ "License"); you may not use this file except in compliance
  ~ with the License. You may obtain a copy of the License at
  ~
  ~   http://www.apache.org/licenses/LICENSE-2.0
  ~
  ~ Unless required by applicable law or agreed to in writing,
  ~ software distributed under the License is distributed on an
  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
  ~ KIND, either express or implied. See the License for the
  ~ specific language governing permissions and limitations
  ~ under the License.
  -->

<beans xmlns="http://www.springframework.org/schema/beans"
       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
       xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd">

  <bean id="repositorySessionFactory" class="org.apache.archiva.metadata.repository.TestRepositorySessionFactory"/>
</beans>
@ -1,6 +1,7 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
|
||||
<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
|
||||
xmlns="http://maven.apache.org/POM/4.0.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
<parent>
|
||||
<artifactId>archiva-scheduler</artifactId>
|
||||
|
@ -56,6 +57,11 @@
|
|||
<artifactId>slf4j-simple</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.mockito</groupId>
|
||||
<artifactId>mockito-all</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
<build>
|
||||
<plugins>
|
||||
|
|
|
@ -19,7 +19,10 @@ package org.apache.archiva.scheduler.repository;
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
import org.apache.archiva.metadata.repository.MetadataRepository;
|
||||
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
|
||||
import org.apache.archiva.metadata.repository.RepositorySession;
|
||||
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
|
||||
import org.apache.archiva.metadata.repository.stats.RepositoryStatistics;
|
||||
import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
|
||||
import org.apache.archiva.repository.scanner.RepositoryContentConsumers;
|
||||
|
@ -75,6 +78,13 @@ public class ArchivaRepositoryScanningTaskExecutor
|
|||
*/
|
||||
private RepositoryStatisticsManager repositoryStatisticsManager;
|
||||
|
||||
/**
|
||||
* TODO: may be different implementations
|
||||
*
|
||||
* @plexus.requirement
|
||||
*/
|
||||
private RepositorySessionFactory repositorySessionFactory;
|
||||
|
||||
public void initialize()
|
||||
throws InitializationException
|
||||
{
|
||||
|
@ -127,48 +137,48 @@ public class ArchivaRepositoryScanningTaskExecutor
|
|||
long sinceWhen = RepositoryScanner.FRESH_SCAN;
|
||||
long previousFileCount = 0;
|
||||
|
||||
if ( !repoTask.isScanAll() )
|
||||
RepositorySession repositorySession = repositorySessionFactory.createSession();
|
||||
MetadataRepository metadataRepository = repositorySession.getRepository();
|
||||
try
|
||||
{
|
||||
RepositoryStatistics previousStats;
|
||||
if ( !repoTask.isScanAll() )
|
||||
{
|
||||
RepositoryStatistics previousStats = repositoryStatisticsManager.getLastStatistics(
|
||||
metadataRepository, repoId );
|
||||
if ( previousStats != null )
|
||||
{
|
||||
sinceWhen = previousStats.getScanStartTime().getTime();
|
||||
previousFileCount = previousStats.getTotalFileCount();
|
||||
}
|
||||
}
|
||||
|
||||
RepositoryScanStatistics stats;
|
||||
try
|
||||
{
|
||||
previousStats = repositoryStatisticsManager.getLastStatistics( repoId );
|
||||
stats = repoScanner.scan( arepo, sinceWhen );
|
||||
}
|
||||
catch ( MetadataRepositoryException e )
|
||||
catch ( RepositoryScannerException e )
|
||||
{
|
||||
throw new TaskExecutionException( "Unable to get previous statistics: " + e.getMessage(), e );
|
||||
throw new TaskExecutionException( "Repository error when executing repository job.", e );
|
||||
}
|
||||
if ( previousStats != null )
|
||||
{
|
||||
sinceWhen = previousStats.getScanStartTime().getTime();
|
||||
previousFileCount = previousStats.getTotalFileCount();
|
||||
}
|
||||
}
|
||||
|
||||
RepositoryScanStatistics stats;
|
||||
try
|
||||
{
|
||||
stats = repoScanner.scan( arepo, sinceWhen );
|
||||
}
|
||||
catch ( RepositoryScannerException e )
|
||||
{
|
||||
throw new TaskExecutionException( "Repository error when executing repository job.", e );
|
||||
}
|
||||
log.info( "Finished first scan: " + stats.toDump( arepo ) );
|
||||
|
||||
log.info( "Finished first scan: " + stats.toDump( arepo ) );
|
||||
|
||||
// further statistics will be populated by the following method
|
||||
Date endTime = new Date( stats.getWhenGathered().getTime() + stats.getDuration() );
|
||||
try
|
||||
{
|
||||
repositoryStatisticsManager.addStatisticsAfterScan( repoId, stats.getWhenGathered(), endTime,
|
||||
stats.getTotalFileCount(),
|
||||
// further statistics will be populated by the following method
|
||||
Date endTime = new Date( stats.getWhenGathered().getTime() + stats.getDuration() );
|
||||
repositoryStatisticsManager.addStatisticsAfterScan( metadataRepository, repoId, stats.getWhenGathered(),
|
||||
endTime, stats.getTotalFileCount(),
|
||||
stats.getTotalFileCount() - previousFileCount );
|
||||
repositorySession.save();
|
||||
}
|
||||
catch ( MetadataRepositoryException e )
|
||||
{
|
||||
throw new TaskExecutionException( "Unable to store updated statistics: " + e.getMessage(), e );
|
||||
}
|
||||
finally
|
||||
{
|
||||
repositorySession.close();
|
||||
}
|
||||
|
||||
// log.info( "Scanning for removed repository content" );
|
||||
|
||||
|
|
|
@ -19,7 +19,10 @@ package org.apache.archiva.scheduler.repository;
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
import org.apache.archiva.metadata.repository.MetadataRepository;
|
||||
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
|
||||
import org.apache.archiva.metadata.repository.RepositorySession;
|
||||
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
|
||||
import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
|
||||
import org.apache.archiva.scheduler.ArchivaTaskScheduler;
|
||||
import org.apache.maven.archiva.common.ArchivaException;
|
||||
|
@ -77,6 +80,13 @@ public class RepositoryArchivaTaskScheduler
|
|||
*/
|
||||
private RepositoryStatisticsManager repositoryStatisticsManager;
|
||||
|
||||
/**
|
||||
* TODO: could have multiple implementations
|
||||
*
|
||||
* @plexus.requirement
|
||||
*/
|
||||
private RepositorySessionFactory repositorySessionFactory;
|
||||
|
||||
private static final String REPOSITORY_SCAN_GROUP = "rg";
|
||||
|
||||
private static final String REPOSITORY_JOB = "rj";
|
||||
|
@ -114,33 +124,42 @@ public class RepositoryArchivaTaskScheduler
|
|||
List<ManagedRepositoryConfiguration> repositories =
|
||||
archivaConfiguration.getConfiguration().getManagedRepositories();
|
||||
|
||||
for ( ManagedRepositoryConfiguration repoConfig : repositories )
|
||||
RepositorySession repositorySession = repositorySessionFactory.createSession();
|
||||
try
|
||||
{
|
||||
if ( repoConfig.isScanned() )
|
||||
MetadataRepository metadataRepository = repositorySession.getRepository();
|
||||
for ( ManagedRepositoryConfiguration repoConfig : repositories )
|
||||
{
|
||||
try
|
||||
if ( repoConfig.isScanned() )
|
||||
{
|
||||
scheduleRepositoryJobs( repoConfig );
|
||||
}
|
||||
catch ( SchedulerException e )
|
||||
{
|
||||
throw new StartingException( "Unable to start scheduler: " + e.getMessage(), e );
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
if ( !isPreviouslyScanned( repoConfig ) )
|
||||
try
|
||||
{
|
||||
queueInitialRepoScan( repoConfig );
|
||||
scheduleRepositoryJobs( repoConfig );
|
||||
}
|
||||
catch ( SchedulerException e )
|
||||
{
|
||||
throw new StartingException( "Unable to start scheduler: " + e.getMessage(), e );
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
if ( !isPreviouslyScanned( repoConfig, metadataRepository ) )
|
||||
{
|
||||
queueInitialRepoScan( repoConfig );
|
||||
}
|
||||
}
|
||||
catch ( MetadataRepositoryException e )
|
||||
{
|
||||
log.warn( "Unable to determine if a repository is already scanned, skipping initial scan: " +
|
||||
e.getMessage(), e );
|
||||
}
|
||||
}
|
||||
catch ( MetadataRepositoryException e )
|
||||
{
|
||||
log.warn( "Unable to determine if a repository is already scanned, skipping initial scan: " +
|
||||
e.getMessage(), e );
|
||||
}
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
repositorySession.close();
|
||||
}
|
||||
}
|
||||
|
||||
public void stop()
|
||||
|
@ -270,10 +289,11 @@ public class RepositoryArchivaTaskScheduler
|
|||
}
|
||||
|
||||
@SuppressWarnings( "unchecked" )
|
||||
private boolean isPreviouslyScanned( ManagedRepositoryConfiguration repoConfig )
|
||||
private boolean isPreviouslyScanned( ManagedRepositoryConfiguration repoConfig,
|
||||
MetadataRepository metadataRepository )
|
||||
throws MetadataRepositoryException
|
||||
{
|
||||
return repositoryStatisticsManager.getLastStatistics( repoConfig.getId() ) != null;
|
||||
return repositoryStatisticsManager.getLastStatistics( metadataRepository, repoConfig.getId() ) != null;
|
||||
}
|
||||
|
||||
// MRM-848: Pre-configured repository initially appear to be empty
|
||||
|
|
|
@ -0,0 +1,43 @@
package org.apache.archiva.metadata.repository;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

public class TestRepositorySessionFactory
    implements RepositorySessionFactory
{
    private MetadataRepository repository;

    private MetadataResolver resolver;

    public RepositorySession createSession()
    {
        return new RepositorySession( repository, resolver );
    }

    public void setRepository( MetadataRepository repository )
    {
        this.repository = repository;
    }

    public void setResolver( MetadataResolver resolver )
    {
        this.resolver = resolver;
    }
}
@ -19,6 +19,7 @@ package org.apache.archiva.scheduler.repository;
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
import org.apache.archiva.metadata.repository.MetadataRepository;
|
||||
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
|
||||
import org.apache.archiva.metadata.repository.stats.RepositoryStatistics;
|
||||
import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
|
||||
|
@ -36,6 +37,8 @@ import java.util.Collection;
|
|||
import java.util.Date;
|
||||
import java.util.List;
|
||||
|
||||
import static org.mockito.Mockito.mock;
|
||||
|
||||
/**
|
||||
* ArchivaRepositoryScanningTaskExecutorTest
|
||||
*
|
||||
|
@ -54,12 +57,23 @@ public class ArchivaRepositoryScanningTaskExecutorTest
|
|||
|
||||
private TestConsumer testConsumer;
|
||||
|
||||
private MetadataRepository metadataRepository;
|
||||
|
||||
protected void setUp()
|
||||
throws Exception
|
||||
{
|
||||
super.setUp();
|
||||
|
||||
taskExecutor = (TaskExecutor) lookup( TaskExecutor.class, "test-repository-scanning" );
|
||||
try
|
||||
{
|
||||
taskExecutor = (TaskExecutor) lookup( TaskExecutor.class, "test-repository-scanning" );
|
||||
}
|
||||
catch ( Exception e )
|
||||
{
|
||||
// TODO: handle cleanup in plexus-spring lookup method instead
|
||||
applicationContext.close();
|
||||
throw e;
|
||||
}
|
||||
|
||||
File sourceRepoDir = new File( getBasedir(), "src/test/repositories/default-repository" );
|
||||
repoDir = new File( getBasedir(), "target/default-repository" );
|
||||
|
@ -98,6 +112,8 @@ public class ArchivaRepositoryScanningTaskExecutorTest
|
|||
|
||||
repositoryStatisticsManager = (RepositoryStatisticsManager) lookup( RepositoryStatisticsManager.class );
|
||||
testConsumer = (TestConsumer) lookup( KnownRepositoryContentConsumer.class, "test-consumer" );
|
||||
|
||||
metadataRepository = mock( MetadataRepository.class );
|
||||
}
|
||||
|
||||
protected void tearDown()
|
||||
|
@ -145,7 +161,8 @@ public class ArchivaRepositoryScanningTaskExecutorTest
|
|||
unprocessedResultList.size() );
|
||||
|
||||
// check correctness of new stats
|
||||
RepositoryStatistics newStats = repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID );
|
||||
RepositoryStatistics newStats = repositoryStatisticsManager.getLastStatistics( metadataRepository,
|
||||
TEST_REPO_ID );
|
||||
assertEquals( 0, newStats.getNewFileCount() );
|
||||
assertEquals( 31, newStats.getTotalFileCount() );
|
||||
// FIXME: can't test these as they weren't stored in the database, move to tests for RepositoryStatisticsManager implementation
|
||||
|
@ -176,7 +193,8 @@ public class ArchivaRepositoryScanningTaskExecutorTest
|
|||
unprocessedResultList.size() );
|
||||
|
||||
// check correctness of new stats
|
||||
RepositoryStatistics updatedStats = repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID );
|
||||
RepositoryStatistics updatedStats = repositoryStatisticsManager.getLastStatistics( metadataRepository,
|
||||
TEST_REPO_ID );
|
||||
assertEquals( 2, updatedStats.getNewFileCount() );
|
||||
assertEquals( 33, updatedStats.getTotalFileCount() );
|
||||
// FIXME: can't test these as they weren't stored in the database, move to tests for RepositoryStatisticsManager implementation
|
||||
|
@ -220,7 +238,8 @@ public class ArchivaRepositoryScanningTaskExecutorTest
|
|||
unprocessedResultList.size() );
|
||||
|
||||
// check correctness of new stats
|
||||
RepositoryStatistics newStats = repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID );
|
||||
RepositoryStatistics newStats = repositoryStatisticsManager.getLastStatistics( metadataRepository,
|
||||
TEST_REPO_ID );
|
||||
assertEquals( 2, newStats.getNewFileCount() );
|
||||
assertEquals( 33, newStats.getTotalFileCount() );
|
||||
// FIXME: can't test these as they weren't stored in the database, move to tests for RepositoryStatisticsManager implementation
|
||||
|
@ -264,7 +283,8 @@ public class ArchivaRepositoryScanningTaskExecutorTest
|
|||
unprocessedResultList.size() );
|
||||
|
||||
// check correctness of new stats
|
||||
RepositoryStatistics newStats = repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID );
|
||||
RepositoryStatistics newStats = repositoryStatisticsManager.getLastStatistics( metadataRepository,
|
||||
TEST_REPO_ID );
|
||||
assertEquals( 2, newStats.getNewFileCount() );
|
||||
assertEquals( 33, newStats.getTotalFileCount() );
|
||||
// FIXME: can't test these as they weren't stored in the database, move to tests for RepositoryStatisticsManager implementation
|
||||
|
@ -283,8 +303,8 @@ public class ArchivaRepositoryScanningTaskExecutorTest
|
|||
repoTask.setScanAll( true );
|
||||
|
||||
Date date = Calendar.getInstance().getTime();
|
||||
repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, new Date( date.getTime() - 1234567 ), date, 8,
|
||||
8 );
|
||||
repositoryStatisticsManager.addStatisticsAfterScan( metadataRepository, TEST_REPO_ID, new Date(
|
||||
date.getTime() - 1234567 ), date, 8, 8 );
|
||||
|
||||
taskExecutor.executeTask( repoTask );
|
||||
|
||||
|
@ -308,7 +328,7 @@ public class ArchivaRepositoryScanningTaskExecutorTest
|
|||
stats.setTotalProjectCount( 5 );
|
||||
stats.setTotalArtifactFileSize( 38545 );
|
||||
|
||||
repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, new Date( date.getTime() - 1234567 ), date,
|
||||
31, 31 );
|
||||
repositoryStatisticsManager.addStatisticsAfterScan( metadataRepository, TEST_REPO_ID, new Date(
|
||||
date.getTime() - 1234567 ), date, 31, 31 );
|
||||
}
|
||||
}
|
||||
|
|
|
@ -19,21 +19,22 @@ package org.apache.archiva.scheduler.repository;
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
import org.apache.archiva.metadata.repository.MetadataRepository;
|
||||
import org.apache.archiva.metadata.repository.stats.RepositoryStatistics;
|
||||
import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.apache.archiva.metadata.repository.stats.RepositoryStatistics;
|
||||
import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
|
||||
|
||||
public class TestRepositoryStatisticsManager
|
||||
implements RepositoryStatisticsManager
|
||||
{
|
||||
private Map<String, List<RepositoryStatistics>> repoStats = new HashMap<String, List<RepositoryStatistics>>();
|
||||
|
||||
public RepositoryStatistics getLastStatistics( String repositoryId )
|
||||
public RepositoryStatistics getLastStatistics( MetadataRepository metadataRepository, String repositoryId )
|
||||
{
|
||||
List<RepositoryStatistics> repositoryStatisticsList = getStatsList( repositoryId );
|
||||
return !repositoryStatisticsList.isEmpty()
|
||||
|
@ -41,8 +42,8 @@ public class TestRepositoryStatisticsManager
|
|||
: null;
|
||||
}
|
||||
|
||||
public void addStatisticsAfterScan( String repositoryId, Date startTime, Date endTime, long totalFiles,
|
||||
long newFiles )
|
||||
public void addStatisticsAfterScan( MetadataRepository metadataRepository, String repositoryId, Date startTime,
|
||||
Date endTime, long totalFiles, long newFiles )
|
||||
{
|
||||
List<RepositoryStatistics> stats = getStatsList( repositoryId );
|
||||
|
||||
|
@ -55,12 +56,13 @@ public class TestRepositoryStatisticsManager
|
|||
stats.add( repositoryStatistics );
|
||||
}
|
||||
|
||||
public void deleteStatistics( String repositoryId )
|
||||
public void deleteStatistics( MetadataRepository metadataRepository, String repositoryId )
|
||||
{
|
||||
repoStats.remove( repositoryId );
|
||||
}
|
||||
|
||||
public List<RepositoryStatistics> getStatisticsInRange( String repositoryId, Date startDate, Date endDate )
|
||||
public List<RepositoryStatistics> getStatisticsInRange( MetadataRepository metadataRepository, String repositoryId,
|
||||
Date startDate, Date endDate )
|
||||
{
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
|
|
@ -37,6 +37,9 @@
|
|||
<requirement>
|
||||
<role>org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager</role>
|
||||
</requirement>
|
||||
<requirement>
|
||||
<role>org.apache.archiva.metadata.repository.RepositorySessionFactory</role>
|
||||
</requirement>
|
||||
</requirements>
|
||||
</component>
|
||||
|
||||
|
@ -87,5 +90,10 @@
|
|||
</requirement>
|
||||
</requirements>
|
||||
</component>
|
||||
|
||||
<component>
|
||||
<role>org.apache.archiva.metadata.repository.RepositorySessionFactory</role>
|
||||
<implementation>org.apache.archiva.metadata.repository.TestRepositorySessionFactory</implementation>
|
||||
</component>
|
||||
</components>
|
||||
</component-set>
|
||||
|
|
|
@ -19,22 +19,10 @@ package org.apache.archiva.rss.processor;
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
import org.apache.archiva.metadata.repository.MetadataRepository;
|
||||
|
||||
public abstract class AbstractArtifactsRssFeedProcessor
|
||||
implements RssFeedProcessor
|
||||
{
|
||||
/**
|
||||
* @plexus.requirement
|
||||
*/
|
||||
protected MetadataRepository metadataRepository;
|
||||
|
||||
protected abstract String getTitle();
|
||||
|
||||
protected abstract String getDescription();
|
||||
|
||||
public void setMetadataRepository( MetadataRepository metadataRepository )
|
||||
{
|
||||
this.metadataRepository = metadataRepository;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -22,6 +22,7 @@ package org.apache.archiva.rss.processor;
|
|||
import com.sun.syndication.feed.synd.SyndFeed;
|
||||
import com.sun.syndication.io.FeedException;
|
||||
import org.apache.archiva.metadata.model.ArtifactMetadata;
|
||||
import org.apache.archiva.metadata.repository.MetadataRepository;
|
||||
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
|
||||
import org.apache.archiva.rss.RssFeedEntry;
|
||||
import org.apache.archiva.rss.RssFeedGenerator;
|
||||
|
@ -64,7 +65,7 @@ public class NewArtifactsRssFeedProcessor
|
|||
* Process the newly discovered artifacts in the repository. Generate feeds for new artifacts in the repository and
|
||||
* new versions of artifact.
|
||||
*/
|
||||
public SyndFeed process( Map<String, String> reqParams )
|
||||
public SyndFeed process( Map<String, String> reqParams, MetadataRepository metadataRepository )
|
||||
throws FeedException
|
||||
{
|
||||
log.debug( "Process new artifacts into rss feeds." );
|
||||
|
@ -72,20 +73,20 @@ public class NewArtifactsRssFeedProcessor
|
|||
String repoId = reqParams.get( RssFeedProcessor.KEY_REPO_ID );
|
||||
if ( repoId != null )
|
||||
{
|
||||
return processNewArtifactsInRepo( repoId );
|
||||
return processNewArtifactsInRepo( repoId, metadataRepository );
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private SyndFeed processNewArtifactsInRepo( String repoId )
|
||||
private SyndFeed processNewArtifactsInRepo( String repoId, MetadataRepository metadataRepository )
|
||||
throws FeedException
|
||||
{
|
||||
Calendar greaterThanThisDate = Calendar.getInstance( GMT_TIME_ZONE );
|
||||
greaterThanThisDate.add( Calendar.DATE, -( getNumberOfDaysBeforeNow() ) );
|
||||
greaterThanThisDate.clear( Calendar.MILLISECOND );
|
||||
|
||||
List<ArtifactMetadata> artifacts = null;
|
||||
List<ArtifactMetadata> artifacts;
|
||||
try
|
||||
{
|
||||
artifacts = metadataRepository.getArtifactsByDateRange( repoId, greaterThanThisDate.getTime(), null );
|
||||
|
|
|
@ -22,6 +22,7 @@ package org.apache.archiva.rss.processor;
|
|||
import com.sun.syndication.feed.synd.SyndFeed;
|
||||
import com.sun.syndication.io.FeedException;
|
||||
import org.apache.archiva.metadata.model.ArtifactMetadata;
|
||||
import org.apache.archiva.metadata.repository.MetadataRepository;
|
||||
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
|
||||
import org.apache.archiva.metadata.repository.MetadataResolutionException;
|
||||
import org.apache.archiva.rss.RssFeedEntry;
|
||||
|
@ -59,7 +60,7 @@ public class NewVersionsOfArtifactRssFeedProcessor
|
|||
/**
|
||||
* Process all versions of the artifact which had a rss feed request.
|
||||
*/
|
||||
public SyndFeed process( Map<String, String> reqParams )
|
||||
public SyndFeed process( Map<String, String> reqParams, MetadataRepository metadataRepository )
|
||||
throws FeedException
|
||||
{
|
||||
String groupId = reqParams.get( RssFeedProcessor.KEY_GROUP_ID );
|
||||
|
@ -67,13 +68,14 @@ public class NewVersionsOfArtifactRssFeedProcessor
|
|||
|
||||
if ( groupId != null && artifactId != null )
|
||||
{
|
||||
return processNewVersionsOfArtifact( groupId, artifactId );
|
||||
return processNewVersionsOfArtifact( groupId, artifactId, metadataRepository );
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private SyndFeed processNewVersionsOfArtifact( String groupId, String artifactId )
|
||||
private SyndFeed processNewVersionsOfArtifact( String groupId, String artifactId,
|
||||
MetadataRepository metadataRepository )
|
||||
throws FeedException
|
||||
{
|
||||
List<ArtifactMetadata> artifacts = new ArrayList<ArtifactMetadata>();
|
||||
|
|
|
@ -21,6 +21,7 @@ package org.apache.archiva.rss.processor;
|
|||
|
||||
import com.sun.syndication.feed.synd.SyndFeed;
|
||||
import com.sun.syndication.io.FeedException;
|
||||
import org.apache.archiva.metadata.repository.MetadataRepository;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
|
@ -35,6 +36,6 @@ public interface RssFeedProcessor
|
|||
|
||||
public static final String KEY_ARTIFACT_ID = "artifactId";
|
||||
|
||||
SyndFeed process( Map<String, String> reqParams )
|
||||
SyndFeed process( Map<String, String> reqParams, MetadataRepository metadataRepository )
|
||||
throws FeedException;
|
||||
}
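With this interface change the feed processors no longer hold a MetadataRepository field; callers supply one per request. A minimal sketch of a caller fragment, assuming it has a RepositorySessionFactory injected like the other components in this change (the surrounding caller code and the repoId variable are illustrative, not part of this commit; FeedException propagates to the caller):

    RepositorySession session = repositorySessionFactory.createSession();
    try
    {
        Map<String, String> reqParams = new HashMap<String, String>();
        reqParams.put( RssFeedProcessor.KEY_REPO_ID, repoId );

        // the processor now works against the repository of the caller's session
        SyndFeed feed = processor.process( reqParams, session.getRepository() );
    }
    finally
    {
        session.close();
    }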
|
||||
|
|
|
@ -59,7 +59,6 @@ public class NewArtifactsRssFeedProcessorTest
|
|||
newArtifactsProcessor.setGenerator( new RssFeedGenerator() );
|
||||
|
||||
metadataRepository = new MetadataRepositoryMock();
|
||||
newArtifactsProcessor.setMetadataRepository( metadataRepository );
|
||||
}
|
||||
|
||||
@SuppressWarnings( "unchecked" )
|
||||
|
@ -83,7 +82,7 @@ public class NewArtifactsRssFeedProcessorTest
|
|||
Map<String, String> reqParams = new HashMap<String, String>();
|
||||
reqParams.put( RssFeedProcessor.KEY_REPO_ID, TEST_REPO );
|
||||
|
||||
SyndFeed feed = newArtifactsProcessor.process( reqParams );
|
||||
SyndFeed feed = newArtifactsProcessor.process( reqParams, metadataRepository );
|
||||
|
||||
// check that the date used in the call is close to the one passed (5 seconds difference at most)
|
||||
Calendar cal = Calendar.getInstance( TimeZone.getTimeZone( "GMT" ) );
|
||||
|
|
|
@ -60,10 +60,9 @@ public class NewVersionsOfArtifactRssFeedProcessorTest
|
|||
|
||||
metadataRepositoryControl = MockControl.createControl( MetadataRepository.class );
|
||||
metadataRepository = (MetadataRepository) metadataRepositoryControl.getMock();
|
||||
newVersionsProcessor.setMetadataRepository( metadataRepository );
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@SuppressWarnings( "unchecked" )
|
||||
public void testProcess()
|
||||
throws Exception
|
||||
{
|
||||
|
@ -80,23 +79,23 @@ public class NewVersionsOfArtifactRssFeedProcessorTest
|
|||
reqParams.put( RssFeedProcessor.KEY_GROUP_ID, GROUP_ID );
|
||||
reqParams.put( RssFeedProcessor.KEY_ARTIFACT_ID, ARTIFACT_ID );
|
||||
|
||||
metadataRepositoryControl.expectAndReturn( metadataRepository.getRepositories(),
|
||||
Collections.singletonList( TEST_REPO ) );
|
||||
metadataRepositoryControl.expectAndReturn(
|
||||
metadataRepository.getProjectVersions( TEST_REPO, GROUP_ID, ARTIFACT_ID ),
|
||||
Arrays.asList( "1.0.1", "1.0.2", "1.0.3-SNAPSHOT" ) );
|
||||
metadataRepositoryControl.expectAndReturn(
|
||||
metadataRepository.getArtifacts( TEST_REPO, GROUP_ID, ARTIFACT_ID, "1.0.1" ),
|
||||
Collections.singletonList( artifact1 ) );
|
||||
metadataRepositoryControl.expectAndReturn(
|
||||
metadataRepository.getArtifacts( TEST_REPO, GROUP_ID, ARTIFACT_ID, "1.0.2" ),
|
||||
Collections.singletonList( artifact2 ) );
|
||||
metadataRepositoryControl.expectAndReturn(
|
||||
metadataRepository.getArtifacts( TEST_REPO, GROUP_ID, ARTIFACT_ID, "1.0.3-SNAPSHOT" ),
|
||||
Collections.singletonList( artifact3 ) );
|
||||
metadataRepositoryControl.expectAndReturn( metadataRepository.getRepositories(), Collections.singletonList(
|
||||
TEST_REPO ) );
|
||||
metadataRepositoryControl.expectAndReturn( metadataRepository.getProjectVersions( TEST_REPO, GROUP_ID,
|
||||
ARTIFACT_ID ), Arrays.asList(
|
||||
"1.0.1", "1.0.2", "1.0.3-SNAPSHOT" ) );
|
||||
metadataRepositoryControl.expectAndReturn( metadataRepository.getArtifacts( TEST_REPO, GROUP_ID, ARTIFACT_ID,
|
||||
"1.0.1" ),
|
||||
Collections.singletonList( artifact1 ) );
|
||||
metadataRepositoryControl.expectAndReturn( metadataRepository.getArtifacts( TEST_REPO, GROUP_ID, ARTIFACT_ID,
|
||||
"1.0.2" ),
|
||||
Collections.singletonList( artifact2 ) );
|
||||
metadataRepositoryControl.expectAndReturn( metadataRepository.getArtifacts( TEST_REPO, GROUP_ID, ARTIFACT_ID,
|
||||
"1.0.3-SNAPSHOT" ),
|
||||
Collections.singletonList( artifact3 ) );
|
||||
metadataRepositoryControl.replay();
|
||||
|
||||
SyndFeed feed = newVersionsProcessor.process( reqParams );
|
||||
SyndFeed feed = newVersionsProcessor.process( reqParams, metadataRepository );
|
||||
|
||||
assertEquals( "New Versions of Artifact 'org.apache.archiva:artifact-two'", feed.getTitle() );
|
||||
assertEquals( "New versions of artifact 'org.apache.archiva:artifact-two' found during repository scan.",
|
||||
|
@ -108,8 +107,8 @@ public class NewVersionsOfArtifactRssFeedProcessorTest
|
|||
|
||||
assertEquals( 2, entries.size() );
|
||||
|
||||
assertEquals( "New Versions of Artifact 'org.apache.archiva:artifact-two' as of " + whenGathered,
|
||||
entries.get( 0 ).getTitle() );
|
||||
assertEquals( "New Versions of Artifact 'org.apache.archiva:artifact-two' as of " + whenGathered, entries.get(
|
||||
0 ).getTitle() );
|
||||
assertEquals( whenGathered, entries.get( 0 ).getPublishedDate() );
|
||||
|
||||
assertEquals( "New Versions of Artifact 'org.apache.archiva:artifact-two' as of " + whenGatheredNext,
|
||||
|
|
|
@ -18,7 +18,7 @@
|
|||
~ under the License.
|
||||
-->
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
<parent>
|
||||
<groupId>org.apache.archiva</groupId>
|
||||
|
@ -41,7 +41,7 @@
|
|||
<groupId>commons-io</groupId>
|
||||
<artifactId>commons-io</artifactId>
|
||||
<version>1.4</version>
|
||||
</dependency>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.codehaus.plexus</groupId>
|
||||
|
@ -73,10 +73,10 @@
|
|||
<version>1.0.1</version>
|
||||
<scope>test</scope>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>commons-logging</groupId>
|
||||
<artifactId>commons-logging</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>commons-logging</groupId>
|
||||
<artifactId>commons-logging</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
|
@ -89,7 +89,7 @@
|
|||
<groupId>jaxen</groupId>
|
||||
<artifactId>jaxen</artifactId>
|
||||
</dependency>
|
||||
|
||||
|
||||
<!-- Dependencies below are provided by the appserver -->
|
||||
<dependency>
|
||||
<groupId>org.apache.derby</groupId>
|
||||
|
@ -143,7 +143,7 @@
|
|||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
|
||||
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-surefire-plugin</artifactId>
|
||||
|
@ -176,7 +176,7 @@
|
|||
<name>browser</name>
|
||||
<value>${selenium.browser}</value>
|
||||
</property>
|
||||
<property>
|
||||
<property>
|
||||
<name>baseUrl</name>
|
||||
<value>${baseUrl}</value>
|
||||
</property>
|
||||
|
@ -227,6 +227,7 @@
|
|||
<goal>copy-dependencies</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<!-- TODO: this sometimes copies everything, causing problems with the server start up -->
|
||||
<includeGroupIds>org.apache.derby,javax.mail,javax.activation</includeGroupIds>
|
||||
<overWriteReleases>false</overWriteReleases>
|
||||
<overWriteSnapshots>true</overWriteSnapshots>
|
||||
|
@ -247,22 +248,22 @@
|
|||
<configuration>
|
||||
<tasks>
|
||||
<copy todir="${project.build.directory}/appserver-base">
|
||||
<fileset dir="src/test/resources/appserver-base" />
|
||||
<fileset dir="src/test/resources/appserver-base"/>
|
||||
</copy>
|
||||
<copy todir="${project.build.directory}/repository">
|
||||
<fileset dir="src/test/resources/repository" />
|
||||
<fileset dir="src/test/resources/repository"/>
|
||||
</copy>
|
||||
<copy todir="${project.build.directory}/index">
|
||||
<fileset dir="src/test/resources/index" />
|
||||
<fileset dir="src/test/resources/index"/>
|
||||
</copy>
|
||||
<copy todir="${project.build.directory}/snapshots">
|
||||
<fileset dir="src/test/resources/snapshots" />
|
||||
<fileset dir="src/test/resources/snapshots"/>
|
||||
</copy>
|
||||
<copy todir="${project.build.directory}/projects">
|
||||
<fileset dir="src/test/resources/projects" />
|
||||
<fileset dir="src/test/resources/projects"/>
|
||||
</copy>
|
||||
<copy todir="${project.build.directory}/local-repo">
|
||||
<fileset dir="src/test/resources/local-repo" />
|
||||
<fileset dir="src/test/resources/local-repo"/>
|
||||
</copy>
|
||||
</tasks>
|
||||
</configuration>
|
||||
|
@ -273,12 +274,12 @@
|
|||
<configuration>
|
||||
<tasks>
|
||||
<copy todir="${project.build.directory}/${container.name}conf">
|
||||
<fileset dir="src/test/${container.name}" />
|
||||
<fileset dir="src/test/${container.name}"/>
|
||||
</copy>
|
||||
<copy
|
||||
todir="${cargo.install.dir}/${container.name}/apache-tomcat-${tomcat5x.version}/apache-tomcat-${tomcat5x.version}/common/lib">
|
||||
todir="${cargo.install.dir}/${container.name}/apache-tomcat-${tomcat5x.version}/apache-tomcat-${tomcat5x.version}/common/lib">
|
||||
<fileset dir="${project.build.directory}/providedDependencies">
|
||||
<include name="**/*.jar" />
|
||||
<include name="**/*.jar"/>
|
||||
</fileset>
|
||||
</copy>
|
||||
</tasks>
|
||||
|
@ -292,8 +293,8 @@
|
|||
<phase>integration-test</phase>
|
||||
<configuration>
|
||||
<tasks>
|
||||
<get src="http://localhost:9696/archiva/" dest="${project.build.directory}/index.html" />
|
||||
<delete file="${project.build.directory}/index.html" />
|
||||
<get src="http://localhost:9696/archiva/" dest="${project.build.directory}/index.html"/>
|
||||
<delete file="${project.build.directory}/index.html"/>
|
||||
</tasks>
|
||||
</configuration>
|
||||
<goals>
|
||||
|
@ -386,7 +387,7 @@
|
|||
<configuration>
|
||||
<background>true</background>
|
||||
<port>${seleniumPort}</port>
|
||||
<logOutput>true</logOutput>
|
||||
<logOutput>true</logOutput>
|
||||
</configuration>
|
||||
</execution>
|
||||
</executions>
|
||||
|
@ -403,7 +404,9 @@
|
|||
</activation>
|
||||
<properties>
|
||||
<container.name>tomcat5x</container.name>
|
||||
<container.url>http://archive.apache.org/dist/tomcat/tomcat-5/v${tomcat5x.version}/bin/apache-tomcat-${tomcat5x.version}.zip</container.url>
|
||||
<container.url>
|
||||
http://archive.apache.org/dist/tomcat/tomcat-5/v${tomcat5x.version}/bin/apache-tomcat-${tomcat5x.version}.zip
|
||||
</container.url>
|
||||
</properties>
|
||||
</profile>
|
||||
<profile>
|
||||
|
@ -415,7 +418,7 @@
|
|||
</activation>
|
||||
<properties>
|
||||
<selenium.browser>*firefox</selenium.browser>
|
||||
<excluded.groups />
|
||||
<excluded.groups/>
|
||||
</properties>
|
||||
</profile>
|
||||
<profile>
|
||||
|
|
|
@@ -22,6 +22,7 @@ package org.apache.maven.archiva.web.action;
import org.apache.archiva.metadata.model.ProjectVersionMetadata;
import org.apache.archiva.metadata.repository.MetadataResolutionException;
import org.apache.archiva.metadata.repository.MetadataResolver;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.storage.maven2.MavenProjectFacet;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;

@@ -42,11 +43,6 @@ import java.util.Set;
public class BrowseAction
extends AbstractRepositoryBasedAction
{
/**
* @plexus.requirement
*/
private MetadataResolver metadataResolver;

private String groupId;

private String artifactId;

@@ -75,29 +71,40 @@ public class BrowseAction
// TODO: this logic should be optional, particularly remembering we want to keep this code simple
// it is located here to avoid the content repository implementation needing to do too much for what
// is essentially presentation code
Set<String> namespacesToCollapse = new LinkedHashSet<String>();
for ( String repoId : selectedRepos )
Set<String> namespacesToCollapse;
RepositorySession repositorySession = repositorySessionFactory.createSession();
try
{
namespacesToCollapse.addAll( metadataResolver.resolveRootNamespaces( repoId ) );
}
MetadataResolver metadataResolver = repositorySession.getResolver();
namespacesToCollapse = new LinkedHashSet<String>();
for ( String repoId : selectedRepos )
{
namespacesToCollapse.addAll( metadataResolver.resolveRootNamespaces( repositorySession, repoId ) );
}

for ( String n : namespacesToCollapse )
for ( String n : namespacesToCollapse )
{
// TODO: check performance of this
namespaces.add( collapseNamespaces( repositorySession, metadataResolver, selectedRepos, n ) );
}
}
finally
{
// TODO: check performance of this
namespaces.add( collapseNamespaces( selectedRepos, n ) );
repositorySession.close();
}

this.namespaces = getSortedList( namespaces );
return SUCCESS;
}

private String collapseNamespaces( Collection<String> repoIds, String n )
private String collapseNamespaces( RepositorySession repositorySession, MetadataResolver metadataResolver,
Collection<String> repoIds, String n )
throws MetadataResolutionException
{
Set<String> subNamespaces = new LinkedHashSet<String>();
for ( String repoId : repoIds )
{
subNamespaces.addAll( metadataResolver.resolveNamespaces( repoId, n ) );
subNamespaces.addAll( metadataResolver.resolveNamespaces( repositorySession, repoId, n ) );
}
if ( subNamespaces.size() != 1 )
{

@@ -111,7 +118,7 @@ public class BrowseAction
{
for ( String repoId : repoIds )
{
Collection<String> projects = metadataResolver.resolveProjects( repoId, n );
Collection<String> projects = metadataResolver.resolveProjects( repositorySession, repoId, n );
if ( projects != null && !projects.isEmpty() )
{
if ( log.isDebugEnabled() )

@@ -121,7 +128,8 @@ public class BrowseAction
return n;
}
}
return collapseNamespaces( repoIds, n + "." + subNamespaces.iterator().next() );
return collapseNamespaces( repositorySession, metadataResolver, repoIds,
n + "." + subNamespaces.iterator().next() );
}
}

@@ -143,22 +151,34 @@ public class BrowseAction

Set<String> projects = new LinkedHashSet<String>();

Set<String> namespacesToCollapse = new LinkedHashSet<String>();
for ( String repoId : selectedRepos )
RepositorySession repositorySession = repositorySessionFactory.createSession();
Set<String> namespaces;
try
{
namespacesToCollapse.addAll( metadataResolver.resolveNamespaces( repoId, groupId ) );
MetadataResolver metadataResolver = repositorySession.getResolver();

projects.addAll( metadataResolver.resolveProjects( repoId, groupId ) );
Set<String> namespacesToCollapse = new LinkedHashSet<String>();
for ( String repoId : selectedRepos )
{
namespacesToCollapse.addAll( metadataResolver.resolveNamespaces( repositorySession, repoId, groupId ) );

projects.addAll( metadataResolver.resolveProjects( repositorySession, repoId, groupId ) );
}

// TODO: this logic should be optional, particularly remembering we want to keep this code simple
// it is located here to avoid the content repository implementation needing to do too much for what
// is essentially presentation code
namespaces = new LinkedHashSet<String>();
for ( String n : namespacesToCollapse )
{
// TODO: check performance of this
namespaces.add( collapseNamespaces( repositorySession, metadataResolver, selectedRepos,
groupId + "." + n ) );
}
}

// TODO: this logic should be optional, particularly remembering we want to keep this code simple
// it is located here to avoid the content repository implementation needing to do too much for what
// is essentially presentation code
Set<String> namespaces = new LinkedHashSet<String>();
for ( String n : namespacesToCollapse )
finally
{
// TODO: check performance of this
namespaces.add( collapseNamespaces( selectedRepos, groupId + "." + n ) );
repositorySession.close();
}

this.namespaces = getSortedList( namespaces );

@@ -196,21 +216,33 @@ public class BrowseAction
return GlobalResults.ACCESS_TO_NO_REPOS;
}

Set<String> versions = new LinkedHashSet<String>();
for ( String repoId : selectedRepos )
RepositorySession repositorySession = repositorySessionFactory.createSession();
try
{
versions.addAll( metadataResolver.resolveProjectVersions( repoId, groupId, artifactId ) );
MetadataResolver metadataResolver = repositorySession.getResolver();

Set<String> versions = new LinkedHashSet<String>();
for ( String repoId : selectedRepos )
{
versions.addAll( metadataResolver.resolveProjectVersions( repositorySession, repoId, groupId,
artifactId ) );
}

// TODO: sort by known version ordering method
this.projectVersions = new ArrayList<String>( versions );

populateSharedModel( repositorySession, metadataResolver, selectedRepos, versions );
}
finally
{
repositorySession.close();
}

// TODO: sort by known version ordering method
this.projectVersions = new ArrayList<String>( versions );

populateSharedModel( selectedRepos, versions );

return SUCCESS;
}

private void populateSharedModel( Collection<String> selectedRepos, Collection<String> projectVersions )
private void populateSharedModel( RepositorySession repositorySession, MetadataResolver metadataResolver,
Collection<String> selectedRepos, Collection<String> projectVersions )
{
sharedModel = new ProjectVersionMetadata();

@@ -230,8 +262,8 @@ public class BrowseAction
{
try
{
versionMetadata = metadataResolver.resolveProjectVersion( repoId, groupId, artifactId,
version );
versionMetadata = metadataResolver.resolveProjectVersion( repositorySession, repoId, groupId,
artifactId, version );
}
catch ( MetadataResolutionException e )
{

@@ -351,11 +383,6 @@ public class BrowseAction
return sharedModel;
}

public MetadataResolver getMetadataResolver()
{
return metadataResolver;
}

public Collection<String> getProjectIds()
{
return projectIds;

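For readers tracing the refactoring, the browse actions above all follow the same read-only access pattern: create a RepositorySession from the injected factory, take the MetadataResolver from that session, pass the session into every resolve call, and close the session in a finally block. A minimal sketch of that pattern, assuming only the types and methods visible in this diff (the surrounding variable names are illustrative):

    RepositorySession repositorySession = repositorySessionFactory.createSession();
    try
    {
        MetadataResolver metadataResolver = repositorySession.getResolver();
        for ( String repoId : selectedRepos )
        {
            // read-only resolution now goes through the session-scoped resolver
            namespaces.addAll( metadataResolver.resolveRootNamespaces( repositorySession, repoId ) );
        }
    }
    finally
    {
        // the session must always be closed, even if resolution throws
        repositorySession.close();
    }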
@ -29,6 +29,7 @@ import org.apache.archiva.metadata.model.ArtifactMetadata;
|
|||
import org.apache.archiva.metadata.repository.MetadataRepository;
|
||||
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
|
||||
import org.apache.archiva.metadata.repository.MetadataResolutionException;
|
||||
import org.apache.archiva.metadata.repository.RepositorySession;
|
||||
import org.apache.archiva.repository.events.RepositoryListener;
|
||||
import org.apache.maven.archiva.common.utils.VersionComparator;
|
||||
import org.apache.maven.archiva.common.utils.VersionUtil;
|
||||
|
@ -117,11 +118,6 @@ public class DeleteArtifactAction
|
|||
|
||||
private ChecksumAlgorithm[] algorithms = new ChecksumAlgorithm[]{ChecksumAlgorithm.SHA1, ChecksumAlgorithm.MD5};
|
||||
|
||||
/**
|
||||
* @plexus.requirement
|
||||
*/
|
||||
private MetadataRepository metadataRepository;
|
||||
|
||||
public String getGroupId()
|
||||
{
|
||||
return groupId;
|
||||
|
@ -194,21 +190,22 @@ public class DeleteArtifactAction
|
|||
|
||||
public String doDelete()
|
||||
{
|
||||
Date lastUpdatedTimestamp = Calendar.getInstance().getTime();
|
||||
|
||||
TimeZone timezone = TimeZone.getTimeZone( "UTC" );
|
||||
DateFormat fmt = new SimpleDateFormat( "yyyyMMdd.HHmmss" );
|
||||
fmt.setTimeZone( timezone );
|
||||
ManagedRepositoryConfiguration repoConfig = configuration.getConfiguration().findManagedRepositoryById(
|
||||
repositoryId );
|
||||
|
||||
VersionedReference ref = new VersionedReference();
|
||||
ref.setArtifactId( artifactId );
|
||||
ref.setGroupId( groupId );
|
||||
ref.setVersion( version );
|
||||
|
||||
RepositorySession repositorySession = repositorySessionFactory.createSession();
|
||||
try
|
||||
{
|
||||
Date lastUpdatedTimestamp = Calendar.getInstance().getTime();
|
||||
|
||||
TimeZone timezone = TimeZone.getTimeZone( "UTC" );
|
||||
DateFormat fmt = new SimpleDateFormat( "yyyyMMdd.HHmmss" );
|
||||
fmt.setTimeZone( timezone );
|
||||
ManagedRepositoryConfiguration repoConfig = configuration.getConfiguration().findManagedRepositoryById(
|
||||
repositoryId );
|
||||
|
||||
VersionedReference ref = new VersionedReference();
|
||||
ref.setArtifactId( artifactId );
|
||||
ref.setGroupId( groupId );
|
||||
ref.setVersion( version );
|
||||
|
||||
ManagedRepositoryContent repository = repositoryFactory.getManagedRepositoryContent( repositoryId );
|
||||
|
||||
String path = repository.toMetadataPath( ref );
|
||||
|
@ -230,6 +227,7 @@ public class DeleteArtifactAction
|
|||
|
||||
updateMetadata( metadata, metadataFile, lastUpdatedTimestamp );
|
||||
|
||||
MetadataRepository metadataRepository = repositorySession.getRepository();
|
||||
Collection<ArtifactMetadata> artifacts = metadataRepository.getArtifacts( repositoryId, groupId, artifactId,
|
||||
version );
|
||||
|
||||
|
@ -245,21 +243,14 @@ public class DeleteArtifactAction
|
|||
// repository metadata to an artifact
|
||||
for ( RepositoryListener listener : listeners )
|
||||
{
|
||||
listener.deleteArtifact( repository.getId(), artifact.getNamespace(), artifact.getProject(),
|
||||
artifact.getVersion(), artifact.getId() );
|
||||
listener.deleteArtifact( metadataRepository, repository.getId(), artifact.getNamespace(),
|
||||
artifact.getProject(), artifact.getVersion(), artifact.getId() );
|
||||
}
|
||||
|
||||
triggerAuditEvent( repositoryId, path, AuditEvent.REMOVE_FILE );
|
||||
}
|
||||
}
|
||||
|
||||
String msg = "Artifact \'" + groupId + ":" + artifactId + ":" + version +
|
||||
"\' was successfully deleted from repository \'" + repositoryId + "\'";
|
||||
|
||||
addActionMessage( msg );
|
||||
|
||||
reset();
|
||||
return SUCCESS;
|
||||
repositorySession.save();
|
||||
}
|
||||
catch ( ContentNotFoundException e )
|
||||
{
|
||||
|
@ -286,6 +277,18 @@ public class DeleteArtifactAction
|
|||
addActionError( "Repository exception: " + e.getMessage() );
|
||||
return ERROR;
|
||||
}
|
||||
finally
|
||||
{
|
||||
repositorySession.close();
|
||||
}
|
||||
|
||||
String msg = "Artifact \'" + groupId + ":" + artifactId + ":" + version +
|
||||
"\' was successfully deleted from repository \'" + repositoryId + "\'";
|
||||
|
||||
addActionMessage( msg );
|
||||
|
||||
reset();
|
||||
return SUCCESS;
|
||||
}
|
||||
|
||||
private File getMetadata( String targetPath )
|
||||
|
@@ -425,9 +428,4 @@ public class DeleteArtifactAction
{
this.configuration = configuration;
}

public void setMetadataRepository( MetadataRepository metadataRepository )
{
this.metadataRepository = metadataRepository;
}
}

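The delete action above also shows the write-side convention: changes made through the session's MetadataRepository are only persisted once save() is called before the session is closed. A hedged sketch of that flow, with error handling trimmed and identifiers kept illustrative:

    RepositorySession repositorySession = repositorySessionFactory.createSession();
    try
    {
        MetadataRepository metadataRepository = repositorySession.getRepository();
        Collection<ArtifactMetadata> artifacts =
            metadataRepository.getArtifacts( repositoryId, groupId, artifactId, version );
        for ( ArtifactMetadata artifact : artifacts )
        {
            for ( RepositoryListener listener : listeners )
            {
                // listeners receive the session-scoped repository instead of an injected singleton
                listener.deleteArtifact( metadataRepository, repositoryId, artifact.getNamespace(),
                                         artifact.getProject(), artifact.getVersion(), artifact.getId() );
            }
        }
        // persist the modifications made during this session
        repositorySession.save();
    }
    finally
    {
        repositorySession.close();
    }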
@ -19,30 +19,30 @@ package org.apache.maven.archiva.web.action;
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
import com.opensymphony.xwork2.Validateable;
|
||||
import com.opensymphony.xwork2.Preparable;
|
||||
import org.apache.archiva.audit.Auditable;
|
||||
import com.opensymphony.xwork2.Validateable;
|
||||
import org.apache.archiva.audit.AuditEvent;
|
||||
import org.apache.archiva.stagerepository.merge.Maven2RepositoryMerger;
|
||||
import org.apache.archiva.audit.Auditable;
|
||||
import org.apache.archiva.metadata.model.ArtifactMetadata;
|
||||
import org.apache.archiva.metadata.repository.MetadataRepository;
|
||||
import org.apache.archiva.metadata.repository.RepositorySession;
|
||||
import org.apache.archiva.metadata.repository.filter.Filter;
|
||||
import org.apache.archiva.metadata.repository.filter.IncludesFilter;
|
||||
import org.apache.archiva.metadata.repository.MetadataRepository;
|
||||
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
|
||||
import org.apache.maven.archiva.configuration.Configuration;
|
||||
import org.apache.archiva.stagerepository.merge.Maven2RepositoryMerger;
|
||||
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
|
||||
import org.apache.maven.archiva.configuration.Configuration;
|
||||
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
|
||||
import org.apache.maven.archiva.web.action.admin.SchedulerAction;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* @plexus.component role="com.opensymphony.xwork2.Action" role-hint="mergeAction" instantiation-strategy="per-lookup"
|
||||
*/
|
||||
public class
|
||||
MergeAction
|
||||
public class MergeAction
|
||||
extends PlexusActionSupport
|
||||
implements Validateable, Preparable, Auditable
|
||||
|
||||
|
@ -57,11 +57,6 @@ public class
|
|||
*/
|
||||
protected ArchivaConfiguration archivaConfiguration;
|
||||
|
||||
/**
|
||||
* @plexus.requirement role-hint="default"
|
||||
*/
|
||||
private MetadataRepository metadataRepository;
|
||||
|
||||
/**
|
||||
* @plexus.requirement role="com.opensymphony.xwork2.Action" role-hint="schedulerAction"
|
||||
*/
|
||||
|
@ -101,17 +96,19 @@ public class
|
|||
public String doMerge()
|
||||
throws Exception
|
||||
{
|
||||
RepositorySession repositorySession = repositorySessionFactory.createSession();
|
||||
try
|
||||
{
|
||||
MetadataRepository metadataRepository = repositorySession.getRepository();
|
||||
List<ArtifactMetadata> sourceArtifacts = metadataRepository.getArtifacts( sourceRepoId );
|
||||
|
||||
if ( repository.isReleases() && !repository.isSnapshots() )
|
||||
{
|
||||
mergeWithOutSnapshots( sourceArtifacts, sourceRepoId, repoid );
|
||||
mergeWithOutSnapshots( metadataRepository, sourceArtifacts, sourceRepoId, repoid );
|
||||
}
|
||||
else
|
||||
{
|
||||
repositoryMerger.merge( sourceRepoId, repoid );
|
||||
repositoryMerger.merge( metadataRepository, sourceRepoId, repoid );
|
||||
|
||||
for ( ArtifactMetadata metadata : sourceArtifacts )
|
||||
{
|
||||
|
@ -130,25 +127,31 @@ public class
|
|||
addActionError( "Error occurred while merging the repositories." );
|
||||
return ERROR;
|
||||
}
|
||||
finally
|
||||
{
|
||||
repositorySession.close();
|
||||
}
|
||||
}
|
||||
|
||||
public String mergeBySkippingConflicts()
|
||||
{
|
||||
RepositorySession repositorySession = repositorySessionFactory.createSession();
|
||||
try
|
||||
{
|
||||
MetadataRepository metadataRepository = repositorySession.getRepository();
|
||||
List<ArtifactMetadata> sourceArtifacts = metadataRepository.getArtifacts( sourceRepoId );
|
||||
sourceArtifacts.removeAll( conflictSourceArtifacts );
|
||||
|
||||
if ( repository.isReleases() && !repository.isSnapshots() )
|
||||
{
|
||||
mergeWithOutSnapshots( sourceArtifacts, sourceRepoId, repoid );
|
||||
mergeWithOutSnapshots( metadataRepository, sourceArtifacts, sourceRepoId, repoid );
|
||||
}
|
||||
else
|
||||
{
|
||||
|
||||
Filter<ArtifactMetadata> artifactsWithOutConflicts =
|
||||
new IncludesFilter<ArtifactMetadata>( sourceArtifacts );
|
||||
repositoryMerger.merge( sourceRepoId, repoid, artifactsWithOutConflicts );
|
||||
Filter<ArtifactMetadata> artifactsWithOutConflicts = new IncludesFilter<ArtifactMetadata>(
|
||||
sourceArtifacts );
|
||||
repositoryMerger.merge( metadataRepository, sourceRepoId, repoid, artifactsWithOutConflicts );
|
||||
for ( ArtifactMetadata metadata : sourceArtifacts )
|
||||
{
|
||||
triggerAuditEvent( repoid, metadata.getId(), AuditEvent.MERGING_REPOSITORIES );
|
||||
|
@ -165,22 +168,31 @@ public class
|
|||
addActionError( "Error occurred while merging the repositories." );
|
||||
return ERROR;
|
||||
}
|
||||
finally
|
||||
{
|
||||
repositorySession.close();
|
||||
}
|
||||
}
|
||||
|
||||
public String mergeWithOutConlficts()
|
||||
public String mergeWithOutConlficts()
|
||||
{
|
||||
|
||||
sourceRepoId = repoid + "-stage";
|
||||
|
||||
RepositorySession repositorySession = repositorySessionFactory.createSession();
|
||||
try
|
||||
{
|
||||
conflictSourceArtifacts = repositoryMerger.getConflictingArtifacts( sourceRepoId, repoid );
|
||||
conflictSourceArtifacts = repositoryMerger.getConflictingArtifacts( repositorySession.getRepository(),
|
||||
sourceRepoId, repoid );
|
||||
}
|
||||
catch ( Exception e )
|
||||
{
|
||||
addActionError( "Error occurred while merging the repositories." );
|
||||
return ERROR;
|
||||
}
|
||||
finally
|
||||
{
|
||||
repositorySession.close();
|
||||
}
|
||||
|
||||
addActionMessage( "Repository '" + sourceRepoId + "' successfully merged to '" + repoid + "'." );
|
||||
|
||||
|
@ -201,9 +213,18 @@ public class
|
|||
throws Exception
|
||||
{
|
||||
sourceRepoId = repoid + "-stage";
|
||||
conflictSourceArtifacts = repositoryMerger.getConflictingArtifacts( sourceRepoId, repoid );
|
||||
RepositorySession repositorySession = repositorySessionFactory.createSession();
|
||||
try
|
||||
{
|
||||
conflictSourceArtifacts = repositoryMerger.getConflictingArtifacts( repositorySession.getRepository(),
|
||||
sourceRepoId, repoid );
|
||||
}
|
||||
finally
|
||||
{
|
||||
repositorySession.close();
|
||||
}
|
||||
this.scheduler.setRepoid( repoid );
|
||||
|
||||
|
||||
Configuration config = archivaConfiguration.getConfiguration();
|
||||
this.repository = config.findManagedRepositoryById( repoid );
|
||||
setConflictSourceArtifactsToBeDisplayed( conflictSourceArtifacts );
|
||||
|
@ -263,7 +284,8 @@ public class
|
|||
}
|
||||
}
|
||||
|
||||
private void mergeWithOutSnapshots( List<ArtifactMetadata> sourceArtifacts, String sourceRepoId, String repoid )
|
||||
private void mergeWithOutSnapshots( MetadataRepository metadataRepository, List<ArtifactMetadata> sourceArtifacts,
|
||||
String sourceRepoId, String repoid )
|
||||
throws Exception
|
||||
{
|
||||
List<ArtifactMetadata> artifactsWithOutSnapshots = new ArrayList<ArtifactMetadata>();
|
||||
|
@ -283,7 +305,7 @@ public class
|
|||
sourceArtifacts.removeAll( artifactsWithOutSnapshots );
|
||||
|
||||
Filter<ArtifactMetadata> artifactListWithOutSnapShots = new IncludesFilter<ArtifactMetadata>( sourceArtifacts );
|
||||
repositoryMerger.merge( sourceRepoId, repoid, artifactListWithOutSnapShots );
|
||||
repositoryMerger.merge( metadataRepository, sourceRepoId, repoid, artifactListWithOutSnapShots );
|
||||
}
|
||||
}
|
||||
|
||||
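MergeAction illustrates how collaborators are wired under the new model: Maven2RepositoryMerger no longer holds its own MetadataRepository, so the action hands it the repository belonging to its open session. A short sketch of a merge call under that contract, assuming the merger signatures shown in this diff (the target repository id variable is illustrative):

    RepositorySession repositorySession = repositorySessionFactory.createSession();
    try
    {
        MetadataRepository metadataRepository = repositorySession.getRepository();
        List<ArtifactMetadata> sourceArtifacts = metadataRepository.getArtifacts( sourceRepoId );
        // the merger operates on the caller's session-scoped repository
        repositoryMerger.merge( metadataRepository, sourceRepoId, targetRepoId );
    }
    finally
    {
        repositorySession.close();
    }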
@ -24,6 +24,7 @@ import com.opensymphony.xwork2.ActionSupport;
|
|||
import org.apache.archiva.audit.AuditEvent;
|
||||
import org.apache.archiva.audit.AuditListener;
|
||||
import org.apache.archiva.audit.Auditable;
|
||||
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
|
||||
import org.apache.maven.archiva.security.ArchivaXworkUser;
|
||||
import org.apache.struts2.ServletActionContext;
|
||||
import org.apache.struts2.interceptor.SessionAware;
|
||||
|
@ -51,9 +52,14 @@ public abstract class PlexusActionSupport
|
|||
*/
|
||||
private List<AuditListener> auditListeners = new ArrayList<AuditListener>();
|
||||
|
||||
/**
|
||||
* @plexus.requirement
|
||||
*/
|
||||
protected RepositorySessionFactory repositorySessionFactory;
|
||||
|
||||
private String principal;
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@SuppressWarnings( "unchecked" )
|
||||
public void setSession( Map map )
|
||||
{
|
||||
this.session = map;
|
||||
|
@ -78,7 +84,7 @@ public abstract class PlexusActionSupport
|
|||
{
|
||||
AuditEvent event = new AuditEvent( repositoryId, getPrincipal(), resource, action );
|
||||
event.setRemoteIP( getRemoteAddr() );
|
||||
|
||||
|
||||
for ( AuditListener listener : auditListeners )
|
||||
{
|
||||
listener.auditEvent( event );
|
||||
|
@ -89,7 +95,7 @@ public abstract class PlexusActionSupport
|
|||
{
|
||||
AuditEvent event = new AuditEvent( null, getPrincipal(), resource, action );
|
||||
event.setRemoteIP( getRemoteAddr() );
|
||||
|
||||
|
||||
for ( AuditListener listener : auditListeners )
|
||||
{
|
||||
listener.auditEvent( event );
|
||||
|
@ -100,7 +106,7 @@ public abstract class PlexusActionSupport
|
|||
{
|
||||
AuditEvent event = new AuditEvent( null, getPrincipal(), null, action );
|
||||
event.setRemoteIP( getRemoteAddr() );
|
||||
|
||||
|
||||
for ( AuditListener listener : auditListeners )
|
||||
{
|
||||
listener.auditEvent( event );
|
||||
|
@ -122,7 +128,7 @@ public abstract class PlexusActionSupport
|
|||
}
|
||||
return ArchivaXworkUser.getActivePrincipal( ActionContext.getContext().getSession() );
|
||||
}
|
||||
|
||||
|
||||
void setPrincipal( String principal )
|
||||
{
|
||||
this.principal = principal;
|
||||
|
@ -132,4 +138,9 @@ public abstract class PlexusActionSupport
|
|||
{
|
||||
this.auditListeners = auditListeners;
|
||||
}
|
||||
|
||||
public void setRepositorySessionFactory( RepositorySessionFactory repositorySessionFactory )
|
||||
{
|
||||
this.repositorySessionFactory = repositorySessionFactory;
|
||||
}
|
||||
}
|
||||
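Because every web action extends PlexusActionSupport, the session factory only needs to be declared once. Reduced to its session-related members, the base class now looks roughly like this (a sketch; the setter simply allows tests or containers to wire the factory explicitly):

    public abstract class PlexusActionSupport
    {
        /**
         * @plexus.requirement
         */
        protected RepositorySessionFactory repositorySessionFactory;

        public void setRepositorySessionFactory( RepositorySessionFactory repositorySessionFactory )
        {
            this.repositorySessionFactory = repositorySessionFactory;
        }
    }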
@ -1,5 +1,5 @@
|
|||
package org.apache.maven.archiva.web.action;
|
||||
|
||||
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
|
@ -19,12 +19,6 @@ package org.apache.maven.archiva.web.action;
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
import java.net.MalformedURLException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import com.opensymphony.xwork2.Preparable;
|
||||
import org.apache.archiva.indexer.search.RepositorySearch;
|
||||
import org.apache.archiva.indexer.search.RepositorySearchException;
|
||||
|
@ -34,6 +28,7 @@ import org.apache.archiva.indexer.search.SearchResultLimits;
|
|||
import org.apache.archiva.indexer.search.SearchResults;
|
||||
import org.apache.archiva.metadata.model.ArtifactMetadata;
|
||||
import org.apache.archiva.metadata.repository.MetadataRepository;
|
||||
import org.apache.archiva.metadata.repository.RepositorySession;
|
||||
import org.apache.commons.collections.CollectionUtils;
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.maven.archiva.common.utils.VersionUtil;
|
||||
|
@ -43,12 +38,18 @@ import org.apache.struts2.ServletActionContext;
|
|||
import org.springframework.web.context.WebApplicationContext;
|
||||
import org.springframework.web.context.support.WebApplicationContextUtils;
|
||||
|
||||
import java.net.MalformedURLException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* Search all indexed fields by the given criteria.
|
||||
*
|
||||
* @plexus.component role="com.opensymphony.xwork2.Action" role-hint="searchAction" instantiation-strategy="per-lookup"
|
||||
*/
|
||||
public class SearchAction
|
||||
public class SearchAction
|
||||
extends AbstractRepositoryBasedAction
|
||||
implements Preparable
|
||||
{
|
||||
|
@ -70,15 +71,15 @@ public class SearchAction
|
|||
private static final String ARTIFACT = "artifact";
|
||||
|
||||
private List<ArtifactMetadata> databaseResults;
|
||||
|
||||
|
||||
private int currentPage = 0;
|
||||
|
||||
|
||||
private int totalPages;
|
||||
|
||||
|
||||
private boolean searchResultsOnly;
|
||||
|
||||
|
||||
private String completeQueryString;
|
||||
|
||||
|
||||
private static final String COMPLETE_QUERY_STRING_SEPARATOR = ";";
|
||||
|
||||
private List<String> managedRepositoryList;
|
||||
|
@ -102,16 +103,11 @@ public class SearchAction
|
|||
private boolean fromResultsPage;
|
||||
|
||||
private RepositorySearch nexusSearch;
|
||||
|
||||
|
||||
private Map<String, String> searchFields;
|
||||
|
||||
private String infoMessage;
|
||||
|
||||
/**
|
||||
* @plexus.requirement
|
||||
*/
|
||||
private MetadataRepository metadataRepository;
|
||||
|
||||
public boolean isFromResultsPage()
|
||||
{
|
||||
return fromResultsPage;
|
||||
|
@ -141,25 +137,25 @@ public class SearchAction
|
|||
{
|
||||
managedRepositoryList.add( "all" );
|
||||
}
|
||||
|
||||
|
||||
searchFields = new LinkedHashMap<String, String>();
|
||||
searchFields.put( "groupId", "Group ID" );
|
||||
searchFields.put( "artifactId", "Artifact ID" );
|
||||
searchFields.put( "version", "Version" );
|
||||
searchFields.put( "className", "Class/Package Name" );
|
||||
searchFields.put( "className", "Class/Package Name" );
|
||||
searchFields.put( "rowCount", "Row Count" );
|
||||
|
||||
super.clearErrorsAndMessages();
|
||||
|
||||
super.clearErrorsAndMessages();
|
||||
clearSearchFields();
|
||||
}
|
||||
|
||||
|
||||
private void clearSearchFields()
|
||||
{
|
||||
repositoryId = "";
|
||||
artifactId = "";
|
||||
groupId = "";
|
||||
version = "";
|
||||
className = "";
|
||||
className = "";
|
||||
rowCount = 30;
|
||||
currentPage = 0;
|
||||
}
|
||||
|
@ -167,44 +163,42 @@ public class SearchAction
|
|||
// advanced search MRM-90 -- filtered search
|
||||
public String filteredSearch()
|
||||
throws MalformedURLException
|
||||
{
|
||||
if ( ( groupId == null || "".equals( groupId ) ) &&
|
||||
( artifactId == null || "".equals( artifactId ) ) && ( className == null || "".equals( className ) ) &&
|
||||
( version == null || "".equals( version ) ) )
|
||||
{
|
||||
{
|
||||
if ( ( groupId == null || "".equals( groupId ) ) && ( artifactId == null || "".equals( artifactId ) ) &&
|
||||
( className == null || "".equals( className ) ) && ( version == null || "".equals( version ) ) )
|
||||
{
|
||||
addActionError( "Advanced Search - At least one search criteria must be provided." );
|
||||
return INPUT;
|
||||
}
|
||||
|
||||
|
||||
fromFilterSearch = true;
|
||||
|
||||
|
||||
if ( CollectionUtils.isEmpty( managedRepositoryList ) )
|
||||
{
|
||||
{
|
||||
return GlobalResults.ACCESS_TO_NO_REPOS;
|
||||
}
|
||||
|
||||
SearchResultLimits limits = new SearchResultLimits( currentPage );
|
||||
limits.setPageSize( rowCount );
|
||||
List<String> selectedRepos = new ArrayList<String>();
|
||||
|
||||
if ( repositoryId == null || StringUtils.isBlank( repositoryId ) ||
|
||||
"all".equals( StringUtils.stripToEmpty( repositoryId ) ) )
|
||||
|
||||
if ( repositoryId == null || StringUtils.isBlank( repositoryId ) || "all".equals( StringUtils.stripToEmpty(
|
||||
repositoryId ) ) )
|
||||
{
|
||||
selectedRepos = getObservableRepos();
|
||||
}
|
||||
else
|
||||
{
|
||||
selectedRepos.add( repositoryId );
|
||||
}
|
||||
}
|
||||
|
||||
if ( CollectionUtils.isEmpty( selectedRepos ) )
|
||||
{
|
||||
{
|
||||
return GlobalResults.ACCESS_TO_NO_REPOS;
|
||||
}
|
||||
|
||||
SearchFields searchFields =
|
||||
new SearchFields( groupId, artifactId, version, null, className, selectedRepos );
|
||||
|
||||
SearchFields searchFields = new SearchFields( groupId, artifactId, version, null, className, selectedRepos );
|
||||
|
||||
// TODO: add packaging in the list of fields for advanced search (UI)?
|
||||
try
|
||||
{
|
||||
|
@ -215,7 +209,7 @@ public class SearchAction
|
|||
addActionError( e.getMessage() );
|
||||
return ERROR;
|
||||
}
|
||||
|
||||
|
||||
if ( results.isEmpty() )
|
||||
{
|
||||
addActionError( "No results found" );
|
||||
|
@ -229,19 +223,19 @@ public class SearchAction
|
|||
totalPages = totalPages + 1;
|
||||
}
|
||||
|
||||
for (SearchResultHit hit : results.getHits())
|
||||
for ( SearchResultHit hit : results.getHits() )
|
||||
{
|
||||
final String version = hit.getVersion();
|
||||
if (version != null)
|
||||
if ( version != null )
|
||||
{
|
||||
hit.setVersion(VersionUtil.getBaseVersion(version));
|
||||
hit.setVersion( VersionUtil.getBaseVersion( version ) );
|
||||
}
|
||||
}
|
||||
|
||||
return SUCCESS;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@SuppressWarnings( "unchecked" )
|
||||
public String quickSearch()
|
||||
throws MalformedURLException
|
||||
{
|
||||
|
@ -265,14 +259,15 @@ public class SearchAction
|
|||
|
||||
try
|
||||
{
|
||||
if( searchResultsOnly && !completeQueryString.equals( "" ) )
|
||||
{
|
||||
results = getNexusSearch().search( getPrincipal(), selectedRepos, q, limits, parseCompleteQueryString() );
|
||||
if ( searchResultsOnly && !completeQueryString.equals( "" ) )
|
||||
{
|
||||
results = getNexusSearch().search( getPrincipal(), selectedRepos, q, limits,
|
||||
parseCompleteQueryString() );
|
||||
}
|
||||
else
|
||||
{
|
||||
completeQueryString = "";
|
||||
results = getNexusSearch().search( getPrincipal(), selectedRepos, q, limits, null );
|
||||
completeQueryString = "";
|
||||
results = getNexusSearch().search( getPrincipal(), selectedRepos, q, limits, null );
|
||||
}
|
||||
}
|
||||
catch ( RepositorySearchException e )
|
||||
|
@ -289,16 +284,16 @@ public class SearchAction
|
|||
|
||||
totalPages = results.getTotalHits() / limits.getPageSize();
|
||||
|
||||
if( (results.getTotalHits() % limits.getPageSize()) != 0 )
|
||||
if ( ( results.getTotalHits() % limits.getPageSize() ) != 0 )
|
||||
{
|
||||
totalPages = totalPages + 1;
|
||||
}
|
||||
|
||||
if( !isEqualToPreviousSearchTerm( q ) )
|
||||
if ( !isEqualToPreviousSearchTerm( q ) )
|
||||
{
|
||||
buildCompleteQueryString( q );
|
||||
}
|
||||
|
||||
|
||||
return SUCCESS;
|
||||
}
|
||||
|
||||
|
@ -314,9 +309,18 @@ public class SearchAction
|
|||
}
|
||||
|
||||
databaseResults = new ArrayList<ArtifactMetadata>();
|
||||
for ( String repoId : getObservableRepos() )
|
||||
RepositorySession repositorySession = repositorySessionFactory.createSession();
|
||||
try
|
||||
{
|
||||
databaseResults.addAll( metadataRepository.getArtifactsByChecksum( repoId, q ) );
|
||||
MetadataRepository metadataRepository = repositorySession.getRepository();
|
||||
for ( String repoId : getObservableRepos() )
|
||||
{
|
||||
databaseResults.addAll( metadataRepository.getArtifactsByChecksum( repoId, q ) );
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
repositorySession.close();
|
||||
}
|
||||
|
||||
if ( databaseResults.isEmpty() )
|
||||
|
@ -333,7 +337,7 @@ public class SearchAction
|
|||
|
||||
return RESULTS;
|
||||
}
|
||||
|
||||
|
||||
public String doInput()
|
||||
{
|
||||
return INPUT;
|
||||
|
@ -541,11 +545,11 @@ public class SearchAction
|
|||
public RepositorySearch getNexusSearch()
|
||||
{
|
||||
// no need to do this when wiring is already in spring
|
||||
if( nexusSearch == null )
|
||||
if ( nexusSearch == null )
|
||||
{
|
||||
WebApplicationContext wac =
|
||||
WebApplicationContextUtils.getRequiredWebApplicationContext( ServletActionContext.getServletContext() );
|
||||
nexusSearch = ( RepositorySearch ) wac.getBean( "nexusSearch" );
|
||||
WebApplicationContext wac = WebApplicationContextUtils.getRequiredWebApplicationContext(
|
||||
ServletActionContext.getServletContext() );
|
||||
nexusSearch = (RepositorySearch) wac.getBean( "nexusSearch" );
|
||||
}
|
||||
return nexusSearch;
|
||||
}
|
||||
|
@ -564,7 +568,7 @@ public class SearchAction
|
|||
{
|
||||
this.searchFields = searchFields;
|
||||
}
|
||||
|
||||
|
||||
public String getInfoMessage()
|
||||
{
|
||||
return infoMessage;
|
||||
|
@ -574,9 +578,4 @@ public class SearchAction
|
|||
{
|
||||
this.infoMessage = infoMessage;
|
||||
}
|
||||
|
||||
public void setMetadataRepository( MetadataRepository metadataRepository )
|
||||
{
|
||||
this.metadataRepository = metadataRepository;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -30,6 +30,7 @@ import org.apache.archiva.metadata.repository.MetadataRepository;
|
|||
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
|
||||
import org.apache.archiva.metadata.repository.MetadataResolutionException;
|
||||
import org.apache.archiva.metadata.repository.MetadataResolver;
|
||||
import org.apache.archiva.metadata.repository.RepositorySession;
|
||||
import org.apache.archiva.metadata.repository.storage.maven2.MavenArtifactFacet;
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.maven.archiva.model.ArtifactReference;
|
||||
|
@ -63,21 +64,11 @@ public class ShowArtifactAction
|
|||
{
|
||||
/* .\ Not Exposed \._____________________________________________ */
|
||||
|
||||
/**
|
||||
* @plexus.requirement
|
||||
*/
|
||||
private MetadataResolver metadataResolver;
|
||||
|
||||
/**
|
||||
* @plexus.requirement
|
||||
*/
|
||||
private RepositoryContentFactory repositoryFactory;
|
||||
|
||||
/**
|
||||
* @plexus.requirement
|
||||
*/
|
||||
private MetadataRepository metadataRepository;
|
||||
|
||||
/* .\ Exposed Output Objects \.__________________________________ */
|
||||
|
||||
private String groupId;
|
||||
|
@ -120,15 +111,26 @@ public class ShowArtifactAction
|
|||
*/
|
||||
public String artifact()
|
||||
{
|
||||
RepositorySession repositorySession = repositorySessionFactory.createSession();
|
||||
try
|
||||
{
|
||||
return handleArtifact( repositorySession );
|
||||
}
|
||||
finally
|
||||
{
|
||||
repositorySession.close();
|
||||
}
|
||||
}
|
||||
|
||||
private String handleArtifact( RepositorySession session )
|
||||
{
|
||||
// In the future, this should be replaced by the repository grouping mechanism, so that we are only making
|
||||
// simple resource requests here and letting the resolver take care of it
|
||||
String errorMsg = null;
|
||||
ProjectVersionMetadata versionMetadata = getProjectVersionMetadata();
|
||||
ProjectVersionMetadata versionMetadata = getProjectVersionMetadata( session );
|
||||
|
||||
if ( versionMetadata == null )
|
||||
{
|
||||
addActionError( errorMsg != null ? errorMsg : "Artifact not found" );
|
||||
addActionError( "Artifact not found" );
|
||||
return ERROR;
|
||||
}
|
||||
|
||||
|
@ -142,13 +144,14 @@ public class ShowArtifactAction
|
|||
return SUCCESS;
|
||||
}
|
||||
|
||||
private ProjectVersionMetadata getProjectVersionMetadata()
|
||||
private ProjectVersionMetadata getProjectVersionMetadata( RepositorySession session )
|
||||
{
|
||||
ProjectVersionMetadata versionMetadata = null;
|
||||
artifacts = new LinkedHashMap<String, List<ArtifactDownloadInfo>>();
|
||||
|
||||
List<String> repos = getObservableRepos();
|
||||
|
||||
MetadataResolver metadataResolver = session.getResolver();
|
||||
for ( String repoId : repos )
|
||||
{
|
||||
if ( versionMetadata == null )
|
||||
|
@ -157,7 +160,8 @@ public class ShowArtifactAction
|
|||
// "just-in-time" nature of picking up the metadata (if appropriate for the repository type) is used
|
||||
try
|
||||
{
|
||||
versionMetadata = metadataResolver.resolveProjectVersion( repoId, groupId, artifactId, version );
|
||||
versionMetadata = metadataResolver.resolveProjectVersion( session, repoId, groupId, artifactId,
|
||||
version );
|
||||
}
|
||||
catch ( MetadataResolutionException e )
|
||||
{
|
||||
|
@ -174,7 +178,8 @@ public class ShowArtifactAction
|
|||
List<ArtifactMetadata> artifacts;
|
||||
try
|
||||
{
|
||||
artifacts = new ArrayList<ArtifactMetadata>( metadataResolver.resolveArtifacts( repoId, groupId,
|
||||
artifacts = new ArrayList<ArtifactMetadata>( metadataResolver.resolveArtifacts( session, repoId,
|
||||
groupId,
|
||||
artifactId,
|
||||
version ) );
|
||||
}
|
||||
|
@ -262,10 +267,20 @@ public class ShowArtifactAction
|
|||
{
|
||||
List<ProjectVersionReference> references = new ArrayList<ProjectVersionReference>();
|
||||
// TODO: what if we get duplicates across repositories?
|
||||
for ( String repoId : getObservableRepos() )
|
||||
RepositorySession repositorySession = repositorySessionFactory.createSession();
|
||||
try
|
||||
{
|
||||
// TODO: what about if we want to see this irrespective of version?
|
||||
references.addAll( metadataResolver.resolveProjectReferences( repoId, groupId, artifactId, version ) );
|
||||
MetadataResolver metadataResolver = repositorySession.getResolver();
|
||||
for ( String repoId : getObservableRepos() )
|
||||
{
|
||||
// TODO: what about if we want to see this irrespective of version?
|
||||
references.addAll( metadataResolver.resolveProjectReferences( repositorySession, repoId, groupId,
|
||||
artifactId, version ) );
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
repositorySession.close();
|
||||
}
|
||||
|
||||
this.dependees = references;
|
||||
|
@ -314,46 +329,56 @@ public class ShowArtifactAction
|
|||
|
||||
public String addMetadataProperty()
|
||||
{
|
||||
String errorMsg = null;
|
||||
|
||||
ProjectVersionMetadata projectMetadata = getProjectVersionMetadata();
|
||||
if ( projectMetadata == null )
|
||||
{
|
||||
addActionError( errorMsg != null ? errorMsg : "Artifact not found" );
|
||||
return ERROR;
|
||||
}
|
||||
|
||||
if ( projectMetadata.getFacet( GenericMetadataFacet.FACET_ID ) == null )
|
||||
{
|
||||
genericMetadata = new HashMap<String, String>();
|
||||
}
|
||||
else
|
||||
{
|
||||
genericMetadata = projectMetadata.getFacet( GenericMetadataFacet.FACET_ID ).toProperties();
|
||||
}
|
||||
|
||||
if ( propertyName == null || "".equals( propertyName.trim() ) || propertyValue == null || "".equals(
|
||||
propertyValue.trim() ) )
|
||||
{
|
||||
model = projectMetadata;
|
||||
addActionError( errorMsg != null ? errorMsg : "Property Name and Property Value are required." );
|
||||
return INPUT;
|
||||
}
|
||||
|
||||
genericMetadata.put( propertyName, propertyValue );
|
||||
|
||||
RepositorySession repositorySession = repositorySessionFactory.createSession();
|
||||
ProjectVersionMetadata projectMetadata;
|
||||
try
|
||||
{
|
||||
updateProjectMetadata( projectMetadata );
|
||||
}
|
||||
catch ( MetadataRepositoryException e )
|
||||
{
|
||||
log.warn( "Unable to persist modified project metadata after adding entry: " + e.getMessage(), e );
|
||||
addActionError( "Unable to add metadata item to underlying content storage - consult application logs." );
|
||||
return ERROR;
|
||||
}
|
||||
MetadataRepository metadataRepository = repositorySession.getRepository();
|
||||
projectMetadata = getProjectVersionMetadata( repositorySession );
|
||||
if ( projectMetadata == null )
|
||||
{
|
||||
addActionError( "Artifact not found" );
|
||||
return ERROR;
|
||||
}
|
||||
|
||||
projectMetadata = getProjectVersionMetadata();
|
||||
if ( projectMetadata.getFacet( GenericMetadataFacet.FACET_ID ) == null )
|
||||
{
|
||||
genericMetadata = new HashMap<String, String>();
|
||||
}
|
||||
else
|
||||
{
|
||||
genericMetadata = projectMetadata.getFacet( GenericMetadataFacet.FACET_ID ).toProperties();
|
||||
}
|
||||
|
||||
if ( propertyName == null || "".equals( propertyName.trim() ) || propertyValue == null || "".equals(
|
||||
propertyValue.trim() ) )
|
||||
{
|
||||
model = projectMetadata;
|
||||
addActionError( "Property Name and Property Value are required." );
|
||||
return INPUT;
|
||||
}
|
||||
|
||||
genericMetadata.put( propertyName, propertyValue );
|
||||
|
||||
try
|
||||
{
|
||||
updateProjectMetadata( projectMetadata, metadataRepository );
|
||||
}
|
||||
catch ( MetadataRepositoryException e )
|
||||
{
|
||||
log.warn( "Unable to persist modified project metadata after adding entry: " + e.getMessage(), e );
|
||||
addActionError(
|
||||
"Unable to add metadata item to underlying content storage - consult application logs." );
|
||||
return ERROR;
|
||||
}
|
||||
|
||||
// TODO: why re-retrieve?
|
||||
projectMetadata = getProjectVersionMetadata( repositorySession );
|
||||
}
|
||||
finally
|
||||
{
|
||||
repositorySession.close();
|
||||
}
|
||||
|
||||
genericMetadata = projectMetadata.getFacet( GenericMetadataFacet.FACET_ID ).toProperties();
|
||||
|
||||
|
@ -367,57 +392,66 @@ public class ShowArtifactAction
|
|||
|
||||
public String deleteMetadataEntry()
|
||||
{
|
||||
ProjectVersionMetadata projectMetadata = getProjectVersionMetadata();
|
||||
String errorMsg = null;
|
||||
|
||||
if ( projectMetadata == null )
|
||||
RepositorySession repositorySession = repositorySessionFactory.createSession();
|
||||
try
|
||||
{
|
||||
addActionError( "Artifact not found" );
|
||||
return ERROR;
|
||||
}
|
||||
MetadataRepository metadataRepository = repositorySession.getRepository();
|
||||
ProjectVersionMetadata projectMetadata = getProjectVersionMetadata( repositorySession );
|
||||
|
||||
if ( projectMetadata.getFacet( GenericMetadataFacet.FACET_ID ) != null )
|
||||
{
|
||||
genericMetadata = projectMetadata.getFacet( GenericMetadataFacet.FACET_ID ).toProperties();
|
||||
|
||||
if ( !StringUtils.isEmpty( deleteItem ) )
|
||||
if ( projectMetadata == null )
|
||||
{
|
||||
genericMetadata.remove( deleteItem );
|
||||
|
||||
try
|
||||
{
|
||||
updateProjectMetadata( projectMetadata );
|
||||
}
|
||||
catch ( MetadataRepositoryException e )
|
||||
{
|
||||
log.warn( "Unable to persist modified project metadata after removing entry: " + e.getMessage(),
|
||||
e );
|
||||
addActionError(
|
||||
"Unable to remove metadata item to underlying content storage - consult application logs." );
|
||||
return ERROR;
|
||||
}
|
||||
|
||||
projectMetadata = getProjectVersionMetadata();
|
||||
|
||||
genericMetadata = projectMetadata.getFacet( GenericMetadataFacet.FACET_ID ).toProperties();
|
||||
|
||||
model = projectMetadata;
|
||||
|
||||
addActionMessage( "Property successfully deleted." );
|
||||
addActionError( "Artifact not found" );
|
||||
return ERROR;
|
||||
}
|
||||
|
||||
deleteItem = "";
|
||||
if ( projectMetadata.getFacet( GenericMetadataFacet.FACET_ID ) != null )
|
||||
{
|
||||
genericMetadata = projectMetadata.getFacet( GenericMetadataFacet.FACET_ID ).toProperties();
|
||||
|
||||
if ( !StringUtils.isEmpty( deleteItem ) )
|
||||
{
|
||||
genericMetadata.remove( deleteItem );
|
||||
|
||||
try
|
||||
{
|
||||
updateProjectMetadata( projectMetadata, metadataRepository );
|
||||
}
|
||||
catch ( MetadataRepositoryException e )
|
||||
{
|
||||
log.warn( "Unable to persist modified project metadata after removing entry: " + e.getMessage(),
|
||||
e );
|
||||
addActionError(
|
||||
"Unable to remove metadata item to underlying content storage - consult application logs." );
|
||||
return ERROR;
|
||||
}
|
||||
|
||||
// TODO: why re-retrieve?
|
||||
projectMetadata = getProjectVersionMetadata( repositorySession );
|
||||
|
||||
genericMetadata = projectMetadata.getFacet( GenericMetadataFacet.FACET_ID ).toProperties();
|
||||
|
||||
model = projectMetadata;
|
||||
|
||||
addActionMessage( "Property successfully deleted." );
|
||||
}
|
||||
|
||||
deleteItem = "";
|
||||
}
|
||||
else
|
||||
{
|
||||
addActionError( "No generic metadata facet for this artifact." );
|
||||
return ERROR;
|
||||
}
|
||||
}
|
||||
else
|
||||
finally
|
||||
{
|
||||
addActionError( "No generic metadata facet for this artifact." );
|
||||
return ERROR;
|
||||
repositorySession.close();
|
||||
}
|
||||
|
||||
return SUCCESS;
|
||||
}
|
||||
|
||||
private void updateProjectMetadata( ProjectVersionMetadata projectMetadata )
|
||||
private void updateProjectMetadata( ProjectVersionMetadata projectMetadata, MetadataRepository metadataRepository )
|
||||
throws MetadataRepositoryException
|
||||
{
|
||||
GenericMetadataFacet genericMetadataFacet = new GenericMetadataFacet();
|
||||
|
@ -507,11 +541,6 @@ public class ShowArtifactAction
|
|||
this.repositoryId = repositoryId;
|
||||
}
|
||||
|
||||
public MetadataResolver getMetadataResolver()
|
||||
{
|
||||
return metadataResolver;
|
||||
}
|
||||
|
||||
public Map<String, List<ArtifactDownloadInfo>> getArtifacts()
|
||||
{
|
||||
return artifacts;
|
||||
|
@ -567,11 +596,6 @@ public class ShowArtifactAction
|
|||
this.repositoryFactory = repositoryFactory;
|
||||
}
|
||||
|
||||
public void setMetadataRepository( MetadataRepository metadataRepository )
|
||||
{
|
||||
this.metadataRepository = metadataRepository;
|
||||
}
|
||||
|
||||
// TODO: move this into the artifact metadata itself via facets where necessary
|
||||
|
||||
public class ArtifactDownloadInfo
|
||||
|
|
|
@ -23,6 +23,7 @@ import com.opensymphony.xwork2.Preparable;
|
|||
import org.apache.archiva.audit.AuditEvent;
|
||||
import org.apache.archiva.metadata.repository.MetadataRepository;
|
||||
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
|
||||
import org.apache.archiva.metadata.repository.RepositorySession;
|
||||
import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.maven.archiva.configuration.Configuration;
|
||||
|
@ -55,11 +56,6 @@ public class DeleteManagedRepositoryAction
|
|||
*/
|
||||
private RepositoryStatisticsManager repositoryStatisticsManager;
|
||||
|
||||
/**
|
||||
* @plexus.requirement
|
||||
*/
|
||||
private MetadataRepository metadataRepository;
|
||||
|
||||
public void prepare()
|
||||
{
|
||||
if ( StringUtils.isNotBlank( repoid ) )
|
||||
|
@ -103,17 +99,18 @@ public class DeleteManagedRepositoryAction
|
|||
|
||||
String result;
|
||||
|
||||
RepositorySession repositorySession = repositorySessionFactory.createSession();
|
||||
try
|
||||
{
|
||||
Configuration configuration = archivaConfiguration.getConfiguration();
|
||||
if ( attachedStagingRepo != null )
|
||||
{
|
||||
cleanupRepositoryData( attachedStagingRepo );
|
||||
cleanupRepositoryData( attachedStagingRepo, repositorySession );
|
||||
removeRepository( repoid + "-stage", configuration );
|
||||
triggerAuditEvent( repoid + "-stage", null, AuditEvent.DELETE_MANAGED_REPO );
|
||||
|
||||
}
|
||||
cleanupRepositoryData( existingRepository );
|
||||
cleanupRepositoryData( existingRepository, repositorySession );
|
||||
removeRepository( repoid, configuration );
|
||||
triggerAuditEvent( repoid, null, AuditEvent.DELETE_MANAGED_REPO );
|
||||
result = saveConfiguration( configuration );
|
||||
|
@ -148,17 +145,24 @@ public class DeleteManagedRepositoryAction
|
|||
"Unable to delete repository, content may already be partially removed: " + e.getMessage() );
|
||||
result = ERROR;
|
||||
}
|
||||
finally
|
||||
{
|
||||
repositorySession.close();
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private void cleanupRepositoryData( ManagedRepositoryConfiguration cleanupRepository )
|
||||
private void cleanupRepositoryData( ManagedRepositoryConfiguration cleanupRepository,
|
||||
RepositorySession repositorySession )
|
||||
throws RoleManagerException, MetadataRepositoryException
|
||||
{
|
||||
removeRepositoryRoles( cleanupRepository );
|
||||
cleanupDatabase( cleanupRepository.getId() );
|
||||
repositoryStatisticsManager.deleteStatistics( cleanupRepository.getId() );
|
||||
MetadataRepository metadataRepository = repositorySession.getRepository();
|
||||
cleanupDatabase( metadataRepository, cleanupRepository.getId() );
|
||||
repositoryStatisticsManager.deleteStatistics( metadataRepository, cleanupRepository.getId() );
|
||||
// TODO: delete all content for a repository from the content API?
|
||||
repositorySession.save();
|
||||
|
||||
List<ProxyConnectorConfiguration> proxyConnectors = getProxyConnectors();
|
||||
for ( ProxyConnectorConfiguration proxyConnector : proxyConnectors )
|
||||
|
@ -184,7 +188,7 @@ public class DeleteManagedRepositoryAction
|
|||
}
|
||||
}
|
||||
|
||||
private void cleanupDatabase( String repoId )
|
||||
private void cleanupDatabase( MetadataRepository metadataRepository, String repoId )
|
||||
throws MetadataRepositoryException
|
||||
{
|
||||
metadataRepository.removeRepository( repoId );
|
||||
|
@ -214,9 +218,4 @@ public class DeleteManagedRepositoryAction
|
|||
{
|
||||
this.repositoryStatisticsManager = repositoryStatisticsManager;
|
||||
}
|
||||
|
||||
public void setMetadataRepository( MetadataRepository metadataRepository )
|
||||
{
|
||||
this.metadataRepository = metadataRepository;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -23,6 +23,7 @@ import com.opensymphony.xwork2.Preparable;
|
|||
import com.opensymphony.xwork2.Validateable;
|
||||
import org.apache.archiva.audit.AuditEvent;
|
||||
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
|
||||
import org.apache.archiva.metadata.repository.RepositorySession;
|
||||
import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.maven.archiva.configuration.Configuration;
|
||||
|
@ -127,6 +128,7 @@ public class EditManagedRepositoryAction
|
|||
|
||||
// Save the repository configuration.
|
||||
String result;
|
||||
RepositorySession repositorySession = repositorySessionFactory.createSession();
|
||||
try
|
||||
{
|
||||
addRepository( repository, configuration );
|
||||
|
@ -154,7 +156,8 @@ public class EditManagedRepositoryAction
|
|||
result = saveConfiguration( configuration );
|
||||
if ( resetStats )
|
||||
{
|
||||
resetStatistics();
|
||||
repositoryStatisticsManager.deleteStatistics( repositorySession.getRepository(), repository.getId() );
|
||||
repositorySession.save();
|
||||
}
|
||||
}
|
||||
catch ( IOException e )
|
||||
|
@ -172,6 +175,10 @@ public class EditManagedRepositoryAction
|
|||
addActionError( "Metadata Exception: " + e.getMessage() );
|
||||
result = ERROR;
|
||||
}
|
||||
finally
|
||||
{
|
||||
repositorySession.close();
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
@ -225,12 +232,6 @@ public class EditManagedRepositoryAction
|
|||
}
|
||||
}
|
||||
|
||||
private void resetStatistics()
|
||||
throws MetadataRepositoryException
|
||||
{
|
||||
repositoryStatisticsManager.deleteStatistics( repository.getId() );
|
||||
}
|
||||
|
||||
public String getRepoid()
|
||||
{
|
||||
return repoid;
|
||||
|
|
|
@ -20,7 +20,9 @@ package org.apache.maven.archiva.web.action.admin.repositories;
|
|||
*/
|
||||
|
||||
import com.opensymphony.xwork2.Preparable;
|
||||
import org.apache.archiva.metadata.repository.MetadataRepository;
|
||||
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
|
||||
import org.apache.archiva.metadata.repository.RepositorySession;
|
||||
import org.apache.archiva.metadata.repository.stats.RepositoryStatistics;
|
||||
import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
|
||||
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
|
||||
|
@ -107,24 +109,33 @@ public class RepositoriesAction
|
|||
Collections.sort( remoteRepositories, new RepositoryConfigurationComparator() );
|
||||
|
||||
repositoryStatistics = new HashMap<String, RepositoryStatistics>();
|
||||
for ( ManagedRepositoryConfiguration repo : managedRepositories )
|
||||
RepositorySession repositorySession = repositorySessionFactory.createSession();
|
||||
try
|
||||
{
|
||||
RepositoryStatistics stats = null;
|
||||
try
|
||||
MetadataRepository metadataRepository = repositorySession.getRepository();
|
||||
for ( ManagedRepositoryConfiguration repo : managedRepositories )
|
||||
{
|
||||
stats = repositoryStatisticsManager.getLastStatistics( repo.getId() );
|
||||
}
|
||||
catch ( MetadataRepositoryException e )
|
||||
{
|
||||
addActionError(
|
||||
"Error retrieving statistics for repository " + repo.getId() + " - consult application logs" );
|
||||
log.warn( "Error retrieving repository statistics: " + e.getMessage(), e );
|
||||
}
|
||||
if ( stats != null )
|
||||
{
|
||||
repositoryStatistics.put( repo.getId(), stats );
|
||||
RepositoryStatistics stats = null;
|
||||
try
|
||||
{
|
||||
stats = repositoryStatisticsManager.getLastStatistics( metadataRepository, repo.getId() );
|
||||
}
|
||||
catch ( MetadataRepositoryException e )
|
||||
{
|
||||
addActionError(
|
||||
"Error retrieving statistics for repository " + repo.getId() + " - consult application logs" );
|
||||
log.warn( "Error retrieving repository statistics: " + e.getMessage(), e );
|
||||
}
|
||||
if ( stats != null )
|
||||
{
|
||||
repositoryStatistics.put( repo.getId(), stats );
|
||||
}
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
repositorySession.close();
|
||||
}
|
||||
}
|
||||
|
||||
public List<ManagedRepositoryConfiguration> getManagedRepositories()
|
||||
|
|
|
@ -22,6 +22,7 @@ package org.apache.maven.archiva.web.action.reports;
|
|||
import com.opensymphony.xwork2.Preparable;
|
||||
import org.apache.archiva.metadata.repository.MetadataRepository;
|
||||
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
|
||||
import org.apache.archiva.metadata.repository.RepositorySession;
|
||||
import org.apache.archiva.metadata.repository.stats.RepositoryStatistics;
|
||||
import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
|
||||
import org.apache.archiva.reports.RepositoryProblemFacet;
|
||||
|
@ -110,11 +111,6 @@ public class GenerateReportAction
|
|||
|
||||
private boolean lastPage;
|
||||
|
||||
/**
|
||||
* @plexus.requirement
|
||||
*/
|
||||
private MetadataRepository metadataRepository;
|
||||
|
||||
@SuppressWarnings( "unchecked" )
|
||||
public void prepare()
|
||||
{
|
||||
|
@ -163,56 +159,24 @@ public class GenerateReportAction
|
|||
Date startDateInDF;
|
||||
Date endDateInDF;
|
||||
|
||||
if ( selectedRepositories.size() > 1 )
|
||||
RepositorySession repositorySession = repositorySessionFactory.createSession();
|
||||
try
|
||||
{
|
||||
numPages = 1;
|
||||
MetadataRepository metadataRepository = repositorySession.getRepository();
|
||||
if ( selectedRepositories.size() > 1 )
|
||||
{
|
||||
numPages = 1;
|
||||
|
||||
try
|
||||
{
|
||||
startDateInDF = getStartDateInDateFormat();
|
||||
endDateInDF = getEndDateInDateFormat();
|
||||
}
|
||||
catch ( ParseException e )
|
||||
{
|
||||
addActionError( "Error parsing date(s)." );
|
||||
return ERROR;
|
||||
}
|
||||
|
||||
if ( startDateInDF != null && endDateInDF != null && startDateInDF.after( endDateInDF ) )
|
||||
{
|
||||
addFieldError( "startDate", "Start Date must be earlier than the End Date" );
|
||||
return INPUT;
|
||||
}
|
||||
|
||||
// multiple repos
|
||||
for ( String repo : selectedRepositories )
|
||||
{
|
||||
List<RepositoryStatistics> stats = null;
|
||||
try
|
||||
{
|
||||
stats = repositoryStatisticsManager.getStatisticsInRange( repo, startDateInDF, endDateInDF );
|
||||
startDateInDF = getStartDateInDateFormat();
|
||||
endDateInDF = getEndDateInDateFormat();
|
||||
}
|
||||
catch ( MetadataRepositoryException e )
|
||||
catch ( ParseException e )
|
||||
{
|
||||
log.warn( "Unable to retrieve stats, assuming is empty: " + e.getMessage(), e );
|
||||
addActionError( "Error parsing date(s)." );
|
||||
return ERROR;
|
||||
}
|
||||
if ( stats == null || stats.isEmpty() )
|
||||
{
|
||||
log.info( "No statistics available for repository '" + repo + "'." );
|
||||
// TODO set repo's stats to 0
|
||||
continue;
|
||||
}
|
||||
|
||||
repositoryStatistics.add( stats.get( 0 ) );
|
||||
}
|
||||
}
|
||||
else if ( selectedRepositories.size() == 1 )
|
||||
{
|
||||
repositoryId = selectedRepositories.get( 0 );
|
||||
try
|
||||
{
|
||||
startDateInDF = getStartDateInDateFormat();
|
||||
endDateInDF = getEndDateInDateFormat();
|
||||
|
||||
if ( startDateInDF != null && endDateInDF != null && startDateInDF.after( endDateInDF ) )
|
||||
{
|
||||
|
@ -220,55 +184,98 @@ public class GenerateReportAction
|
|||
return INPUT;
|
||||
}
|
||||
|
||||
List<RepositoryStatistics> stats = null;
|
||||
// multiple repos
|
||||
for ( String repo : selectedRepositories )
|
||||
{
|
||||
List<RepositoryStatistics> stats = null;
|
||||
try
|
||||
{
|
||||
stats = repositoryStatisticsManager.getStatisticsInRange( metadataRepository, repo,
|
||||
startDateInDF, endDateInDF );
|
||||
}
|
||||
catch ( MetadataRepositoryException e )
|
||||
{
|
||||
log.warn( "Unable to retrieve stats, assuming is empty: " + e.getMessage(), e );
|
||||
}
|
||||
if ( stats == null || stats.isEmpty() )
|
||||
{
|
||||
log.info( "No statistics available for repository '" + repo + "'." );
|
||||
// TODO set repo's stats to 0
|
||||
continue;
|
||||
}
|
||||
|
||||
repositoryStatistics.add( stats.get( 0 ) );
|
||||
}
|
||||
}
|
||||
else if ( selectedRepositories.size() == 1 )
|
||||
{
|
||||
repositoryId = selectedRepositories.get( 0 );
|
||||
try
|
||||
{
|
||||
stats = repositoryStatisticsManager.getStatisticsInRange( repositoryId, startDateInDF,
|
||||
endDateInDF );
|
||||
startDateInDF = getStartDateInDateFormat();
|
||||
endDateInDF = getEndDateInDateFormat();
|
||||
|
||||
if ( startDateInDF != null && endDateInDF != null && startDateInDF.after( endDateInDF ) )
|
||||
{
|
||||
addFieldError( "startDate", "Start Date must be earlier than the End Date" );
|
||||
return INPUT;
|
||||
}
|
||||
|
||||
List<RepositoryStatistics> stats = null;
|
||||
try
|
||||
{
|
||||
stats = repositoryStatisticsManager.getStatisticsInRange( metadataRepository, repositoryId,
|
||||
startDateInDF, endDateInDF );
|
||||
}
|
||||
catch ( MetadataRepositoryException e )
|
||||
{
|
||||
log.warn( "Unable to retrieve stats, assuming is empty: " + e.getMessage(), e );
|
||||
}
|
||||
if ( stats == null || stats.isEmpty() )
|
||||
{
|
||||
addActionError(
|
||||
"No statistics available for repository. Repository might not have been scanned." );
|
||||
return ERROR;
|
||||
}
|
||||
|
||||
int rowCount = getRowCount();
|
||||
int extraPage = ( stats.size() % rowCount ) != 0 ? 1 : 0;
|
||||
int totalPages = ( stats.size() / rowCount ) + extraPage;
|
||||
numPages = totalPages;
|
||||
|
||||
int currentPage = getPage();
|
||||
if ( currentPage > totalPages )
|
||||
{
|
||||
addActionError(
|
||||
"Error encountered while generating report :: The requested page exceeds the total number of pages." );
|
||||
return ERROR;
|
||||
}
|
||||
|
||||
int start = rowCount * ( currentPage - 1 );
|
||||
int end = ( start + rowCount ) - 1;
|
||||
|
||||
if ( end > stats.size() )
|
||||
{
|
||||
end = stats.size() - 1;
|
||||
}
|
||||
|
||||
repositoryStatistics = stats.subList( start, end + 1 );
|
||||
}
|
||||
catch ( MetadataRepositoryException e )
|
||||
catch ( ParseException pe )
|
||||
{
|
||||
log.warn( "Unable to retrieve stats, assuming is empty: " + e.getMessage(), e );
|
||||
}
|
||||
if ( stats == null || stats.isEmpty() )
|
||||
{
|
||||
addActionError( "No statistics available for repository. Repository might not have been scanned." );
|
||||
addActionError( pe.getMessage() );
|
||||
return ERROR;
|
||||
}
|
||||
|
||||
int rowCount = getRowCount();
|
||||
int extraPage = ( stats.size() % rowCount ) != 0 ? 1 : 0;
|
||||
int totalPages = ( stats.size() / rowCount ) + extraPage;
|
||||
numPages = totalPages;
|
||||
|
||||
int currentPage = getPage();
|
||||
if ( currentPage > totalPages )
|
||||
{
|
||||
addActionError(
|
||||
"Error encountered while generating report :: The requested page exceeds the total number of pages." );
|
||||
return ERROR;
|
||||
}
|
||||
|
||||
int start = rowCount * ( currentPage - 1 );
|
||||
int end = ( start + rowCount ) - 1;
|
||||
|
||||
if ( end > stats.size() )
|
||||
{
|
||||
end = stats.size() - 1;
|
||||
}
|
||||
|
||||
repositoryStatistics = stats.subList( start, end + 1 );
|
||||
}
|
||||
catch ( ParseException pe )
|
||||
else
|
||||
{
|
||||
addActionError( pe.getMessage() );
|
||||
return ERROR;
|
||||
addFieldError( "availableRepositories", "Please select a repository (or repositories) from the list." );
|
||||
return INPUT;
|
||||
}
|
||||
}
|
||||
else
|
||||
finally
|
||||
{
|
||||
addFieldError( "availableRepositories", "Please select a repository (or repositories) from the list." );
|
||||
return INPUT;
|
||||
repositorySession.close();
|
||||
}
|
||||
|
||||
if ( repositoryStatistics.isEmpty() )
|
||||
|
@ -293,72 +300,22 @@ public class GenerateReportAction
|
|||
List<RepositoryStatistics> repositoryStatistics = new ArrayList<RepositoryStatistics>();
|
||||
|
||||
StringBuffer input;
|
||||
if ( selectedRepositories.size() > 1 )
|
||||
RepositorySession repositorySession = repositorySessionFactory.createSession();
|
||||
try
|
||||
{
|
||||
try
|
||||
MetadataRepository metadataRepository = repositorySession.getRepository();
|
||||
if ( selectedRepositories.size() > 1 )
|
||||
{
|
||||
startDateInDF = getStartDateInDateFormat();
|
||||
endDateInDF = getEndDateInDateFormat();
|
||||
}
|
||||
catch ( ParseException e )
|
||||
{
|
||||
addActionError( "Error parsing date(s)." );
|
||||
return ERROR;
|
||||
}
|
||||
|
||||
if ( startDateInDF != null && endDateInDF != null && startDateInDF.after( endDateInDF ) )
|
||||
{
|
||||
addFieldError( "startDate", "Start Date must be earlier than the End Date" );
|
||||
return INPUT;
|
||||
}
|
||||
|
||||
input = new StringBuffer(
|
||||
"Repository,Total File Count,Total Size,Artifact Count,Group Count,Project Count,Plugins,Archetypes," +
|
||||
"Jars,Wars\n" );
|
||||
|
||||
// multiple repos
|
||||
for ( String repo : selectedRepositories )
|
||||
{
|
||||
List<RepositoryStatistics> stats = null;
|
||||
try
|
||||
{
|
||||
stats = repositoryStatisticsManager.getStatisticsInRange( repo, startDateInDF, endDateInDF );
|
||||
startDateInDF = getStartDateInDateFormat();
|
||||
endDateInDF = getEndDateInDateFormat();
|
||||
}
|
||||
catch ( MetadataRepositoryException e )
|
||||
catch ( ParseException e )
|
||||
{
|
||||
log.warn( "Unable to retrieve stats, assuming is empty: " + e.getMessage(), e );
|
||||
addActionError( "Error parsing date(s)." );
|
||||
return ERROR;
|
||||
}
|
||||
if ( stats == null || stats.isEmpty() )
|
||||
{
|
||||
log.info( "No statistics available for repository '" + repo + "'." );
|
||||
// TODO set repo's stats to 0
|
||||
continue;
|
||||
}
|
||||
|
||||
// only the first one
|
||||
RepositoryStatistics repositoryStats = stats.get( 0 );
|
||||
repositoryStatistics.add( repositoryStats );
|
||||
|
||||
input.append( repo ).append( "," );
|
||||
input.append( repositoryStats.getTotalFileCount() ).append( "," );
|
||||
input.append( repositoryStats.getTotalArtifactFileSize() ).append( "," );
|
||||
input.append( repositoryStats.getTotalArtifactCount() ).append( "," );
|
||||
input.append( repositoryStats.getTotalGroupCount() ).append( "," );
|
||||
input.append( repositoryStats.getTotalProjectCount() ).append( "," );
|
||||
input.append( repositoryStats.getTotalCountForType( "maven-plugin" ) ).append( "," );
|
||||
input.append( repositoryStats.getTotalCountForType( "maven-archetype" ) ).append( "," );
|
||||
input.append( repositoryStats.getTotalCountForType( "jar" ) ).append( "," );
|
||||
input.append( repositoryStats.getTotalCountForType( "war" ) );
|
||||
input.append( "\n" );
|
||||
}
|
||||
}
|
||||
else if ( selectedRepositories.size() == 1 )
|
||||
{
|
||||
repositoryId = selectedRepositories.get( 0 );
|
||||
try
|
||||
{
|
||||
startDateInDF = getStartDateInDateFormat();
|
||||
endDateInDF = getEndDateInDateFormat();
|
||||
|
||||
if ( startDateInDF != null && endDateInDF != null && startDateInDF.after( endDateInDF ) )
|
||||
{
|
||||
|
@ -366,29 +323,35 @@ public class GenerateReportAction
|
|||
return INPUT;
|
||||
}
|
||||
|
||||
List<RepositoryStatistics> stats = null;
|
||||
try
|
||||
{
|
||||
stats = repositoryStatisticsManager.getStatisticsInRange( repositoryId, startDateInDF,
|
||||
endDateInDF );
|
||||
}
|
||||
catch ( MetadataRepositoryException e )
|
||||
{
|
||||
log.warn( "Unable to retrieve stats, assuming is empty: " + e.getMessage(), e );
|
||||
}
|
||||
if ( stats == null || stats.isEmpty() )
|
||||
{
|
||||
addActionError( "No statistics available for repository. Repository might not have been scanned." );
|
||||
return ERROR;
|
||||
}
|
||||
|
||||
input = new StringBuffer(
|
||||
"Date of Scan,Total File Count,Total Size,Artifact Count,Group Count,Project Count,Plugins," +
|
||||
"Archetypes,Jars,Wars\n" );
|
||||
"Repository,Total File Count,Total Size,Artifact Count,Group Count,Project Count,Plugins,Archetypes," +
|
||||
"Jars,Wars\n" );
|
||||
|
||||
for ( RepositoryStatistics repositoryStats : stats )
|
||||
// multiple repos
|
||||
for ( String repo : selectedRepositories )
|
||||
{
|
||||
input.append( repositoryStats.getScanStartTime() ).append( "," );
|
||||
List<RepositoryStatistics> stats = null;
|
||||
try
|
||||
{
|
||||
stats = repositoryStatisticsManager.getStatisticsInRange( metadataRepository, repo,
|
||||
startDateInDF, endDateInDF );
|
||||
}
|
||||
catch ( MetadataRepositoryException e )
|
||||
{
|
||||
log.warn( "Unable to retrieve stats, assuming is empty: " + e.getMessage(), e );
|
||||
}
|
||||
if ( stats == null || stats.isEmpty() )
|
||||
{
|
||||
log.info( "No statistics available for repository '" + repo + "'." );
|
||||
// TODO set repo's stats to 0
|
||||
continue;
|
||||
}
|
||||
|
||||
// only the first one
|
||||
RepositoryStatistics repositoryStats = stats.get( 0 );
|
||||
repositoryStatistics.add( repositoryStats );
|
||||
|
||||
input.append( repo ).append( "," );
|
||||
input.append( repositoryStats.getTotalFileCount() ).append( "," );
|
||||
input.append( repositoryStats.getTotalArtifactFileSize() ).append( "," );
|
||||
input.append( repositoryStats.getTotalArtifactCount() ).append( "," );
|
||||
|
@ -400,19 +363,74 @@ public class GenerateReportAction
|
|||
input.append( repositoryStats.getTotalCountForType( "war" ) );
|
||||
input.append( "\n" );
|
||||
}
|
||||
|
||||
repositoryStatistics = stats;
|
||||
}
|
||||
catch ( ParseException pe )
|
||||
else if ( selectedRepositories.size() == 1 )
|
||||
{
|
||||
addActionError( pe.getMessage() );
|
||||
return ERROR;
|
||||
repositoryId = selectedRepositories.get( 0 );
|
||||
try
|
||||
{
|
||||
startDateInDF = getStartDateInDateFormat();
|
||||
endDateInDF = getEndDateInDateFormat();
|
||||
|
||||
if ( startDateInDF != null && endDateInDF != null && startDateInDF.after( endDateInDF ) )
|
||||
{
|
||||
addFieldError( "startDate", "Start Date must be earlier than the End Date" );
|
||||
return INPUT;
|
||||
}
|
||||
|
||||
List<RepositoryStatistics> stats = null;
|
||||
try
|
||||
{
|
||||
stats = repositoryStatisticsManager.getStatisticsInRange( metadataRepository, repositoryId,
|
||||
startDateInDF, endDateInDF );
|
||||
}
|
||||
catch ( MetadataRepositoryException e )
|
||||
{
|
||||
log.warn( "Unable to retrieve stats, assuming is empty: " + e.getMessage(), e );
|
||||
}
|
||||
if ( stats == null || stats.isEmpty() )
|
||||
{
|
||||
addActionError(
|
||||
"No statistics available for repository. Repository might not have been scanned." );
|
||||
return ERROR;
|
||||
}
|
||||
|
||||
input = new StringBuffer(
|
||||
"Date of Scan,Total File Count,Total Size,Artifact Count,Group Count,Project Count,Plugins," +
|
||||
"Archetypes,Jars,Wars\n" );
|
||||
|
||||
for ( RepositoryStatistics repositoryStats : stats )
|
||||
{
|
||||
input.append( repositoryStats.getScanStartTime() ).append( "," );
|
||||
input.append( repositoryStats.getTotalFileCount() ).append( "," );
|
||||
input.append( repositoryStats.getTotalArtifactFileSize() ).append( "," );
|
||||
input.append( repositoryStats.getTotalArtifactCount() ).append( "," );
|
||||
input.append( repositoryStats.getTotalGroupCount() ).append( "," );
|
||||
input.append( repositoryStats.getTotalProjectCount() ).append( "," );
|
||||
input.append( repositoryStats.getTotalCountForType( "maven-plugin" ) ).append( "," );
|
||||
input.append( repositoryStats.getTotalCountForType( "maven-archetype" ) ).append( "," );
|
||||
input.append( repositoryStats.getTotalCountForType( "jar" ) ).append( "," );
|
||||
input.append( repositoryStats.getTotalCountForType( "war" ) );
|
||||
input.append( "\n" );
|
||||
}
|
||||
|
||||
repositoryStatistics = stats;
|
||||
}
|
||||
catch ( ParseException pe )
|
||||
{
|
||||
addActionError( pe.getMessage() );
|
||||
return ERROR;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
addFieldError( "availableRepositories", "Please select a repository (or repositories) from the list." );
|
||||
return INPUT;
|
||||
}
|
||||
}
|
||||
else
|
||||
finally
|
||||
{
|
||||
addFieldError( "availableRepositories", "Please select a repository (or repositories) from the list." );
|
||||
return INPUT;
|
||||
repositorySession.close();
|
||||
}
|
||||
|
||||
if ( repositoryStatistics.isEmpty() )
|
||||
|
@ -528,20 +546,29 @@ public class GenerateReportAction
|
|||
}
|
||||
|
||||
List<RepositoryProblemFacet> problemArtifacts = new ArrayList<RepositoryProblemFacet>();
|
||||
for ( String repoId : repoIds )
|
||||
RepositorySession repositorySession = repositorySessionFactory.createSession();
|
||||
try
|
||||
{
|
||||
// TODO: improve performance by navigating into a group subtree. Currently group is property, not part of name of item
|
||||
for ( String name : metadataRepository.getMetadataFacets( repoId, RepositoryProblemFacet.FACET_ID ) )
|
||||
MetadataRepository metadataRepository = repositorySession.getRepository();
|
||||
for ( String repoId : repoIds )
|
||||
{
|
||||
RepositoryProblemFacet metadataFacet = (RepositoryProblemFacet) metadataRepository.getMetadataFacet(
|
||||
repoId, RepositoryProblemFacet.FACET_ID, name );
|
||||
|
||||
if ( StringUtils.isEmpty( groupId ) || groupId.equals( metadataFacet.getNamespace() ) )
|
||||
// TODO: improve performance by navigating into a group subtree. Currently group is property, not part of name of item
|
||||
for ( String name : metadataRepository.getMetadataFacets( repoId, RepositoryProblemFacet.FACET_ID ) )
|
||||
{
|
||||
problemArtifacts.add( metadataFacet );
|
||||
RepositoryProblemFacet metadataFacet = (RepositoryProblemFacet) metadataRepository.getMetadataFacet(
|
||||
repoId, RepositoryProblemFacet.FACET_ID, name );
|
||||
|
||||
if ( StringUtils.isEmpty( groupId ) || groupId.equals( metadataFacet.getNamespace() ) )
|
||||
{
|
||||
problemArtifacts.add( metadataFacet );
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
repositorySession.close();
|
||||
}
|
||||
|
||||
// TODO: getting range only after reading is not efficient for a large number of artifacts
|
||||
int lowerBound = ( page - 1 ) * rowCount;
|
||||
|
@@ -727,9 +754,4 @@ public class GenerateReportAction
    {
        this.repositoryStatisticsManager = repositoryStatisticsManager;
    }

    public void setMetadataRepository( MetadataRepository metadataRepository )
    {
        this.metadataRepository = metadataRepository;
    }
}

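The GenerateReportAction changes above all reduce to one pattern: the action no longer keeps a MetadataRepository field, it asks the injected RepositorySessionFactory for a session, reads through it, and closes it in a finally block. A minimal sketch of that pattern, assuming a placeholder action class (ExampleReportAction and executeReport are illustrative names, not code from this commit):

import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.RepositorySessionFactory;

public class ExampleReportAction
{
    // injected by the container; replaces the old MetadataRepository field
    private RepositorySessionFactory repositorySessionFactory;

    public String executeReport()
    {
        RepositorySession repositorySession = repositorySessionFactory.createSession();
        try
        {
            MetadataRepository metadataRepository = repositorySession.getRepository();
            // ... query statistics / facets through metadataRepository ...
            return "success";
        }
        finally
        {
            // the session must be closed on every path, including early error returns
            repositorySession.close();
        }
    }

    public void setRepositorySessionFactory( RepositorySessionFactory repositorySessionFactory )
    {
        this.repositorySessionFactory = repositorySessionFactory;
    }
}
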
@@ -22,6 +22,7 @@ package org.apache.maven.archiva.web.action.reports;

import com.opensymphony.xwork2.Preparable;
import org.apache.archiva.audit.AuditEvent;
import org.apache.archiva.audit.AuditManager;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.time.DateUtils;
import org.apache.maven.archiva.security.AccessDeniedException;

@@ -119,7 +120,7 @@ public class ViewAuditLogReportAction
        this.request = request;
    }

    @SuppressWarnings("unchecked")
    @SuppressWarnings( "unchecked" )
    public void prepare()
        throws Exception
    {

@@ -142,7 +143,15 @@ public class ViewAuditLogReportAction
            headerName = HEADER_RESULTS;
        }

        auditLogs = auditManager.getMostRecentAuditEvents( repos );
        RepositorySession repositorySession = repositorySessionFactory.createSession();
        try
        {
            auditLogs = auditManager.getMostRecentAuditEvents( repositorySession.getRepository(), repos );
        }
        finally
        {
            repositorySession.close();
        }
    }

    public String execute()

@@ -205,7 +214,16 @@ public class ViewAuditLogReportAction
            }
        }

        auditLogs = auditManager.getAuditEventsInRange( repos, resource, startDateInDF, endDateInDF );
        RepositorySession repositorySession = repositorySessionFactory.createSession();
        try
        {
            auditLogs = auditManager.getAuditEventsInRange( repositorySession.getRepository(), repos, resource,
                                                            startDateInDF, endDateInDF );
        }
        finally
        {
            repositorySession.close();
        }

        if ( auditLogs.isEmpty() )
        {

@ -19,20 +19,11 @@ package org.apache.maven.archiva.web.rss;
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import javax.servlet.ServletException;
|
||||
import javax.servlet.http.HttpServlet;
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
|
||||
import com.sun.syndication.feed.synd.SyndFeed;
|
||||
import com.sun.syndication.io.FeedException;
|
||||
import com.sun.syndication.io.SyndFeedOutput;
|
||||
import org.apache.archiva.metadata.repository.RepositorySession;
|
||||
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
|
||||
import org.apache.archiva.rss.processor.RssFeedProcessor;
|
||||
import org.apache.commons.codec.Decoder;
|
||||
import org.apache.commons.codec.DecoderException;
|
||||
|
@ -60,10 +51,19 @@ import org.slf4j.LoggerFactory;
|
|||
import org.springframework.web.context.WebApplicationContext;
|
||||
import org.springframework.web.context.support.WebApplicationContextUtils;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import javax.servlet.ServletException;
|
||||
import javax.servlet.http.HttpServlet;
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
|
||||
/**
|
||||
* Servlet for handling rss feed requests.
|
||||
*
|
||||
* @version
|
||||
*/
|
||||
public class RssFeedServlet
|
||||
extends HttpServlet
|
||||
|
@ -87,18 +87,23 @@ public class RssFeedServlet
|
|||
private ServletAuthenticator servletAuth;
|
||||
|
||||
private HttpAuthenticator httpAuth;
|
||||
|
||||
|
||||
private RepositorySessionFactory repositorySessionFactory;
|
||||
|
||||
public void init( javax.servlet.ServletConfig servletConfig )
|
||||
throws ServletException
|
||||
{
|
||||
super.init( servletConfig );
|
||||
wac = WebApplicationContextUtils.getRequiredWebApplicationContext( servletConfig.getServletContext() );
|
||||
userRepositories =
|
||||
(UserRepositories) wac.getBean( PlexusToSpringUtils.buildSpringId( UserRepositories.class.getName() ) );
|
||||
servletAuth =
|
||||
(ServletAuthenticator) wac.getBean( PlexusToSpringUtils.buildSpringId( ServletAuthenticator.class.getName() ) );
|
||||
httpAuth =
|
||||
(HttpAuthenticator) wac.getBean( PlexusToSpringUtils.buildSpringId( HttpAuthenticator.ROLE, "basic" ) );
|
||||
userRepositories = (UserRepositories) wac.getBean( PlexusToSpringUtils.buildSpringId(
|
||||
UserRepositories.class.getName() ) );
|
||||
servletAuth = (ServletAuthenticator) wac.getBean( PlexusToSpringUtils.buildSpringId(
|
||||
ServletAuthenticator.class.getName() ) );
|
||||
httpAuth = (HttpAuthenticator) wac.getBean( PlexusToSpringUtils.buildSpringId( HttpAuthenticator.ROLE,
|
||||
"basic" ) );
|
||||
// TODO: what if there are other types?
|
||||
repositorySessionFactory = (RepositorySessionFactory) wac.getBean( PlexusToSpringUtils.buildSpringId(
|
||||
RepositorySessionFactory.class.getName() ) );
|
||||
}
|
||||
|
||||
public void doGet( HttpServletRequest req, HttpServletResponse res )
|
||||
|
@ -107,15 +112,15 @@ public class RssFeedServlet
|
|||
String repoId = null;
|
||||
String groupId = null;
|
||||
String artifactId = null;
|
||||
|
||||
String url = StringUtils.removeEnd( req.getRequestURL().toString(), "/" );
|
||||
if( StringUtils.countMatches( StringUtils.substringAfter( url, "feeds/" ), "/" ) > 0 )
|
||||
|
||||
String url = StringUtils.removeEnd( req.getRequestURL().toString(), "/" );
|
||||
if ( StringUtils.countMatches( StringUtils.substringAfter( url, "feeds/" ), "/" ) > 0 )
|
||||
{
|
||||
artifactId = StringUtils.substringAfterLast( url, "/" );
|
||||
groupId = StringUtils.substringBeforeLast( StringUtils.substringAfter( url, "feeds/" ), "/");
|
||||
groupId = StringUtils.substringBeforeLast( StringUtils.substringAfter( url, "feeds/" ), "/" );
|
||||
groupId = StringUtils.replaceChars( groupId, '/', '.' );
|
||||
}
|
||||
else if( StringUtils.countMatches( StringUtils.substringAfter( url, "feeds/" ), "/" ) == 0 )
|
||||
else if ( StringUtils.countMatches( StringUtils.substringAfter( url, "feeds/" ), "/" ) == 0 )
|
||||
{
|
||||
repoId = StringUtils.substringAfterLast( url, "/" );
|
||||
}
|
||||
|
@ -123,32 +128,28 @@ public class RssFeedServlet
|
|||
{
|
||||
res.sendError( HttpServletResponse.SC_BAD_REQUEST, "Invalid request url." );
|
||||
return;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
Map<String, String> map = new HashMap<String, String>();
|
||||
SyndFeed feed = null;
|
||||
|
||||
|
||||
if ( isAllowed( req, repoId, groupId, artifactId ) )
|
||||
{
|
||||
if ( repoId != null )
|
||||
{
|
||||
// new artifacts in repo feed request
|
||||
processor =
|
||||
(RssFeedProcessor) wac.getBean( PlexusToSpringUtils.buildSpringId(
|
||||
RssFeedProcessor.class.getName(),
|
||||
"new-artifacts" ) );
|
||||
processor = (RssFeedProcessor) wac.getBean( PlexusToSpringUtils.buildSpringId(
|
||||
RssFeedProcessor.class.getName(), "new-artifacts" ) );
|
||||
map.put( RssFeedProcessor.KEY_REPO_ID, repoId );
|
||||
}
|
||||
else if ( ( groupId != null ) && ( artifactId != null ) )
|
||||
{
|
||||
// TODO: this only works for guest - we could pass in the list of repos
|
||||
// new versions of artifact feed request
|
||||
processor =
|
||||
(RssFeedProcessor) wac.getBean( PlexusToSpringUtils.buildSpringId(
|
||||
RssFeedProcessor.class.getName(),
|
||||
"new-versions" ) );
|
||||
processor = (RssFeedProcessor) wac.getBean( PlexusToSpringUtils.buildSpringId(
|
||||
RssFeedProcessor.class.getName(), "new-versions" ) );
|
||||
map.put( RssFeedProcessor.KEY_GROUP_ID, groupId );
|
||||
map.put( RssFeedProcessor.KEY_ARTIFACT_ID, artifactId );
|
||||
}
|
||||
|
@ -159,22 +160,30 @@ public class RssFeedServlet
|
|||
return;
|
||||
}
|
||||
|
||||
feed = processor.process( map );
|
||||
if( feed == null )
|
||||
RepositorySession repositorySession = repositorySessionFactory.createSession();
|
||||
try
|
||||
{
|
||||
feed = processor.process( map, repositorySession.getRepository() );
|
||||
}
|
||||
finally
|
||||
{
|
||||
repositorySession.close();
|
||||
}
|
||||
if ( feed == null )
|
||||
{
|
||||
res.sendError( HttpServletResponse.SC_NO_CONTENT, "No information available." );
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
res.setContentType( MIME_TYPE );
|
||||
|
||||
|
||||
if ( repoId != null )
|
||||
{
|
||||
{
|
||||
feed.setLink( req.getRequestURL().toString() );
|
||||
}
|
||||
else if ( ( groupId != null ) && ( artifactId != null ) )
|
||||
{
|
||||
feed.setLink( req.getRequestURL().toString() );
|
||||
feed.setLink( req.getRequestURL().toString() );
|
||||
}
|
||||
|
||||
SyndFeedOutput output = new SyndFeedOutput();
|
||||
|
@ -186,11 +195,11 @@ public class RssFeedServlet
|
|||
res.sendError( HttpServletResponse.SC_UNAUTHORIZED, COULD_NOT_AUTHENTICATE_USER );
|
||||
}
|
||||
catch ( AccountLockedException acce )
|
||||
{
|
||||
{
|
||||
res.sendError( HttpServletResponse.SC_UNAUTHORIZED, COULD_NOT_AUTHENTICATE_USER );
|
||||
}
|
||||
catch ( AuthenticationException authe )
|
||||
{
|
||||
{
|
||||
log.debug( COULD_NOT_AUTHENTICATE_USER, authe );
|
||||
res.sendError( HttpServletResponse.SC_UNAUTHORIZED, COULD_NOT_AUTHENTICATE_USER );
|
||||
}
|
||||
|
@ -200,7 +209,7 @@ public class RssFeedServlet
|
|||
res.sendError( HttpServletResponse.SC_INTERNAL_SERVER_ERROR, COULD_NOT_GENERATE_FEED_ERROR );
|
||||
}
|
||||
catch ( MustChangePasswordException e )
|
||||
{
|
||||
{
|
||||
res.sendError( HttpServletResponse.SC_UNAUTHORIZED, COULD_NOT_AUTHENTICATE_USER );
|
||||
}
|
||||
catch ( UnauthorizedException e )
|
||||
|
@ -208,24 +217,25 @@ public class RssFeedServlet
|
|||
log.debug( e.getMessage() );
|
||||
if ( repoId != null )
|
||||
{
|
||||
res.setHeader("WWW-Authenticate", "Basic realm=\"Repository Archiva Managed " + repoId + " Repository" );
|
||||
res.setHeader( "WWW-Authenticate",
|
||||
"Basic realm=\"Repository Archiva Managed " + repoId + " Repository" );
|
||||
}
|
||||
else
|
||||
{
|
||||
res.setHeader("WWW-Authenticate", "Basic realm=\"Artifact " + groupId + ":" + artifactId );
|
||||
res.setHeader( "WWW-Authenticate", "Basic realm=\"Artifact " + groupId + ":" + artifactId );
|
||||
}
|
||||
|
||||
|
||||
res.sendError( HttpServletResponse.SC_UNAUTHORIZED, USER_NOT_AUTHORIZED );
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Basic authentication.
|
||||
*
|
||||
*
|
||||
* @param req
|
||||
* @param repositoryId TODO
|
||||
* @param groupId TODO
|
||||
* @param artifactId TODO
|
||||
* @param groupId TODO
|
||||
* @param artifactId TODO
|
||||
* @return
|
||||
*/
|
||||
private boolean isAllowed( HttpServletRequest req, String repositoryId, String groupId, String artifactId )
|
||||
|
@ -287,20 +297,20 @@ public class RssFeedServlet
|
|||
AuthenticationResult result = httpAuth.getAuthenticationResult( req, null );
|
||||
SecuritySession securitySession = httpAuth.getSecuritySession( req.getSession( true ) );
|
||||
|
||||
if ( servletAuth.isAuthenticated( req, result )
|
||||
&& servletAuth.isAuthorized( req, securitySession, repoId,
|
||||
ArchivaRoleConstants.OPERATION_REPOSITORY_ACCESS ) )
|
||||
if ( servletAuth.isAuthenticated( req, result ) && servletAuth.isAuthorized( req, securitySession,
|
||||
repoId,
|
||||
ArchivaRoleConstants.OPERATION_REPOSITORY_ACCESS ) )
|
||||
{
|
||||
return true;
|
||||
}
|
||||
}
|
||||
catch ( AuthorizationException e )
|
||||
{
|
||||
|
||||
|
||||
}
|
||||
catch ( UnauthorizedException e )
|
||||
{
|
||||
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@@ -7,6 +7,8 @@
  <bean id="loggerManager" class="org.codehaus.plexus.logging.slf4j.Slf4jLoggerManager"
        init-method="initialize"/>

  <alias name="repositorySessionFactory#file" alias="repositorySessionFactory"/>

  <bean name="wagon#http" class="org.apache.maven.wagon.providers.http.LightweightHttpWagon" scope="prototype">
    <property name="httpHeaders">
      <map>

@@ -40,8 +42,7 @@
  <bean name="searchService" lazy-init="true" scope="singleton"
        class="org.apache.archiva.web.xmlrpc.services.SearchServiceImpl">
    <constructor-arg ref="xmlRpcUserRepositories"/>
    <constructor-arg ref="metadataResolver"/>
    <constructor-arg ref="metadataRepository"/>
    <constructor-arg ref="repositorySessionFactory"/>
    <constructor-arg ref="nexusSearch"/>
  </bean>


@@ -51,7 +52,7 @@
    <constructor-arg ref="archivaConfiguration"/>
    <constructor-arg ref="repositoryContentConsumers"/>
    <constructor-arg ref="repositoryContentFactory"/>
    <constructor-arg ref="metadataRepository"/>
    <constructor-arg ref="repositorySessionFactory"/>
    <constructor-arg ref="archivaTaskScheduler#repository"/>
    <constructor-arg>
      <bean class="org.apache.archiva.web.spring.RepositoryListenerFactoryBean"/>

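In the Spring wiring above, the metadataResolver and metadataRepository constructor arguments are replaced by a single repositorySessionFactory reference, with the file-backed factory aliased to the plain repositorySessionFactory name. A hedged sketch of what a constructor-injected consumer of that bean can look like (ExampleService and listRootNamespaces are illustrative names only; the real SearchServiceImpl constructor takes more collaborators):

import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.RepositorySessionFactory;

import java.util.ArrayList;
import java.util.List;

public class ExampleService
{
    private final RepositorySessionFactory repositorySessionFactory;

    public ExampleService( RepositorySessionFactory repositorySessionFactory )
    {
        this.repositorySessionFactory = repositorySessionFactory;
    }

    public List<String> listRootNamespaces( String repoId )
        throws Exception
    {
        RepositorySession session = repositorySessionFactory.createSession();
        try
        {
            // resolver calls now take the session as their first argument
            return new ArrayList<String>( session.getResolver().resolveRootNamespaces( session, repoId ) );
        }
        finally
        {
            session.close();
        }
    }
}
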
@ -23,6 +23,7 @@ import org.apache.archiva.metadata.model.ArtifactMetadata;
|
|||
import org.apache.archiva.metadata.model.ProjectVersionMetadata;
|
||||
import org.apache.archiva.metadata.model.ProjectVersionReference;
|
||||
import org.apache.archiva.metadata.repository.MetadataResolver;
|
||||
import org.apache.archiva.metadata.repository.RepositorySession;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
|
@ -48,24 +49,26 @@ public class TestMetadataResolver
|
|||
|
||||
private Map<String, Collection<String>> versionsInProject = new HashMap<String, Collection<String>>();
|
||||
|
||||
public ProjectVersionMetadata resolveProjectVersion( String repoId, String namespace, String projectId,
|
||||
String projectVersion )
|
||||
public ProjectVersionMetadata resolveProjectVersion( RepositorySession repositorySession, String repoId,
|
||||
String namespace, String projectId, String projectVersion )
|
||||
{
|
||||
return projectVersions.get( createMapKey( repoId, namespace, projectId, projectVersion ) );
|
||||
}
|
||||
|
||||
public Collection<ProjectVersionReference> resolveProjectReferences( String repoId, String namespace,
|
||||
public Collection<ProjectVersionReference> resolveProjectReferences( RepositorySession repositorySession,
|
||||
String repoId, String namespace,
|
||||
String projectId, String projectVersion )
|
||||
{
|
||||
return references.get( createMapKey( repoId, namespace, projectId, projectVersion ) );
|
||||
}
|
||||
|
||||
public Collection<String> resolveRootNamespaces( String repoId )
|
||||
public Collection<String> resolveRootNamespaces( RepositorySession repositorySession, String repoId )
|
||||
{
|
||||
return resolveNamespaces( repoId, null );
|
||||
return resolveNamespaces( repositorySession, repoId, null );
|
||||
}
|
||||
|
||||
public Collection<String> resolveNamespaces( String repoId, String baseNamespace )
|
||||
public Collection<String> resolveNamespaces( RepositorySession repositorySession, String repoId,
|
||||
String baseNamespace )
|
||||
{
|
||||
Set<String> namespaces = new LinkedHashSet<String>();
|
||||
int fromIndex = baseNamespace != null ? baseNamespace.length() + 1 : 0;
|
||||
|
@ -87,20 +90,21 @@ public class TestMetadataResolver
|
|||
return namespaces;
|
||||
}
|
||||
|
||||
public Collection<String> resolveProjects( String repoId, String namespace )
|
||||
public Collection<String> resolveProjects( RepositorySession repositorySession, String repoId, String namespace )
|
||||
{
|
||||
Collection<String> list = projectsInNamespace.get( namespace );
|
||||
return list != null ? list : Collections.<String>emptyList();
|
||||
}
|
||||
|
||||
public Collection<String> resolveProjectVersions( String repoId, String namespace, String projectId )
|
||||
public Collection<String> resolveProjectVersions( RepositorySession repositorySession, String repoId,
|
||||
String namespace, String projectId )
|
||||
{
|
||||
Collection<String> list = versionsInProject.get( namespace + ":" + projectId );
|
||||
return list != null ? list : Collections.<String>emptyList();
|
||||
}
|
||||
|
||||
public Collection<ArtifactMetadata> resolveArtifacts( String repoId, String namespace, String projectId,
|
||||
String projectVersion )
|
||||
public Collection<ArtifactMetadata> resolveArtifacts( RepositorySession repositorySession, String repoId,
|
||||
String namespace, String projectId, String projectVersion )
|
||||
{
|
||||
List<ArtifactMetadata> artifacts = this.artifacts.get( createMapKey( repoId, namespace, projectId,
|
||||
projectVersion ) );
|
||||
|
|
|
@@ -0,0 +1,40 @@
package org.apache.archiva.metadata.repository.memory;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.RepositorySessionFactory;

public class TestRepositorySessionFactory
    implements RepositorySessionFactory
{
    private RepositorySession repositorySession;

    public void setRepositorySession( RepositorySession repositorySession )
    {
        this.repositorySession = repositorySession;
    }

    public RepositorySession createSession()
    {
        return repositorySession != null ? repositorySession : new RepositorySession( new TestMetadataRepository(),
                                                                                      new TestMetadataResolver() );
    }
}

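The test diffs that follow all wire this new factory the same way: build a TestMetadataResolver (plus a Mockito-mocked MetadataRepository where the test touches storage), put both behind a mocked RepositorySession, and push that session into the TestRepositorySessionFactory obtained from the test container. A condensed sketch of that setUp wiring (ExampleActionTest is a placeholder class name; lookup comes from PlexusInSpringTestCase):

import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.metadata.repository.memory.TestMetadataResolver;
import org.apache.archiva.metadata.repository.memory.TestRepositorySessionFactory;
import org.codehaus.plexus.spring.PlexusInSpringTestCase;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

public class ExampleActionTest
    extends PlexusInSpringTestCase
{
    private TestMetadataResolver metadataResolver;

    @Override
    protected void setUp()
        throws Exception
    {
        super.setUp();

        metadataResolver = new TestMetadataResolver();
        MetadataRepository metadataRepository = mock( MetadataRepository.class );

        // the mocked session hands back the in-memory resolver and the mocked repository
        RepositorySession repositorySession = mock( RepositorySession.class );
        when( repositorySession.getResolver() ).thenReturn( metadataResolver );
        when( repositorySession.getRepository() ).thenReturn( metadataRepository );

        // the test container supplies TestRepositorySessionFactory for the RepositorySessionFactory role
        TestRepositorySessionFactory factory =
            (TestRepositorySessionFactory) lookup( RepositorySessionFactory.class );
        factory.setRepositorySession( repositorySession );
    }
}
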
@@ -21,12 +21,18 @@ package org.apache.maven.archiva.web.action;

import com.opensymphony.xwork2.Action;
import org.apache.archiva.metadata.model.ProjectVersionMetadata;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.metadata.repository.memory.TestMetadataResolver;
import org.apache.archiva.metadata.repository.memory.TestRepositorySessionFactory;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

public class BrowseActionTest
    extends AbstractActionTestCase
{

@@ -385,6 +391,10 @@ public class BrowseActionTest
    {
        super.setUp();
        action = (BrowseAction) lookup( Action.class, ACTION_HINT );
        metadataResolver = (TestMetadataResolver) action.getMetadataResolver();
        metadataResolver = new TestMetadataResolver();
        RepositorySession repositorySession = mock( RepositorySession.class );
        when( repositorySession.getResolver() ).thenReturn( metadataResolver );
        TestRepositorySessionFactory factory = (TestRepositorySessionFactory) lookup( RepositorySessionFactory.class );
        factory.setRepositorySession( repositorySession );
    }
}

@ -22,6 +22,9 @@ package org.apache.maven.archiva.web.action;
|
|||
import com.opensymphony.xwork2.Action;
|
||||
import org.apache.archiva.metadata.model.ArtifactMetadata;
|
||||
import org.apache.archiva.metadata.repository.MetadataRepository;
|
||||
import org.apache.archiva.metadata.repository.RepositorySession;
|
||||
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
|
||||
import org.apache.archiva.metadata.repository.memory.TestRepositorySessionFactory;
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
|
||||
import org.apache.maven.archiva.configuration.Configuration;
|
||||
|
@ -35,7 +38,9 @@ import org.easymock.classextension.MockClassControl;
|
|||
|
||||
import java.io.File;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
public class DeleteArtifactActionTest
|
||||
extends PlexusInSpringTestCase
|
||||
|
@ -74,17 +79,23 @@ public class DeleteArtifactActionTest
|
|||
assertNotNull( action );
|
||||
|
||||
configurationControl = MockControl.createControl( ArchivaConfiguration.class );
|
||||
configuration = ( ArchivaConfiguration ) configurationControl.getMock();
|
||||
configuration = (ArchivaConfiguration) configurationControl.getMock();
|
||||
|
||||
repositoryFactoryControl = MockClassControl.createControl( RepositoryContentFactory.class );
|
||||
repositoryFactory = ( RepositoryContentFactory ) repositoryFactoryControl.getMock();
|
||||
repositoryFactory = (RepositoryContentFactory) repositoryFactoryControl.getMock();
|
||||
|
||||
metadataRepositoryControl = MockControl.createControl( MetadataRepository.class );
|
||||
metadataRepository = ( MetadataRepository ) metadataRepositoryControl.getMock();
|
||||
metadataRepository = (MetadataRepository) metadataRepositoryControl.getMock();
|
||||
|
||||
RepositorySession repositorySession = mock( RepositorySession.class );
|
||||
when( repositorySession.getRepository() ).thenReturn( metadataRepository );
|
||||
|
||||
TestRepositorySessionFactory repositorySessionFactory = (TestRepositorySessionFactory) lookup(
|
||||
RepositorySessionFactory.class );
|
||||
repositorySessionFactory.setRepositorySession( repositorySession );
|
||||
|
||||
action.setConfiguration( configuration );
|
||||
action.setRepositoryFactory( repositoryFactory );
|
||||
action.setMetadataRepository( metadataRepository );
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -92,7 +103,7 @@ public class DeleteArtifactActionTest
|
|||
throws Exception
|
||||
{
|
||||
action = null;
|
||||
|
||||
|
||||
super.tearDown();
|
||||
}
|
||||
|
||||
|
@ -117,8 +128,10 @@ public class DeleteArtifactActionTest
|
|||
repoContent.setRepository( config.findManagedRepositoryById( REPOSITORY_ID ) );
|
||||
|
||||
configurationControl.expectAndReturn( configuration.getConfiguration(), config );
|
||||
repositoryFactoryControl.expectAndReturn( repositoryFactory.getManagedRepositoryContent( REPOSITORY_ID ), repoContent );
|
||||
metadataRepositoryControl.expectAndReturn( metadataRepository.getArtifacts( REPOSITORY_ID, GROUP_ID, ARTIFACT_ID, VERSION ),
|
||||
repositoryFactoryControl.expectAndReturn( repositoryFactory.getManagedRepositoryContent( REPOSITORY_ID ),
|
||||
repoContent );
|
||||
metadataRepositoryControl.expectAndReturn( metadataRepository.getArtifacts( REPOSITORY_ID, GROUP_ID,
|
||||
ARTIFACT_ID, VERSION ),
|
||||
new ArrayList<ArtifactMetadata>() );
|
||||
|
||||
configurationControl.replay();
|
||||
|
@ -133,7 +146,7 @@ public class DeleteArtifactActionTest
|
|||
assertFalse( new File( artifactPath + ".jar" ).exists() );
|
||||
assertFalse( new File( artifactPath + ".jar.sha1" ).exists() );
|
||||
assertFalse( new File( artifactPath + ".jar.md5" ).exists() );
|
||||
|
||||
|
||||
assertFalse( new File( artifactPath + ".pom" ).exists() );
|
||||
assertFalse( new File( artifactPath + ".pom.sha1" ).exists() );
|
||||
assertFalse( new File( artifactPath + ".pom.md5" ).exists() );
|
||||
|
|
|
@ -28,6 +28,9 @@ import org.apache.archiva.indexer.search.SearchResults;
|
|||
import org.apache.archiva.indexer.util.SearchUtil;
|
||||
import org.apache.archiva.metadata.model.ArtifactMetadata;
|
||||
import org.apache.archiva.metadata.repository.MetadataRepository;
|
||||
import org.apache.archiva.metadata.repository.RepositorySession;
|
||||
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
|
||||
import org.apache.archiva.metadata.repository.memory.TestRepositorySessionFactory;
|
||||
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
|
||||
import org.apache.maven.archiva.security.UserRepositories;
|
||||
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
|
||||
|
@ -38,6 +41,9 @@ import java.util.Arrays;
|
|||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
|
@ -60,6 +66,8 @@ public class SearchActionTest
|
|||
|
||||
private static final String GUEST = "guest";
|
||||
|
||||
private RepositorySession session;
|
||||
|
||||
@Override
|
||||
protected void setUp()
|
||||
throws Exception
|
||||
|
@ -68,28 +76,26 @@ public class SearchActionTest
|
|||
|
||||
action = new SearchAction();
|
||||
|
||||
session = mock( RepositorySession.class );
|
||||
TestRepositorySessionFactory factory = (TestRepositorySessionFactory) lookup( RepositorySessionFactory.class );
|
||||
factory.setRepositorySession( session );
|
||||
action.setRepositorySessionFactory( factory );
|
||||
|
||||
MockControl archivaConfigControl = MockControl.createControl( ArchivaConfiguration.class );
|
||||
ArchivaConfiguration archivaConfig = (ArchivaConfiguration) archivaConfigControl.getMock();
|
||||
|
||||
userReposControl = MockControl.createControl( UserRepositories.class );
|
||||
userRepos = ( UserRepositories ) userReposControl.getMock();
|
||||
userRepos = (UserRepositories) userReposControl.getMock();
|
||||
|
||||
searchControl = MockControl.createControl( RepositorySearch.class );
|
||||
searchControl.setDefaultMatcher( MockControl.ALWAYS_MATCHER );
|
||||
search = ( RepositorySearch ) searchControl.getMock();
|
||||
search = (RepositorySearch) searchControl.getMock();
|
||||
|
||||
action.setArchivaConfiguration( archivaConfig );
|
||||
action.setUserRepositories( userRepos );
|
||||
action.setNexusSearch( search );
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void tearDown()
|
||||
throws Exception
|
||||
{
|
||||
super.tearDown();
|
||||
}
|
||||
|
||||
// quick search...
|
||||
|
||||
public void testQuickSearch()
|
||||
|
@ -372,7 +378,7 @@ public class SearchActionTest
|
|||
|
||||
assertEquals( Action.INPUT, result );
|
||||
assertFalse( action.getActionErrors().isEmpty() );
|
||||
assertEquals( "No results found",( String ) action.getActionErrors().iterator().next() );
|
||||
assertEquals( "No results found", (String) action.getActionErrors().iterator().next() );
|
||||
|
||||
searchControl.verify();
|
||||
}
|
||||
|
@ -413,14 +419,14 @@ public class SearchActionTest
|
|||
|
||||
MockControl control = MockControl.createControl( MetadataRepository.class );
|
||||
MetadataRepository metadataRepository = (MetadataRepository) control.getMock();
|
||||
action.setMetadataRepository( metadataRepository );
|
||||
when( session.getRepository() ).thenReturn( metadataRepository );
|
||||
|
||||
ArtifactMetadata artifact = createArtifact( "archiva-configuration", "1.0" );
|
||||
control.expectAndReturn( metadataRepository.getArtifactsByChecksum( TEST_REPO, TEST_CHECKSUM ),
|
||||
Collections.singletonList( artifact ) );
|
||||
|
||||
userReposControl.expectAndReturn( userRepos.getObservableRepositoryIds( GUEST ),
|
||||
Collections.singletonList( TEST_REPO ) );
|
||||
userReposControl.expectAndReturn( userRepos.getObservableRepositoryIds( GUEST ), Collections.singletonList(
|
||||
TEST_REPO ) );
|
||||
|
||||
control.replay();
|
||||
userReposControl.replay();
|
||||
|
@ -441,14 +447,14 @@ public class SearchActionTest
|
|||
|
||||
MockControl control = MockControl.createControl( MetadataRepository.class );
|
||||
MetadataRepository metadataRepository = (MetadataRepository) control.getMock();
|
||||
action.setMetadataRepository( metadataRepository );
|
||||
when( session.getRepository() ).thenReturn( metadataRepository );
|
||||
|
||||
List<ArtifactMetadata> artifacts = Arrays.asList( createArtifact( "archiva-configuration", "1.0" ),
|
||||
createArtifact( "archiva-indexer", "1.0" ) );
|
||||
control.expectAndReturn( metadataRepository.getArtifactsByChecksum( TEST_REPO, TEST_CHECKSUM ), artifacts );
|
||||
|
||||
userReposControl.expectAndReturn( userRepos.getObservableRepositoryIds( GUEST ),
|
||||
Collections.singletonList( TEST_REPO ) );
|
||||
userReposControl.expectAndReturn( userRepos.getObservableRepositoryIds( GUEST ), Collections.singletonList(
|
||||
TEST_REPO ) );
|
||||
|
||||
control.replay();
|
||||
userReposControl.replay();
|
||||
|
@ -469,7 +475,7 @@ public class SearchActionTest
|
|||
|
||||
assertEquals( Action.INPUT, result );
|
||||
assertFalse( action.getActionErrors().isEmpty() );
|
||||
assertEquals( "Unable to search for a blank checksum", ( String ) action.getActionErrors().iterator().next() );
|
||||
assertEquals( "Unable to search for a blank checksum", (String) action.getActionErrors().iterator().next() );
|
||||
}
|
||||
|
||||
public void testFindArtifactNoResults()
|
||||
|
@ -479,13 +485,13 @@ public class SearchActionTest
|
|||
|
||||
MockControl control = MockControl.createControl( MetadataRepository.class );
|
||||
MetadataRepository metadataRepository = (MetadataRepository) control.getMock();
|
||||
action.setMetadataRepository( metadataRepository );
|
||||
when( session.getRepository() ).thenReturn( metadataRepository );
|
||||
|
||||
control.expectAndReturn( metadataRepository.getArtifactsByChecksum( TEST_REPO, TEST_CHECKSUM ),
|
||||
Collections.<ArtifactMetadata>emptyList() );
|
||||
|
||||
userReposControl.expectAndReturn( userRepos.getObservableRepositoryIds( GUEST ),
|
||||
Collections.singletonList( TEST_REPO ) );
|
||||
userReposControl.expectAndReturn( userRepos.getObservableRepositoryIds( GUEST ), Collections.singletonList(
|
||||
TEST_REPO ) );
|
||||
|
||||
control.replay();
|
||||
userReposControl.replay();
|
||||
|
@ -493,7 +499,7 @@ public class SearchActionTest
|
|||
String result = action.findArtifact();
|
||||
assertEquals( Action.INPUT, result );
|
||||
assertFalse( action.getActionErrors().isEmpty() );
|
||||
assertEquals( "No results found", ( String )action.getActionErrors().iterator().next() );
|
||||
assertEquals( "No results found", (String) action.getActionErrors().iterator().next() );
|
||||
|
||||
control.verify();
|
||||
userReposControl.verify();
|
||||
|
|
|
@ -26,7 +26,10 @@ import org.apache.archiva.metadata.model.MailingList;
|
|||
import org.apache.archiva.metadata.model.ProjectVersionMetadata;
|
||||
import org.apache.archiva.metadata.model.ProjectVersionReference;
|
||||
import org.apache.archiva.metadata.repository.MetadataRepository;
|
||||
import org.apache.archiva.metadata.repository.RepositorySession;
|
||||
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
|
||||
import org.apache.archiva.metadata.repository.memory.TestMetadataResolver;
|
||||
import org.apache.archiva.metadata.repository.memory.TestRepositorySessionFactory;
|
||||
import org.apache.archiva.metadata.repository.storage.maven2.MavenArtifactFacet;
|
||||
import org.apache.maven.archiva.common.utils.VersionUtil;
|
||||
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
|
||||
|
@ -411,9 +414,6 @@ public class ShowArtifactActionTest
|
|||
action.setPropertyValue( "bar" );
|
||||
action.setRepositoryId( TEST_REPO );
|
||||
|
||||
MetadataRepository repo = mock( MetadataRepository.class );
|
||||
action.setMetadataRepository( repo );
|
||||
|
||||
String result = action.addMetadataProperty();
|
||||
|
||||
assertActionSuccess( action, result );
|
||||
|
@ -631,7 +631,15 @@ public class ShowArtifactActionTest
|
|||
{
|
||||
super.setUp();
|
||||
action = (ShowArtifactAction) lookup( Action.class, ACTION_HINT );
|
||||
metadataResolver = (TestMetadataResolver) action.getMetadataResolver();
|
||||
|
||||
metadataResolver = new TestMetadataResolver();
|
||||
MetadataRepository repo = mock( MetadataRepository.class );
|
||||
RepositorySession repositorySession = mock( RepositorySession.class );
|
||||
when( repositorySession.getResolver() ).thenReturn( metadataResolver );
|
||||
when( repositorySession.getRepository() ).thenReturn( repo );
|
||||
TestRepositorySessionFactory repositorySessionFactory = (TestRepositorySessionFactory) lookup(
|
||||
RepositorySessionFactory.class );
|
||||
repositorySessionFactory.setRepositorySession( repositorySession );
|
||||
|
||||
RepositoryContentFactory factory = mock( RepositoryContentFactory.class );
|
||||
action.setRepositoryFactory( factory );
|
||||
|
|
|
@ -23,7 +23,9 @@ import com.opensymphony.xwork2.Action;
|
|||
import org.apache.archiva.audit.AuditEvent;
|
||||
import org.apache.archiva.audit.AuditListener;
|
||||
import org.apache.archiva.metadata.repository.MetadataRepository;
|
||||
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
|
||||
import org.apache.archiva.metadata.repository.RepositorySession;
|
||||
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
|
||||
import org.apache.archiva.metadata.repository.memory.TestRepositorySessionFactory;
|
||||
import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
|
||||
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
|
||||
import org.apache.maven.archiva.configuration.Configuration;
|
||||
|
@ -47,6 +49,9 @@ import java.util.Arrays;
|
|||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
/**
|
||||
* DeleteManagedRepositoryActionTest
|
||||
*
|
||||
|
@ -73,6 +78,12 @@ public class DeleteManagedRepositoryActionTest
|
|||
|
||||
private RepositoryStatisticsManager repositoryStatisticsManager;
|
||||
|
||||
private MetadataRepository metadataRepository;
|
||||
|
||||
private RepositorySession respositorySession;
|
||||
|
||||
private MockControl metadataRepositoryControl;
|
||||
|
||||
protected void setUp()
|
||||
throws Exception
|
||||
{
|
||||
|
@ -93,10 +104,15 @@ public class DeleteManagedRepositoryActionTest
|
|||
repositoryStatisticsManager = (RepositoryStatisticsManager) repositoryStatisticsManagerControl.getMock();
|
||||
action.setRepositoryStatisticsManager( repositoryStatisticsManager );
|
||||
|
||||
MockControl metadataRepositoryControl = MockControl.createControl( MetadataRepository.class );
|
||||
MetadataRepository metadataRepository = (MetadataRepository) metadataRepositoryControl.getMock();
|
||||
metadataRepositoryControl = MockControl.createControl( MetadataRepository.class );
|
||||
metadataRepository = (MetadataRepository) metadataRepositoryControl.getMock();
|
||||
metadataRepository.removeRepository( REPO_ID );
|
||||
action.setMetadataRepository( metadataRepository );
|
||||
|
||||
respositorySession = mock( RepositorySession.class );
|
||||
when( respositorySession.getRepository() ).thenReturn( metadataRepository );
|
||||
TestRepositorySessionFactory factory = (TestRepositorySessionFactory) lookup( RepositorySessionFactory.class );
|
||||
factory.setRepositorySession( respositorySession );
|
||||
action.setRepositorySessionFactory( factory );
|
||||
|
||||
metadataRepositoryControl.replay();
|
||||
}
|
||||
|
@ -149,7 +165,7 @@ public class DeleteManagedRepositoryActionTest
|
|||
throws Exception
|
||||
{
|
||||
// even when we keep the content, we don't keep the metadata at this point
|
||||
repositoryStatisticsManager.deleteStatistics( REPO_ID );
|
||||
repositoryStatisticsManager.deleteStatistics( metadataRepository, REPO_ID );
|
||||
repositoryStatisticsManagerControl.replay();
|
||||
|
||||
prepareRoleManagerMock();
|
||||
|
@ -158,7 +174,7 @@ public class DeleteManagedRepositoryActionTest
|
|||
|
||||
MockControl control = mockAuditListeners();
|
||||
|
||||
MockControl metadataRepositoryControl = mockMetadataRepository();
|
||||
when( respositorySession.getRepository() ).thenReturn( metadataRepository );
|
||||
|
||||
String status = action.deleteEntry();
|
||||
|
||||
|
@ -173,17 +189,6 @@ public class DeleteManagedRepositoryActionTest
|
|||
metadataRepositoryControl.verify();
|
||||
}
|
||||
|
||||
private MockControl mockMetadataRepository()
|
||||
throws MetadataRepositoryException
|
||||
{
|
||||
MockControl metadataRepositoryControl = MockControl.createControl( MetadataRepository.class );
|
||||
MetadataRepository metadataRepository = (MetadataRepository) metadataRepositoryControl.getMock();
|
||||
metadataRepository.removeRepository( REPO_ID );
|
||||
metadataRepositoryControl.replay();
|
||||
action.setMetadataRepository( metadataRepository );
|
||||
return metadataRepositoryControl;
|
||||
}
|
||||
|
||||
private MockControl mockAuditListeners()
|
||||
{
|
||||
MockControl control = MockControl.createControl( AuditListener.class );
|
||||
|
@ -198,7 +203,7 @@ public class DeleteManagedRepositoryActionTest
|
|||
public void testDeleteRepositoryDeleteContent()
|
||||
throws Exception
|
||||
{
|
||||
repositoryStatisticsManager.deleteStatistics( REPO_ID );
|
||||
repositoryStatisticsManager.deleteStatistics( metadataRepository, REPO_ID );
|
||||
repositoryStatisticsManagerControl.replay();
|
||||
|
||||
prepareRoleManagerMock();
|
||||
|
@ -207,7 +212,7 @@ public class DeleteManagedRepositoryActionTest
|
|||
|
||||
MockControl control = mockAuditListeners();
|
||||
|
||||
MockControl metadataRepositoryControl = mockMetadataRepository();
|
||||
when( respositorySession.getRepository() ).thenReturn( metadataRepository );
|
||||
|
||||
String status = action.deleteContents();
|
||||
|
||||
|
@ -225,7 +230,7 @@ public class DeleteManagedRepositoryActionTest
|
|||
public void testDeleteRepositoryAndAssociatedProxyConnectors()
|
||||
throws Exception
|
||||
{
|
||||
repositoryStatisticsManager.deleteStatistics( REPO_ID );
|
||||
repositoryStatisticsManager.deleteStatistics( metadataRepository, REPO_ID );
|
||||
repositoryStatisticsManagerControl.replay();
|
||||
|
||||
Configuration configuration = prepDeletionTest( createRepository(), 5 );
|
||||
|
@ -238,7 +243,7 @@ public class DeleteManagedRepositoryActionTest
|
|||
assertEquals( 1, configuration.getProxyConnectors().size() );
|
||||
|
||||
MockControl control = mockAuditListeners();
|
||||
MockControl metadataRepositoryControl = mockMetadataRepository();
|
||||
when( respositorySession.getRepository() ).thenReturn( metadataRepository );
|
||||
String status = action.deleteContents();
|
||||
|
||||
assertEquals( Action.SUCCESS, status );
|
||||
|
|
|
@ -19,11 +19,11 @@ package org.apache.maven.archiva.web.action.admin.repositories;
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
|
||||
import com.opensymphony.xwork2.Action;
|
||||
import org.apache.archiva.metadata.repository.MetadataRepository;
|
||||
import org.apache.archiva.metadata.repository.RepositorySession;
|
||||
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
|
||||
import org.apache.archiva.metadata.repository.memory.TestRepositorySessionFactory;
|
||||
import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
|
||||
import org.apache.commons.io.FileUtils;
|
||||
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
|
||||
|
@ -36,6 +36,13 @@ import org.codehaus.redback.integration.interceptor.SecureActionBundle;
|
|||
import org.codehaus.redback.integration.interceptor.SecureActionException;
|
||||
import org.easymock.MockControl;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
/**
|
||||
* EditManagedRepositoryActionTest
|
||||
*
|
||||
|
@ -58,6 +65,8 @@ public class EditManagedRepositoryActionTest
|
|||
|
||||
private File location;
|
||||
|
||||
private MetadataRepository metadataRepository;
|
||||
|
||||
protected void setUp()
|
||||
throws Exception
|
||||
{
|
||||
|
@ -73,6 +82,13 @@ public class EditManagedRepositoryActionTest
|
|||
roleManager = (RoleManager) roleManagerControl.getMock();
|
||||
action.setRoleManager( roleManager );
|
||||
location = getTestFile( "target/test/location" );
|
||||
|
||||
metadataRepository = mock( MetadataRepository.class );
|
||||
RepositorySession repositorySession = mock( RepositorySession.class );
|
||||
when( repositorySession.getRepository() ).thenReturn( metadataRepository );
|
||||
TestRepositorySessionFactory factory = (TestRepositorySessionFactory) lookup( RepositorySessionFactory.class );
|
||||
factory.setRepositorySession( repositorySession );
|
||||
action.setRepositorySessionFactory( factory );
|
||||
}
|
||||
|
||||
public void testSecureActionBundle()
|
||||
|
@ -121,7 +137,7 @@ public class EditManagedRepositoryActionTest
|
|||
roleManager.templatedRoleExists( ArchivaRoleConstants.TEMPLATE_REPOSITORY_OBSERVER, REPO_ID );
|
||||
roleManagerControl.setReturnValue( false );
|
||||
|
||||
roleManager.templatedRoleExists( ArchivaRoleConstants.TEMPLATE_REPOSITORY_OBSERVER, REPO_ID +"-stage" );
|
||||
roleManager.templatedRoleExists( ArchivaRoleConstants.TEMPLATE_REPOSITORY_OBSERVER, REPO_ID + "-stage" );
|
||||
roleManagerControl.setReturnValue( false );
|
||||
|
||||
roleManager.createTemplatedRole( ArchivaRoleConstants.TEMPLATE_REPOSITORY_OBSERVER, REPO_ID );
|
||||
|
@ -129,7 +145,7 @@ public class EditManagedRepositoryActionTest
|
|||
roleManager.templatedRoleExists( ArchivaRoleConstants.TEMPLATE_REPOSITORY_MANAGER, REPO_ID );
|
||||
roleManagerControl.setReturnValue( false );
|
||||
|
||||
roleManager.templatedRoleExists( ArchivaRoleConstants.TEMPLATE_REPOSITORY_MANAGER, REPO_ID +"-stage");
|
||||
roleManager.templatedRoleExists( ArchivaRoleConstants.TEMPLATE_REPOSITORY_MANAGER, REPO_ID + "-stage" );
|
||||
roleManagerControl.setReturnValue( false );
|
||||
|
||||
roleManager.createTemplatedRole( ArchivaRoleConstants.TEMPLATE_REPOSITORY_MANAGER, REPO_ID );
|
||||
|
@ -184,7 +200,7 @@ public class EditManagedRepositoryActionTest
|
|||
roleManager.templatedRoleExists( ArchivaRoleConstants.TEMPLATE_REPOSITORY_OBSERVER, REPO_ID );
|
||||
roleManagerControl.setReturnValue( false );
|
||||
|
||||
roleManager.templatedRoleExists( ArchivaRoleConstants.TEMPLATE_REPOSITORY_OBSERVER, REPO_ID +"-stage");
|
||||
roleManager.templatedRoleExists( ArchivaRoleConstants.TEMPLATE_REPOSITORY_OBSERVER, REPO_ID + "-stage" );
|
||||
roleManagerControl.setReturnValue( false );
|
||||
|
||||
roleManager.createTemplatedRole( ArchivaRoleConstants.TEMPLATE_REPOSITORY_OBSERVER, REPO_ID );
|
||||
|
@ -192,7 +208,7 @@ public class EditManagedRepositoryActionTest
|
|||
roleManager.templatedRoleExists( ArchivaRoleConstants.TEMPLATE_REPOSITORY_MANAGER, REPO_ID );
|
||||
roleManagerControl.setReturnValue( false );
|
||||
|
||||
roleManager.templatedRoleExists( ArchivaRoleConstants.TEMPLATE_REPOSITORY_MANAGER, REPO_ID +"-stage");
|
||||
roleManager.templatedRoleExists( ArchivaRoleConstants.TEMPLATE_REPOSITORY_MANAGER, REPO_ID + "-stage" );
|
||||
roleManagerControl.setReturnValue( false );
|
||||
|
||||
roleManager.createTemplatedRole( ArchivaRoleConstants.TEMPLATE_REPOSITORY_MANAGER, REPO_ID );
|
||||
|
@ -207,7 +223,6 @@ public class EditManagedRepositoryActionTest
|
|||
stageRepoConfiguration.addManagedRepository( createStagingRepository() );
|
||||
archivaConfigurationControl.setReturnValue( stageRepoConfiguration );
|
||||
|
||||
|
||||
archivaConfigurationControl.setReturnValue( configuration );
|
||||
archivaConfigurationControl.setReturnValue( configuration );
|
||||
|
||||
|
@ -219,7 +234,7 @@ public class EditManagedRepositoryActionTest
|
|||
RepositoryStatisticsManager repositoryStatisticsManager =
|
||||
(RepositoryStatisticsManager) repositoryStatisticsManagerControl.getMock();
|
||||
action.setRepositoryStatisticsManager( repositoryStatisticsManager );
|
||||
repositoryStatisticsManager.deleteStatistics( REPO_ID );
|
||||
repositoryStatisticsManager.deleteStatistics( metadataRepository, REPO_ID );
|
||||
repositoryStatisticsManagerControl.replay();
|
||||
|
||||
action.setRepoid( REPO_ID );
|
||||
|
@ -300,10 +315,11 @@ public class EditManagedRepositoryActionTest
|
|||
repository.setScanned( false );
|
||||
repository.setDeleteReleasedSnapshots( true );
|
||||
}
|
||||
|
||||
private void populateStagingRepository( ManagedRepositoryConfiguration repository )
|
||||
throws IOException
|
||||
{
|
||||
repository.setId( REPO_ID + "-stage");
|
||||
repository.setId( REPO_ID + "-stage" );
|
||||
repository.setName( "repo name" );
|
||||
repository.setLocation( location.getCanonicalPath() );
|
||||
repository.setLayout( "default" );
|
||||
@ -19,20 +19,24 @@ package org.apache.maven.archiva.web.action.admin.repositories;
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
import java.util.Arrays;
|
||||
|
||||
import com.meterware.servletunit.ServletRunner;
|
||||
import com.meterware.servletunit.ServletUnitClient;
|
||||
import com.opensymphony.xwork2.Action;
|
||||
import org.apache.archiva.metadata.repository.MetadataRepository;
|
||||
import org.apache.archiva.metadata.repository.stats.DefaultRepositoryStatisticsManager;
|
||||
import org.apache.archiva.metadata.repository.RepositorySession;
|
||||
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
|
||||
import org.apache.archiva.metadata.repository.memory.TestRepositorySessionFactory;
|
||||
import org.apache.archiva.metadata.repository.stats.RepositoryStatistics;
|
||||
import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
|
||||
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
|
||||
import org.codehaus.redback.integration.interceptor.SecureActionBundle;
|
||||
import org.codehaus.redback.integration.interceptor.SecureActionException;
|
||||
import org.easymock.MockControl;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
/**
|
||||
* Test the repositories action returns the correct data.
|
||||
*/
|
||||
|
@ -46,7 +50,16 @@ public class RepositoriesActionTest
|
|||
{
|
||||
super.setUp();
|
||||
|
||||
action = (RepositoriesAction) lookup( Action.class.getName(), "repositoriesAction" );
|
||||
try
|
||||
{
|
||||
action = (RepositoriesAction) lookup( Action.class.getName(), "repositoriesAction" );
|
||||
}
|
||||
catch ( Exception e )
|
||||
{
|
||||
// clean up cache - TODO: move handling to plexus-spring
|
||||
applicationContext.close();
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public void testGetRepositories()
|
||||
|
@ -56,19 +69,20 @@ public class RepositoriesActionTest
|
|||
MetadataRepository metadataRepository = (MetadataRepository) control.getMock();
|
||||
control.expectAndReturn( metadataRepository.getMetadataFacets( "internal", RepositoryStatistics.FACET_ID ),
|
||||
Arrays.asList( "20091125.123456.678" ) );
|
||||
control.expectAndReturn(
|
||||
metadataRepository.getMetadataFacet( "internal", RepositoryStatistics.FACET_ID, "20091125.123456.678" ),
|
||||
new RepositoryStatistics() );
|
||||
control.expectAndReturn( metadataRepository.getMetadataFacet( "internal", RepositoryStatistics.FACET_ID,
|
||||
"20091125.123456.678" ),
|
||||
new RepositoryStatistics() );
|
||||
control.expectAndReturn( metadataRepository.getMetadataFacets( "snapshots", RepositoryStatistics.FACET_ID ),
|
||||
Arrays.asList( "20091112.012345.012" ) );
|
||||
control.expectAndReturn(
|
||||
metadataRepository.getMetadataFacet( "snapshots", RepositoryStatistics.FACET_ID, "20091112.012345.012" ),
|
||||
new RepositoryStatistics() );
|
||||
control.expectAndReturn( metadataRepository.getMetadataFacet( "snapshots", RepositoryStatistics.FACET_ID,
|
||||
"20091112.012345.012" ),
|
||||
new RepositoryStatistics() );
|
||||
control.replay();
|
||||
|
||||
DefaultRepositoryStatisticsManager statsManager =
|
||||
(DefaultRepositoryStatisticsManager) lookup( RepositoryStatisticsManager.class );
|
||||
statsManager.setMetadataRepository( metadataRepository );
|
||||
RepositorySession session = mock( RepositorySession.class );
|
||||
when( session.getRepository() ).thenReturn( metadataRepository );
|
||||
TestRepositorySessionFactory factory = (TestRepositorySessionFactory) lookup( RepositorySessionFactory.class );
|
||||
factory.setRepositorySession( session );
|
||||
|
||||
ServletRunner sr = new ServletRunner();
|
||||
ServletUnitClient sc = sr.newClient();
|
||||
|
|
|
@ -22,6 +22,9 @@ package org.apache.maven.archiva.web.action.reports;
|
|||
import com.opensymphony.xwork2.Action;
|
||||
import org.apache.archiva.metadata.model.MetadataFacet;
|
||||
import org.apache.archiva.metadata.repository.MetadataRepository;
|
||||
import org.apache.archiva.metadata.repository.RepositorySession;
|
||||
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
|
||||
import org.apache.archiva.metadata.repository.memory.TestRepositorySessionFactory;
|
||||
import org.apache.archiva.metadata.repository.stats.RepositoryStatistics;
|
||||
import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
|
||||
import org.apache.archiva.reports.RepositoryProblemFacet;
|
||||
|
@ -36,6 +39,9 @@ import java.util.Collections;
|
|||
import java.util.Date;
|
||||
import java.util.List;
|
||||
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
/**
|
||||
* Test the GenerationReportAction. Note that we are testing for <i>current</i> behaviour, however there are several
|
||||
* instances below where other behaviour may actually be more appropriate (eg the error handling, download stats should
|
||||
|
@ -68,7 +74,16 @@ public class GenerateReportActionTest
|
|||
{
|
||||
super.setUp();
|
||||
|
||||
action = (GenerateReportAction) lookup( Action.class, "generateReport" );
|
||||
try
|
||||
{
|
||||
action = (GenerateReportAction) lookup( Action.class, "generateReport" );
|
||||
}
|
||||
catch ( Exception e )
|
||||
{
|
||||
// clean up cache - TODO: move handling to plexus-spring
|
||||
applicationContext.close();
|
||||
throw e;
|
||||
}
|
||||
|
||||
repositoryStatisticsManagerControl = MockControl.createControl( RepositoryStatisticsManager.class );
|
||||
repositoryStatisticsManager = (RepositoryStatisticsManager) repositoryStatisticsManagerControl.getMock();
|
||||
|
@ -76,7 +91,12 @@ public class GenerateReportActionTest
|
|||
|
||||
metadataRepositoryControl = MockControl.createControl( MetadataRepository.class );
|
||||
metadataRepository = (MetadataRepository) metadataRepositoryControl.getMock();
|
||||
action.setMetadataRepository( metadataRepository );
|
||||
|
||||
RepositorySession repositorySession = mock( RepositorySession.class );
|
||||
when( repositorySession.getRepository() ).thenReturn( metadataRepository );
|
||||
|
||||
TestRepositorySessionFactory factory = (TestRepositorySessionFactory) lookup( RepositorySessionFactory.class );
|
||||
factory.setRepositorySession( repositorySession );
|
||||
}
|
||||
|
||||
private void prepareAction( List<String> selectedRepositories, List<String> availableRepositories )
|
||||
|
@ -169,10 +189,8 @@ public class GenerateReportActionTest
|
|||
public void testGenerateStatisticsSingleRepo()
|
||||
throws Exception
|
||||
{
|
||||
repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange( INTERNAL,
|
||||
null,
|
||||
null ),
|
||||
Collections.singletonList( createDefaultStats() ) );
|
||||
repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange(
|
||||
metadataRepository, INTERNAL, null, null ), Collections.singletonList( createDefaultStats() ) );
|
||||
|
||||
repositoryStatisticsManagerControl.replay();
|
||||
prepareAction( Collections.singletonList( INTERNAL ), Collections.singletonList( SNAPSHOTS ) );
|
||||
|
@ -186,10 +204,8 @@ public class GenerateReportActionTest
|
|||
throws Exception
|
||||
|
||||
{
|
||||
repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange( INTERNAL,
|
||||
null,
|
||||
null ),
|
||||
Collections.<Object>emptyList() );
|
||||
repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange(
|
||||
metadataRepository, INTERNAL, null, null ), Collections.<Object>emptyList() );
|
||||
repositoryStatisticsManagerControl.replay();
|
||||
prepareAction( Collections.singletonList( INTERNAL ), Collections.singletonList( SNAPSHOTS ) );
|
||||
|
||||
|
@ -204,10 +220,8 @@ public class GenerateReportActionTest
|
|||
throws Exception
|
||||
|
||||
{
|
||||
repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange( INTERNAL,
|
||||
null,
|
||||
null ),
|
||||
Collections.singletonList( createDefaultStats() ) );
|
||||
repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange(
|
||||
metadataRepository, INTERNAL, null, null ), Collections.singletonList( createDefaultStats() ) );
|
||||
repositoryStatisticsManagerControl.replay();
|
||||
action.setPage( 2 );
|
||||
prepareAction( Collections.singletonList( INTERNAL ), Collections.singletonList( SNAPSHOTS ) );
|
||||
|
@ -222,14 +236,10 @@ public class GenerateReportActionTest
|
|||
throws Exception
|
||||
|
||||
{
|
||||
repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange( SNAPSHOTS,
|
||||
null,
|
||||
null ),
|
||||
Collections.<Object>emptyList() );
|
||||
repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange( INTERNAL,
|
||||
null,
|
||||
null ),
|
||||
Collections.<Object>emptyList() );
|
||||
repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange(
|
||||
metadataRepository, SNAPSHOTS, null, null ), Collections.<Object>emptyList() );
|
||||
repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange(
|
||||
metadataRepository, INTERNAL, null, null ), Collections.<Object>emptyList() );
|
||||
repositoryStatisticsManagerControl.replay();
|
||||
prepareAction( Arrays.asList( SNAPSHOTS, INTERNAL ), Collections.<String>emptyList() );
|
||||
|
||||
|
@ -246,14 +256,10 @@ public class GenerateReportActionTest
|
|||
throws Exception
|
||||
|
||||
{
|
||||
repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange( SNAPSHOTS,
|
||||
null,
|
||||
null ),
|
||||
Collections.singletonList( createDefaultStats() ) );
|
||||
repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange( INTERNAL,
|
||||
null,
|
||||
null ),
|
||||
Collections.singletonList( createDefaultStats() ) );
|
||||
repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange(
|
||||
metadataRepository, SNAPSHOTS, null, null ), Collections.singletonList( createDefaultStats() ) );
|
||||
repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange(
|
||||
metadataRepository, INTERNAL, null, null ), Collections.singletonList( createDefaultStats() ) );
|
||||
|
||||
repositoryStatisticsManagerControl.replay();
|
||||
prepareAction( Arrays.asList( SNAPSHOTS, INTERNAL ), Collections.<String>emptyList() );
|
||||
|
@ -267,10 +273,8 @@ public class GenerateReportActionTest
|
|||
throws Exception
|
||||
{
|
||||
Date date = new Date();
|
||||
repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange( SNAPSHOTS,
|
||||
null,
|
||||
null ),
|
||||
Collections.singletonList( createStats( date ) ) );
|
||||
repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange(
|
||||
metadataRepository, SNAPSHOTS, null, null ), Collections.singletonList( createStats( date ) ) );
|
||||
repositoryStatisticsManagerControl.replay();
|
||||
|
||||
prepareAction( Arrays.asList( SNAPSHOTS ), Arrays.asList( INTERNAL ) );
|
||||
|
@ -289,14 +293,10 @@ public class GenerateReportActionTest
|
|||
public void testDownloadStatisticsMultipleRepos()
|
||||
throws Exception
|
||||
{
|
||||
repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange( SNAPSHOTS,
|
||||
null,
|
||||
null ),
|
||||
Collections.singletonList( createDefaultStats() ) );
|
||||
repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange( INTERNAL,
|
||||
null,
|
||||
null ),
|
||||
Collections.singletonList( createDefaultStats() ) );
|
||||
repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange(
|
||||
metadataRepository, SNAPSHOTS, null, null ), Collections.singletonList( createDefaultStats() ) );
|
||||
repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange(
|
||||
metadataRepository, INTERNAL, null, null ), Collections.singletonList( createDefaultStats() ) );
|
||||
repositoryStatisticsManagerControl.replay();
|
||||
prepareAction( Arrays.asList( SNAPSHOTS, INTERNAL ), Collections.<String>emptyList() );
|
||||
|
||||
|
@ -367,10 +367,8 @@ public class GenerateReportActionTest
|
|||
throws Exception
|
||||
|
||||
{
|
||||
repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange( INTERNAL,
|
||||
null,
|
||||
null ),
|
||||
Collections.<Object>emptyList() );
|
||||
repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange(
|
||||
metadataRepository, INTERNAL, null, null ), Collections.<Object>emptyList() );
|
||||
repositoryStatisticsManagerControl.replay();
|
||||
prepareAction( Collections.singletonList( INTERNAL ), Collections.singletonList( SNAPSHOTS ) );
|
||||
|
||||
|
@ -395,14 +393,10 @@ public class GenerateReportActionTest
|
|||
throws Exception
|
||||
|
||||
{
|
||||
repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange( SNAPSHOTS,
|
||||
null,
|
||||
null ),
|
||||
Collections.<Object>emptyList() );
|
||||
repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange( INTERNAL,
|
||||
null,
|
||||
null ),
|
||||
Collections.<Object>emptyList() );
|
||||
repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange(
|
||||
metadataRepository, SNAPSHOTS, null, null ), Collections.<Object>emptyList() );
|
||||
repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange(
|
||||
metadataRepository, INTERNAL, null, null ), Collections.<Object>emptyList() );
|
||||
repositoryStatisticsManagerControl.replay();
|
||||
prepareAction( Arrays.asList( SNAPSHOTS, INTERNAL ), Collections.<String>emptyList() );
|
||||
|
||||
|
@ -417,14 +411,10 @@ public class GenerateReportActionTest
|
|||
public void testDownloadStatisticsMultipleRepoInStrutsFormat()
|
||||
throws Exception
|
||||
{
|
||||
repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange( SNAPSHOTS,
|
||||
null,
|
||||
null ),
|
||||
Collections.singletonList( createDefaultStats() ) );
|
||||
repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange( INTERNAL,
|
||||
null,
|
||||
null ),
|
||||
Collections.singletonList( createDefaultStats() ) );
|
||||
repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange(
|
||||
metadataRepository, SNAPSHOTS, null, null ), Collections.singletonList( createDefaultStats() ) );
|
||||
repositoryStatisticsManagerControl.expectAndReturn( repositoryStatisticsManager.getStatisticsInRange(
|
||||
metadataRepository, INTERNAL, null, null ), Collections.singletonList( createDefaultStats() ) );
|
||||
repositoryStatisticsManagerControl.replay();
|
||||
prepareAction( Arrays.asList( SNAPSHOTS, INTERNAL ), Collections.<String>emptyList() );
|
||||
|
||||
|
|
|
@ -19,8 +19,6 @@ package org.apache.maven.archiva.web.rss;
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
|
||||
import com.meterware.httpunit.GetMethodWebRequest;
|
||||
import com.meterware.httpunit.HttpException;
|
||||
import com.meterware.httpunit.WebRequest;
|
||||
|
@ -32,6 +30,8 @@ import org.apache.commons.codec.binary.Base64;
|
|||
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
|
||||
import sun.misc.BASE64Encoder;
|
||||
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
|
||||
public class RssFeedServletTest
|
||||
extends PlexusInSpringTestCase
|
||||
{
|
||||
|
@ -49,16 +49,16 @@ public class RssFeedServletTest
|
|||
public void testRetrieveServlet()
|
||||
throws Exception
|
||||
{
|
||||
RssFeedServlet servlet =
|
||||
(RssFeedServlet) client.newInvocation( "http://localhost/feeds/test-repo" ).getServlet();
|
||||
RssFeedServlet servlet = (RssFeedServlet) client.newInvocation(
|
||||
"http://localhost/feeds/test-repo" ).getServlet();
|
||||
assertNotNull( servlet );
|
||||
}
|
||||
|
||||
public void testRequestNewArtifactsInRepo()
|
||||
throws Exception
|
||||
{
|
||||
RssFeedServlet servlet =
|
||||
(RssFeedServlet) client.newInvocation( "http://localhost/feeds/test-repo" ).getServlet();
|
||||
RssFeedServlet servlet = (RssFeedServlet) client.newInvocation(
|
||||
"http://localhost/feeds/test-repo" ).getServlet();
|
||||
assertNotNull( servlet );
|
||||
|
||||
WebRequest request = new GetMethodWebRequest( "http://localhost/feeds/test-repo" );
|
||||
|
@ -97,8 +97,8 @@ public class RssFeedServletTest
|
|||
public void XXX_testInvalidRequest()
|
||||
throws Exception
|
||||
{
|
||||
RssFeedServlet servlet =
|
||||
(RssFeedServlet) client.newInvocation( "http://localhost/feeds?invalid_param=xxx" ).getServlet();
|
||||
RssFeedServlet servlet = (RssFeedServlet) client.newInvocation(
|
||||
"http://localhost/feeds?invalid_param=xxx" ).getServlet();
|
||||
assertNotNull( servlet );
|
||||
|
||||
try
|
||||
|
@ -116,8 +116,8 @@ public class RssFeedServletTest
|
|||
public void XXX_testInvalidAuthenticationRequest()
|
||||
throws Exception
|
||||
{
|
||||
RssFeedServlet servlet =
|
||||
(RssFeedServlet) client.newInvocation( "http://localhost/feeds/unauthorized-repo" ).getServlet();
|
||||
RssFeedServlet servlet = (RssFeedServlet) client.newInvocation(
|
||||
"http://localhost/feeds/unauthorized-repo" ).getServlet();
|
||||
assertNotNull( servlet );
|
||||
|
||||
WebRequest request = new GetMethodWebRequest( "http://localhost/feeds/unauthorized-repo" );
|
||||
|
@ -142,8 +142,8 @@ public class RssFeedServletTest
|
|||
public void XXX_testUnauthorizedRequest()
|
||||
throws Exception
|
||||
{
|
||||
RssFeedServlet servlet =
|
||||
(RssFeedServlet) client.newInvocation( "http://localhost/feeds/unauthorized-repo" ).getServlet();
|
||||
RssFeedServlet servlet = (RssFeedServlet) client.newInvocation(
|
||||
"http://localhost/feeds/unauthorized-repo" ).getServlet();
|
||||
assertNotNull( servlet );
|
||||
|
||||
WebRequest request = new GetMethodWebRequest( "http://localhost/feeds/unauthorized-repo" );
|
||||
|
@ -19,11 +19,12 @@ package org.apache.maven.archiva.web.tags;
|
|||
* under the License.
|
||||
*/
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.archiva.metadata.model.ProjectVersionMetadata;
|
||||
import org.apache.archiva.metadata.repository.MetadataResolver;
|
||||
import org.apache.archiva.metadata.repository.RepositorySession;
|
||||
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
|
||||
import org.apache.archiva.metadata.repository.memory.TestMetadataResolver;
|
||||
import org.apache.archiva.metadata.repository.memory.TestRepositorySessionFactory;
|
||||
import org.apache.maven.archiva.common.ArchivaException;
|
||||
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
|
||||
import org.apache.maven.archiva.configuration.Configuration;
|
||||
|
@ -32,6 +33,11 @@ import org.apache.maven.artifact.Artifact;
|
|||
import org.apache.maven.artifact.factory.ArtifactFactory;
|
||||
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
public class DependencyTreeTest
|
||||
extends PlexusInSpringTestCase
|
||||
{
|
||||
|
@@ -70,6 +76,12 @@ public class DependencyTreeTest
        ProjectVersionMetadata metadata = new ProjectVersionMetadata();
        metadata.setId( TEST_VERSION );
        metadataResolver.setProjectVersion( TEST_REPO_ID, TEST_GROUP_ID, TEST_ARTIFACT_ID, metadata );

        RepositorySession repositorySession = mock( RepositorySession.class );
        when( repositorySession.getResolver() ).thenReturn( metadataResolver );
        TestRepositorySessionFactory repositorySessionFactory = (TestRepositorySessionFactory) lookup(
            RepositorySessionFactory.class );
        repositorySessionFactory.setRepositorySession( repositorySession );
    }

    public void testTree()
@@ -29,5 +29,10 @@
        <resource>archiva-mime-types.txt</resource>
      </configuration>
    </component>
    <component>
      <role>org.apache.archiva.metadata.repository.RepositorySessionFactory</role>
      <role-hint>default</role-hint>
      <implementation>org.apache.archiva.metadata.repository.memory.TestRepositorySessionFactory</implementation>
    </component>
  </components>
</component-set>
@@ -24,11 +24,5 @@
      <role-hint>default</role-hint>
      <implementation>org.apache.maven.archiva.security.UserRepositoriesStub</implementation>
    </component>
    <component>
      <role>org.apache.archiva.metadata.repository.MetadataResolver</role>
      <role-hint>default</role-hint>
      <implementation>org.apache.archiva.metadata.repository.memory.TestMetadataResolver</implementation>
      <instantiation-strategy>per-lookup</instantiation-strategy>
    </component>
  </components>
</plexus>

@@ -24,11 +24,5 @@
      <role-hint>default</role-hint>
      <implementation>org.apache.maven.archiva.security.UserRepositoriesStub</implementation>
    </component>
    <component>
      <role>org.apache.archiva.metadata.repository.MetadataResolver</role>
      <role-hint>default</role-hint>
      <implementation>org.apache.archiva.metadata.repository.memory.TestMetadataResolver</implementation>
      <instantiation-strategy>per-lookup</instantiation-strategy>
    </component>
  </components>
</plexus>
@@ -0,0 +1,36 @@
package org.apache.archiva.metadata.repository;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

public class TestRepositorySessionFactory
    implements RepositorySessionFactory
{
    private MetadataResolver resolver;

    public RepositorySession createSession()
    {
        return new RepositorySession( new TestMetadataRepository(), resolver );
    }

    public void setResolver( MetadataResolver resolver )
    {
        this.resolver = resolver;
    }
}
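A minimal usage sketch (not part of the commit itself): how a Plexus-based test could obtain this stub and hand it a resolver before the code under test asks for a session. The lookup() helper and the resolver instance are assumed to come from the surrounding test case.

    // sketch only: wiring the stub factory inside a PlexusInSpringTestCase setUp()
    TestRepositorySessionFactory factory =
        (TestRepositorySessionFactory) lookup( RepositorySessionFactory.class );
    factory.setResolver( resolver );                       // any MetadataResolver test double
    RepositorySession session = factory.createSession();   // backed by TestMetadataRepository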
@@ -34,5 +34,9 @@
      <role>org.apache.archiva.metadata.repository.MetadataRepository</role>
      <implementation>org.apache.archiva.metadata.repository.TestMetadataRepository</implementation>
    </component>
    <component>
      <role>org.apache.archiva.metadata.repository.RepositorySessionFactory</role>
      <implementation>org.apache.archiva.metadata.repository.TestRepositorySessionFactory</implementation>
    </component>
  </components>
</component-set>
@ -18,7 +18,8 @@
|
|||
~ under the License.
|
||||
-->
|
||||
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
<parent>
|
||||
<groupId>org.apache.archiva</groupId>
|
||||
|
@ -49,7 +50,7 @@
|
|||
<dependency>
|
||||
<groupId>org.apache.archiva</groupId>
|
||||
<artifactId>audit</artifactId>
|
||||
</dependency>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.archiva</groupId>
|
||||
<artifactId>archiva-repository-scanner</artifactId>
|
||||
|
@ -83,6 +84,11 @@
|
|||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>slf4j-simple</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.mockito</groupId>
|
||||
<artifactId>mockito-all</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
</project>
|
||||
|
|
|
@ -23,6 +23,8 @@ import org.apache.archiva.audit.AuditEvent;
|
|||
import org.apache.archiva.audit.AuditListener;
|
||||
import org.apache.archiva.metadata.model.ArtifactMetadata;
|
||||
import org.apache.archiva.metadata.repository.MetadataRepository;
|
||||
import org.apache.archiva.metadata.repository.RepositorySession;
|
||||
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
|
||||
import org.apache.archiva.metadata.repository.filter.Filter;
|
||||
import org.apache.archiva.metadata.repository.filter.IncludesFilter;
|
||||
import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
|
||||
|
@ -84,8 +86,6 @@ public class AdministrationServiceImpl
|
|||
|
||||
private Collection<RepositoryListener> listeners;
|
||||
|
||||
private MetadataRepository metadataRepository;
|
||||
|
||||
private RepositoryStatisticsManager repositoryStatisticsManager;
|
||||
|
||||
private RepositoryMerger repositoryMerger;
|
||||
|
@ -94,8 +94,11 @@ public class AdministrationServiceImpl
|
|||
|
||||
private AuditListener auditListener;
|
||||
|
||||
private RepositorySessionFactory repositorySessionFactory;
|
||||
|
||||
public AdministrationServiceImpl( ArchivaConfiguration archivaConfig, RepositoryContentConsumers repoConsumersUtil,
|
||||
RepositoryContentFactory repoFactory, MetadataRepository metadataRepository,
|
||||
RepositoryContentFactory repoFactory,
|
||||
RepositorySessionFactory repositorySessionFactory,
|
||||
RepositoryArchivaTaskScheduler repositoryTaskScheduler,
|
||||
Collection<RepositoryListener> listeners,
|
||||
RepositoryStatisticsManager repositoryStatisticsManager,
|
||||
|
@ -106,7 +109,7 @@ public class AdministrationServiceImpl
|
|||
this.repoFactory = repoFactory;
|
||||
this.repositoryTaskScheduler = repositoryTaskScheduler;
|
||||
this.listeners = listeners;
|
||||
this.metadataRepository = metadataRepository;
|
||||
this.repositorySessionFactory = repositorySessionFactory;
|
||||
this.repositoryStatisticsManager = repositoryStatisticsManager;
|
||||
this.repositoryMerger = repositoryMerger;
|
||||
this.auditListener = auditListener;
|
||||
|
@ -186,6 +189,7 @@ public class AdministrationServiceImpl
|
|||
throw new Exception( "Repository does not exist." );
|
||||
}
|
||||
|
||||
RepositorySession repositorySession = repositorySessionFactory.createSession();
|
||||
try
|
||||
{
|
||||
ManagedRepositoryContent repoContent = repoFactory.getManagedRepositoryContent( repoId );
|
||||
|
@ -197,6 +201,7 @@ public class AdministrationServiceImpl
|
|||
// delete from file system
|
||||
repoContent.deleteVersion( ref );
|
||||
|
||||
MetadataRepository metadataRepository = repositorySession.getRepository();
|
||||
Collection<ArtifactMetadata> artifacts = metadataRepository.getArtifacts( repoId, groupId, artifactId,
|
||||
version );
|
||||
|
||||
|
@ -212,11 +217,12 @@ public class AdministrationServiceImpl
|
|||
// repository metadata to an artifact
|
||||
for ( RepositoryListener listener : listeners )
|
||||
{
|
||||
listener.deleteArtifact( repoId, artifact.getNamespace(), artifact.getProject(),
|
||||
artifact.getVersion(), artifact.getId() );
|
||||
listener.deleteArtifact( metadataRepository, repoId, artifact.getNamespace(),
|
||||
artifact.getProject(), artifact.getVersion(), artifact.getId() );
|
||||
}
|
||||
}
|
||||
}
|
||||
repositorySession.save();
|
||||
}
|
||||
catch ( ContentNotFoundException e )
|
||||
{
|
||||
|
@ -230,6 +236,10 @@ public class AdministrationServiceImpl
|
|||
{
|
||||
throw new Exception( "Repository exception occurred." );
|
||||
}
|
||||
finally
|
||||
{
|
||||
repositorySession.close();
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
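The change above illustrates the access pattern this commit introduces throughout the service layer: a RepositorySession is created per operation, the MetadataRepository is obtained from it, modifications are saved, and the session is always closed. A condensed sketch, assembled from the lines above rather than copied verbatim from any one file:

    // sketch of the session lifecycle used by the refactored service methods
    RepositorySession repositorySession = repositorySessionFactory.createSession();
    try
    {
        MetadataRepository metadataRepository = repositorySession.getRepository();
        // ... delete artifacts, update facets, notify listeners ...
        repositorySession.save();   // persist modifications made through the session
    }
    finally
    {
        repositorySession.close();  // lets storage back-ends release their resources
    }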
@ -407,8 +417,18 @@ public class AdministrationServiceImpl
|
|||
throw new Exception( "A repository with that id does not exist" );
|
||||
}
|
||||
|
||||
metadataRepository.removeRepository( repository.getId() );
|
||||
repositoryStatisticsManager.deleteStatistics( repository.getId() );
|
||||
RepositorySession repositorySession = repositorySessionFactory.createSession();
|
||||
try
|
||||
{
|
||||
MetadataRepository metadataRepository = repositorySession.getRepository();
|
||||
metadataRepository.removeRepository( repository.getId() );
|
||||
repositoryStatisticsManager.deleteStatistics( metadataRepository, repository.getId() );
|
||||
repositorySession.save();
|
||||
}
|
||||
finally
|
||||
{
|
||||
repositorySession.close();
|
||||
}
|
||||
config.removeManagedRepository( repository );
|
||||
|
||||
try
|
||||
|
@ -476,109 +496,117 @@ public class AdministrationServiceImpl
|
|||
|
||||
log.debug( "Retrieved repository configuration for repo '" + repoId + "'" );
|
||||
|
||||
if ( repoConfig != null )
|
||||
RepositorySession repositorySession = repositorySessionFactory.createSession();
|
||||
try
|
||||
{
|
||||
stagingConfig = config.findManagedRepositoryById( stagingId );
|
||||
|
||||
if ( stagingConfig != null )
|
||||
MetadataRepository metadataRepository = repositorySession.getRepository();
|
||||
if ( repoConfig != null )
|
||||
{
|
||||
List<ArtifactMetadata> sourceArtifacts = metadataRepository.getArtifacts( stagingId );
|
||||
stagingConfig = config.findManagedRepositoryById( stagingId );
|
||||
|
||||
if ( repoConfig.isReleases() && !repoConfig.isSnapshots() )
|
||||
if ( stagingConfig != null )
|
||||
{
|
||||
log.info( "Repository to be merged contains releases only.." );
|
||||
if ( skipConflicts )
|
||||
List<ArtifactMetadata> sourceArtifacts = metadataRepository.getArtifacts( stagingId );
|
||||
|
||||
if ( repoConfig.isReleases() && !repoConfig.isSnapshots() )
|
||||
{
|
||||
List<ArtifactMetadata> conflicts = repositoryMerger.getConflictingArtifacts( repoId,
|
||||
stagingId );
|
||||
|
||||
if ( log.isDebugEnabled() )
|
||||
log.info( "Repository to be merged contains releases only.." );
|
||||
if ( skipConflicts )
|
||||
{
|
||||
log.debug( "Artifacts in conflict.." );
|
||||
for ( ArtifactMetadata metadata : conflicts )
|
||||
List<ArtifactMetadata> conflicts = repositoryMerger.getConflictingArtifacts(
|
||||
metadataRepository, repoId, stagingId );
|
||||
|
||||
if ( log.isDebugEnabled() )
|
||||
{
|
||||
log.debug( metadata.getNamespace() + ":" + metadata.getProject() + ":" +
|
||||
metadata.getProjectVersion() );
|
||||
log.debug( "Artifacts in conflict.." );
|
||||
for ( ArtifactMetadata metadata : conflicts )
|
||||
{
|
||||
log.debug( metadata.getNamespace() + ":" + metadata.getProject() + ":" +
|
||||
metadata.getProjectVersion() );
|
||||
}
|
||||
}
|
||||
|
||||
sourceArtifacts.removeAll( conflicts );
|
||||
|
||||
log.debug( "Source artifacts size :: " + sourceArtifacts.size() );
|
||||
mergeWithOutSnapshots( sourceArtifacts, stagingId, repoId, null );
|
||||
}
|
||||
else
|
||||
{
|
||||
log.debug( "Source artifacts size :: " + sourceArtifacts.size() );
|
||||
mergeWithOutSnapshots( sourceArtifacts, stagingId, repoId, null );
|
||||
}
|
||||
|
||||
sourceArtifacts.removeAll( conflicts );
|
||||
|
||||
log.debug( "Source artifacts size :: " + sourceArtifacts.size() );
|
||||
mergeWithOutSnapshots( sourceArtifacts, stagingId, repoId );
|
||||
}
|
||||
else
|
||||
{
|
||||
log.debug( "Source artifacts size :: " + sourceArtifacts.size() );
|
||||
mergeWithOutSnapshots( sourceArtifacts, stagingId, repoId );
|
||||
log.info( "Repository to be merged has snapshot artifacts.." );
|
||||
if ( skipConflicts )
|
||||
{
|
||||
List<ArtifactMetadata> conflicts = repositoryMerger.getConflictingArtifacts(
|
||||
metadataRepository, repoId, stagingId );
|
||||
|
||||
if ( log.isDebugEnabled() )
|
||||
{
|
||||
log.debug( "Artifacts in conflict.." );
|
||||
for ( ArtifactMetadata metadata : conflicts )
|
||||
{
|
||||
log.debug( metadata.getNamespace() + ":" + metadata.getProject() + ":" +
|
||||
metadata.getProjectVersion() );
|
||||
}
|
||||
}
|
||||
|
||||
sourceArtifacts.removeAll( conflicts );
|
||||
|
||||
log.debug( "Source artifacts size :: " + sourceArtifacts.size() );
|
||||
|
||||
Filter<ArtifactMetadata> artifactsWithOutConflicts = new IncludesFilter<ArtifactMetadata>(
|
||||
sourceArtifacts );
|
||||
repositoryMerger.merge( metadataRepository, stagingId, repoId, artifactsWithOutConflicts );
|
||||
|
||||
log.info( "Staging repository '" + stagingId + "' merged successfully with managed repo '" +
|
||||
repoId + "'." );
|
||||
}
|
||||
else
|
||||
{
|
||||
repositoryMerger.merge( metadataRepository, stagingId, repoId );
|
||||
|
||||
log.info( "Staging repository '" + stagingId + "' merged successfully with managed repo '" +
|
||||
repoId + "'." );
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
log.info( "Repository to be merged has snapshot artifacts.." );
|
||||
if ( skipConflicts )
|
||||
{
|
||||
List<ArtifactMetadata> conflicts = repositoryMerger.getConflictingArtifacts( repoId,
|
||||
stagingId );
|
||||
|
||||
if ( log.isDebugEnabled() )
|
||||
{
|
||||
log.debug( "Artifacts in conflict.." );
|
||||
for ( ArtifactMetadata metadata : conflicts )
|
||||
{
|
||||
log.debug( metadata.getNamespace() + ":" + metadata.getProject() + ":" +
|
||||
metadata.getProjectVersion() );
|
||||
}
|
||||
}
|
||||
|
||||
sourceArtifacts.removeAll( conflicts );
|
||||
|
||||
log.debug( "Source artifacts size :: " + sourceArtifacts.size() );
|
||||
|
||||
Filter<ArtifactMetadata> artifactsWithOutConflicts = new IncludesFilter<ArtifactMetadata>(
|
||||
sourceArtifacts );
|
||||
repositoryMerger.merge( stagingId, repoId, artifactsWithOutConflicts );
|
||||
|
||||
log.info(
|
||||
"Staging repository '" + stagingId + "' merged successfully with managed repo '" + repoId +
|
||||
"'." );
|
||||
}
|
||||
else
|
||||
{
|
||||
repositoryMerger.merge( stagingId, repoId );
|
||||
|
||||
log.info(
|
||||
"Staging repository '" + stagingId + "' merged successfully with managed repo '" + repoId +
|
||||
"'." );
|
||||
}
|
||||
throw new Exception( "Staging Id : " + stagingId + " not found." );
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new Exception( "Staging Id : " + stagingId + " not found." );
|
||||
throw new Exception( "Repository Id : " + repoId + " not found." );
|
||||
}
|
||||
|
||||
if ( !repositoryTaskScheduler.isProcessingRepositoryTask( repoId ) )
|
||||
{
|
||||
RepositoryTask task = new RepositoryTask();
|
||||
task.setRepositoryId( repoId );
|
||||
|
||||
repositoryTaskScheduler.queueTask( task );
|
||||
}
|
||||
|
||||
AuditEvent event = createAuditEvent( repoConfig );
|
||||
|
||||
// add event for audit log reports
|
||||
metadataRepository.addMetadataFacet( event.getRepositoryId(), event );
|
||||
|
||||
// log event in archiva audit log
|
||||
auditListener.auditEvent( createAuditEvent( repoConfig ) );
|
||||
repositorySession.save();
|
||||
}
|
||||
else
|
||||
finally
|
||||
{
|
||||
throw new Exception( "Repository Id : " + repoId + " not found." );
|
||||
repositorySession.close();
|
||||
}
|
||||
|
||||
if ( !repositoryTaskScheduler.isProcessingRepositoryTask( repoId ) )
|
||||
{
|
||||
RepositoryTask task = new RepositoryTask();
|
||||
task.setRepositoryId( repoId );
|
||||
|
||||
repositoryTaskScheduler.queueTask( task );
|
||||
}
|
||||
|
||||
AuditEvent event = createAuditEvent( repoConfig );
|
||||
|
||||
// add event for audit log reports
|
||||
metadataRepository.addMetadataFacet( event.getRepositoryId(), event );
|
||||
|
||||
// log event in archiva audit log
|
||||
auditListener.auditEvent( createAuditEvent( repoConfig ) );
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
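The merge branch above is deeply nested after re-indentation; when skipConflicts is set, the essential flow reduces to the following sketch, which restates calls already present in the hunk rather than introducing new API:

    // sketch: the conflict-skipping merge path, using only calls shown in the diff
    List<ArtifactMetadata> sourceArtifacts = metadataRepository.getArtifacts( stagingId );
    List<ArtifactMetadata> conflicts =
        repositoryMerger.getConflictingArtifacts( metadataRepository, repoId, stagingId );
    sourceArtifacts.removeAll( conflicts );
    Filter<ArtifactMetadata> artifactsWithOutConflicts =
        new IncludesFilter<ArtifactMetadata>( sourceArtifacts );
    repositoryMerger.merge( metadataRepository, stagingId, repoId, artifactsWithOutConflicts );
    repositorySession.save();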
@ -614,7 +642,8 @@ public class AdministrationServiceImpl
|
|||
return event;
|
||||
}
|
||||
|
||||
private void mergeWithOutSnapshots( List<ArtifactMetadata> sourceArtifacts, String sourceRepoId, String repoid )
|
||||
private void mergeWithOutSnapshots( List<ArtifactMetadata> sourceArtifacts, String sourceRepoId, String repoid,
|
||||
MetadataRepository metadataRepository )
|
||||
throws Exception
|
||||
{
|
||||
List<ArtifactMetadata> artifactsWithOutSnapshots = new ArrayList<ArtifactMetadata>();
|
||||
|
@ -631,7 +660,7 @@ public class AdministrationServiceImpl
|
|||
|
||||
Filter<ArtifactMetadata> artifactListWithOutSnapShots = new IncludesFilter<ArtifactMetadata>( sourceArtifacts );
|
||||
|
||||
repositoryMerger.merge( sourceRepoId, repoid, artifactListWithOutSnapShots );
|
||||
repositoryMerger.merge( metadataRepository, sourceRepoId, repoid, artifactListWithOutSnapShots );
|
||||
}
|
||||
|
||||
private ManagedRepositoryConfiguration getStageRepoConfig( ManagedRepositoryConfiguration repository )
|
||||
|
|
|
@ -29,6 +29,8 @@ import org.apache.archiva.metadata.model.ProjectVersionMetadata;
|
|||
import org.apache.archiva.metadata.model.ProjectVersionReference;
|
||||
import org.apache.archiva.metadata.repository.MetadataRepository;
|
||||
import org.apache.archiva.metadata.repository.MetadataResolver;
|
||||
import org.apache.archiva.metadata.repository.RepositorySession;
|
||||
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
|
||||
import org.apache.archiva.metadata.repository.storage.maven2.MavenArtifactFacet;
|
||||
import org.apache.archiva.metadata.repository.storage.maven2.MavenProjectFacet;
|
||||
import org.apache.archiva.web.xmlrpc.api.SearchService;
|
||||
|
@ -48,17 +50,14 @@ public class SearchServiceImpl
|
|||
|
||||
private XmlRpcUserRepositories xmlRpcUserRepositories;
|
||||
|
||||
private MetadataResolver metadataResolver;
|
||||
private RepositorySessionFactory repositorySessionFactory;
|
||||
|
||||
private MetadataRepository metadataRepository;
|
||||
|
||||
public SearchServiceImpl( XmlRpcUserRepositories xmlRpcUserRepositories, MetadataResolver metadataResolver,
|
||||
MetadataRepository metadataRepository, RepositorySearch search )
|
||||
public SearchServiceImpl( XmlRpcUserRepositories xmlRpcUserRepositories,
|
||||
RepositorySessionFactory repositorySessionFactory, RepositorySearch search )
|
||||
{
|
||||
this.xmlRpcUserRepositories = xmlRpcUserRepositories;
|
||||
this.search = search;
|
||||
this.metadataResolver = metadataResolver;
|
||||
this.metadataRepository = metadataRepository;
|
||||
this.repositorySessionFactory = repositorySessionFactory;
|
||||
}
|
||||
|
||||
@SuppressWarnings( "unchecked" )
|
||||
|
@ -72,45 +71,57 @@ public class SearchServiceImpl
|
|||
|
||||
results = search.search( "", observableRepos, queryString, limits, null );
|
||||
|
||||
for ( SearchResultHit resultHit : results.getHits() )
|
||||
RepositorySession repositorySession = repositorySessionFactory.createSession();
|
||||
try
|
||||
{
|
||||
List<String> resultHitVersions = resultHit.getVersions();
|
||||
if ( resultHitVersions != null )
|
||||
MetadataResolver metadataResolver = repositorySession.getResolver();
|
||||
|
||||
for ( SearchResultHit resultHit : results.getHits() )
|
||||
{
|
||||
for ( String version : resultHitVersions )
|
||||
List<String> resultHitVersions = resultHit.getVersions();
|
||||
if ( resultHitVersions != null )
|
||||
{
|
||||
Artifact artifact = null;
|
||||
for ( String repoId : observableRepos )
|
||||
for ( String version : resultHitVersions )
|
||||
{
|
||||
// slight behaviour change to previous implementation: instead of allocating "jar" when not
|
||||
// found in the database, we can rely on the metadata repository to create it on the fly. We
|
||||
// just allocate the default packaging if the Maven facet is not found.
|
||||
FacetedMetadata model = metadataResolver.resolveProjectVersion( repoId, resultHit.getGroupId(),
|
||||
resultHit.getArtifactId(),
|
||||
version );
|
||||
|
||||
if ( model != null )
|
||||
Artifact artifact = null;
|
||||
for ( String repoId : observableRepos )
|
||||
{
|
||||
String packaging = "jar";
|
||||
// slight behaviour change to previous implementation: instead of allocating "jar" when not
|
||||
// found in the database, we can rely on the metadata repository to create it on the fly. We
|
||||
// just allocate the default packaging if the Maven facet is not found.
|
||||
FacetedMetadata model = metadataResolver.resolveProjectVersion( repositorySession, repoId,
|
||||
resultHit.getGroupId(),
|
||||
resultHit.getArtifactId(),
|
||||
version );
|
||||
|
||||
MavenProjectFacet facet = (MavenProjectFacet) model.getFacet( MavenProjectFacet.FACET_ID );
|
||||
if ( facet != null && facet.getPackaging() != null )
|
||||
if ( model != null )
|
||||
{
|
||||
packaging = facet.getPackaging();
|
||||
}
|
||||
artifact = new Artifact( repoId, resultHit.getGroupId(), resultHit.getArtifactId(), version,
|
||||
packaging );
|
||||
break;
|
||||
}
|
||||
}
|
||||
String packaging = "jar";
|
||||
|
||||
if ( artifact != null )
|
||||
{
|
||||
artifacts.add( artifact );
|
||||
MavenProjectFacet facet = (MavenProjectFacet) model.getFacet(
|
||||
MavenProjectFacet.FACET_ID );
|
||||
if ( facet != null && facet.getPackaging() != null )
|
||||
{
|
||||
packaging = facet.getPackaging();
|
||||
}
|
||||
artifact = new Artifact( repoId, resultHit.getGroupId(), resultHit.getArtifactId(),
|
||||
version, packaging );
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if ( artifact != null )
|
||||
{
|
||||
artifacts.add( artifact );
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
repositorySession.close();
|
||||
}
|
||||
|
||||
return artifacts;
|
||||
}
|
||||
|
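The behaviour-change comment above is easy to lose inside the re-indented block; the packaging resolution it describes comes down to the following sketch, restating lines from the hunk and assuming the resolved model is non-null:

    // sketch: default to "jar" unless the Maven project facet supplies a packaging
    FacetedMetadata model = metadataResolver.resolveProjectVersion( repositorySession, repoId,
                                                                    resultHit.getGroupId(),
                                                                    resultHit.getArtifactId(), version );
    String packaging = "jar";
    MavenProjectFacet facet = (MavenProjectFacet) model.getFacet( MavenProjectFacet.FACET_ID );
    if ( facet != null && facet.getPackaging() != null )
    {
        packaging = facet.getPackaging();
    }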
@ -120,18 +131,30 @@ public class SearchServiceImpl
|
|||
{
|
||||
List<String> observableRepos = xmlRpcUserRepositories.getObservableRepositories();
|
||||
|
||||
List<Artifact> results = new ArrayList<Artifact>();
|
||||
for ( String repoId : observableRepos )
|
||||
{
|
||||
for ( ArtifactMetadata artifact : metadataRepository.getArtifactsByChecksum( repoId, checksum ) )
|
||||
{
|
||||
// TODO: customise XMLRPC to handle non-Maven artifacts
|
||||
MavenArtifactFacet facet = (MavenArtifactFacet) artifact.getFacet( MavenArtifactFacet.FACET_ID );
|
||||
RepositorySession repositorySession = repositorySessionFactory.createSession();
|
||||
|
||||
results.add( new Artifact( artifact.getRepositoryId(), artifact.getNamespace(), artifact.getProject(),
|
||||
artifact.getVersion(), facet != null ? facet.getType() : null ) );
|
||||
List<Artifact> results = new ArrayList<Artifact>();
|
||||
try
|
||||
{
|
||||
MetadataRepository metadataRepository = repositorySession.getRepository();
|
||||
|
||||
for ( String repoId : observableRepos )
|
||||
{
|
||||
for ( ArtifactMetadata artifact : metadataRepository.getArtifactsByChecksum( repoId, checksum ) )
|
||||
{
|
||||
// TODO: customise XMLRPC to handle non-Maven artifacts
|
||||
MavenArtifactFacet facet = (MavenArtifactFacet) artifact.getFacet( MavenArtifactFacet.FACET_ID );
|
||||
|
||||
results.add( new Artifact( artifact.getRepositoryId(), artifact.getNamespace(),
|
||||
artifact.getProject(), artifact.getVersion(),
|
||||
facet != null ? facet.getType() : null ) );
|
||||
}
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
repositorySession.close();
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
|
@ -141,17 +164,28 @@ public class SearchServiceImpl
|
|||
List<Artifact> artifacts = new ArrayList<Artifact>();
|
||||
List<String> observableRepos = xmlRpcUserRepositories.getObservableRepositories();
|
||||
|
||||
for ( String repoId : observableRepos )
|
||||
RepositorySession repositorySession = repositorySessionFactory.createSession();
|
||||
try
|
||||
{
|
||||
Collection<String> results = metadataResolver.resolveProjectVersions( repoId, groupId, artifactId );
|
||||
MetadataResolver metadataResolver = repositorySession.getResolver();
|
||||
|
||||
for ( final String version : results )
|
||||
for ( String repoId : observableRepos )
|
||||
{
|
||||
final Artifact artifact = new Artifact( repoId, groupId, artifactId, version, "pom" );
|
||||
Collection<String> results = metadataResolver.resolveProjectVersions( repositorySession, repoId,
|
||||
groupId, artifactId );
|
||||
|
||||
artifacts.add( artifact );
|
||||
for ( final String version : results )
|
||||
{
|
||||
final Artifact artifact = new Artifact( repoId, groupId, artifactId, version, "pom" );
|
||||
|
||||
artifacts.add( artifact );
|
||||
}
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
repositorySession.close();
|
||||
}
|
||||
|
||||
return artifacts;
|
||||
}
|
||||
|
@ -174,23 +208,33 @@ public class SearchServiceImpl
|
|||
{
|
||||
List<String> observableRepos = xmlRpcUserRepositories.getObservableRepositories();
|
||||
|
||||
for ( String repoId : observableRepos )
|
||||
RepositorySession repositorySession = repositorySessionFactory.createSession();
|
||||
try
|
||||
{
|
||||
ProjectVersionMetadata model = metadataResolver.resolveProjectVersion( repoId, groupId, artifactId,
|
||||
version );
|
||||
if ( model != null )
|
||||
MetadataResolver metadataResolver = repositorySession.getResolver();
|
||||
|
||||
for ( String repoId : observableRepos )
|
||||
{
|
||||
List<Dependency> dependencies = new ArrayList<Dependency>();
|
||||
List<org.apache.archiva.metadata.model.Dependency> modelDeps = model.getDependencies();
|
||||
for ( org.apache.archiva.metadata.model.Dependency dep : modelDeps )
|
||||
ProjectVersionMetadata model = metadataResolver.resolveProjectVersion( repositorySession, repoId,
|
||||
groupId, artifactId, version );
|
||||
if ( model != null )
|
||||
{
|
||||
Dependency dependency = new Dependency( dep.getGroupId(), dep.getArtifactId(), dep.getVersion(),
|
||||
dep.getClassifier(), dep.getType(), dep.getScope() );
|
||||
dependencies.add( dependency );
|
||||
List<Dependency> dependencies = new ArrayList<Dependency>();
|
||||
List<org.apache.archiva.metadata.model.Dependency> modelDeps = model.getDependencies();
|
||||
for ( org.apache.archiva.metadata.model.Dependency dep : modelDeps )
|
||||
{
|
||||
Dependency dependency = new Dependency( dep.getGroupId(), dep.getArtifactId(), dep.getVersion(),
|
||||
dep.getClassifier(), dep.getType(), dep.getScope() );
|
||||
dependencies.add( dependency );
|
||||
}
|
||||
return dependencies;
|
||||
}
|
||||
return dependencies;
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
repositorySession.close();
|
||||
}
|
||||
throw new Exception( "Artifact does not exist." );
|
||||
}
|
||||
|
||||
|
@ -209,16 +253,28 @@ public class SearchServiceImpl
|
|||
List<Artifact> artifacts = new ArrayList<Artifact>();
|
||||
List<String> observableRepos = xmlRpcUserRepositories.getObservableRepositories();
|
||||
|
||||
for ( String repoId : observableRepos )
|
||||
RepositorySession repositorySession = repositorySessionFactory.createSession();
|
||||
try
|
||||
{
|
||||
Collection<ProjectVersionReference> refs = metadataResolver.resolveProjectReferences( repoId, groupId,
|
||||
artifactId, version );
|
||||
for ( ProjectVersionReference ref : refs )
|
||||
MetadataResolver metadataResolver = repositorySession.getResolver();
|
||||
|
||||
for ( String repoId : observableRepos )
|
||||
{
|
||||
artifacts.add( new Artifact( repoId, ref.getNamespace(), ref.getProjectId(), ref.getProjectVersion(),
|
||||
"" ) );
|
||||
Collection<ProjectVersionReference> refs = metadataResolver.resolveProjectReferences( repositorySession,
|
||||
repoId, groupId,
|
||||
artifactId,
|
||||
version );
|
||||
for ( ProjectVersionReference ref : refs )
|
||||
{
|
||||
artifacts.add( new Artifact( repoId, ref.getNamespace(), ref.getProjectId(),
|
||||
ref.getProjectVersion(), "" ) );
|
||||
}
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
repositorySession.close();
|
||||
}
|
||||
|
||||
return artifacts;
|
||||
}
|
||||
|
|
|
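With the resolver and repository fields replaced by a session factory, callers now hand the service three collaborators instead of four, as the updated test below also shows. A hedged sketch of the wiring, using the argument names from the diff:

    // sketch: constructing the XML-RPC search service after this refactoring
    SearchService searchService =
        new SearchServiceImpl( xmlRpcUserRepositories, repositorySessionFactory, search );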
@ -23,6 +23,8 @@ import org.apache.archiva.audit.AuditEvent;
|
|||
import org.apache.archiva.audit.AuditListener;
|
||||
import org.apache.archiva.metadata.model.ArtifactMetadata;
|
||||
import org.apache.archiva.metadata.repository.MetadataRepository;
|
||||
import org.apache.archiva.metadata.repository.RepositorySession;
|
||||
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
|
||||
import org.apache.archiva.metadata.repository.filter.Filter;
|
||||
import org.apache.archiva.metadata.repository.filter.IncludesFilter;
|
||||
import org.apache.archiva.metadata.repository.stats.RepositoryStatisticsManager;
|
||||
|
@ -63,6 +65,9 @@ import java.util.HashMap;
|
|||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
/**
|
||||
* AdministrationServiceImplTest
|
||||
*
|
||||
|
@ -162,6 +167,12 @@ public class AdministrationServiceImplTest
|
|||
metadataRepositoryControl = MockControl.createControl( MetadataRepository.class );
|
||||
metadataRepository = (MetadataRepository) metadataRepositoryControl.getMock();
|
||||
|
||||
RepositorySession repositorySession = mock( RepositorySession.class );
|
||||
when( repositorySession.getRepository() ).thenReturn( metadataRepository );
|
||||
|
||||
RepositorySessionFactory repositorySessionFactory = mock( RepositorySessionFactory.class );
|
||||
when( repositorySessionFactory.createSession() ).thenReturn( repositorySession );
|
||||
|
||||
listenerControl = MockControl.createControl( RepositoryListener.class );
|
||||
listener = (RepositoryListener) listenerControl.getMock();
|
||||
|
||||
|
@ -175,8 +186,9 @@ public class AdministrationServiceImplTest
|
|||
auditListener = (AuditListener) auditListenerControl.getMock();
|
||||
|
||||
service = new AdministrationServiceImpl( archivaConfig, repoConsumersUtil, repositoryFactory,
|
||||
metadataRepository, repositoryTaskScheduler, Collections.singletonList(
|
||||
listener ), repositoryStatisticsManager, repositoryMerger, auditListener );
|
||||
repositorySessionFactory, repositoryTaskScheduler,
|
||||
Collections.singletonList( listener ), repositoryStatisticsManager,
|
||||
repositoryMerger, auditListener );
|
||||
}
|
||||
|
||||
/* Tests for repository consumers */
|
||||
|
@ -342,8 +354,8 @@ public class AdministrationServiceImplTest
|
|||
metadataRepository.removeArtifact( repoContent.getId(), artifact.getNamespace(), artifact.getProject(),
|
||||
artifact.getVersion(), artifact.getId() );
|
||||
|
||||
listener.deleteArtifact( repoContent.getId(), artifact.getNamespace(), artifact.getProject(),
|
||||
artifact.getVersion(), artifact.getId() );
|
||||
listener.deleteArtifact( metadataRepository, repoContent.getId(), artifact.getNamespace(),
|
||||
artifact.getProject(), artifact.getVersion(), artifact.getId() );
|
||||
listenerControl.setVoidCallable( 1 );
|
||||
|
||||
archivaConfigControl.replay();
|
||||
|
@ -399,8 +411,8 @@ public class AdministrationServiceImplTest
|
|||
metadataRepository.removeArtifact( repoContent.getId(), artifact.getNamespace(), artifact.getProject(),
|
||||
artifact.getVersion(), artifact.getId() );
|
||||
|
||||
listener.deleteArtifact( repoContent.getId(), artifact.getNamespace(), artifact.getProject(),
|
||||
artifact.getVersion(), artifact.getId() );
|
||||
listener.deleteArtifact( metadataRepository, repoContent.getId(), artifact.getNamespace(),
|
||||
artifact.getProject(), artifact.getVersion(), artifact.getId() );
|
||||
listenerControl.setVoidCallable( 1 );
|
||||
|
||||
archivaConfigControl.replay();
|
||||
|
@ -725,10 +737,11 @@ public class AdministrationServiceImplTest
|
|||
configControl.expectAndReturn( config.findManagedRepositoryById( "merge-stage" ), staging );
|
||||
|
||||
metadataRepositoryControl.expectAndReturn( metadataRepository.getArtifacts( staging.getId() ), sources );
|
||||
repositoryMergerControl.expectAndDefaultReturn( repositoryMerger.getConflictingArtifacts( staging.getId(),
|
||||
repositoryMergerControl.expectAndDefaultReturn( repositoryMerger.getConflictingArtifacts( metadataRepository,
|
||||
staging.getId(),
|
||||
merge.getId() ),
|
||||
sources );
|
||||
repositoryMerger.merge( staging.getId(), merge.getId() );
|
||||
repositoryMerger.merge( metadataRepository, staging.getId(), merge.getId() );
|
||||
repositoryMergerControl.setVoidCallable();
|
||||
repositoryTaskSchedulerControl.expectAndReturn( repositoryTaskScheduler.isProcessingRepositoryTask( "merge" ),
|
||||
false );
|
||||
|
@ -794,10 +807,11 @@ public class AdministrationServiceImplTest
|
|||
configControl.expectAndReturn( config.findManagedRepositoryById( "repo-stage" ), staging );
|
||||
|
||||
metadataRepositoryControl.expectAndReturn( metadataRepository.getArtifacts( staging.getId() ), sources );
|
||||
repositoryMergerControl.expectAndDefaultReturn( repositoryMerger.getConflictingArtifacts( staging.getId(),
|
||||
repositoryMergerControl.expectAndDefaultReturn( repositoryMerger.getConflictingArtifacts( metadataRepository,
|
||||
staging.getId(),
|
||||
repo.getId() ),
|
||||
conflicts );
|
||||
repositoryMerger.merge( staging.getId(), repo.getId(), artifactsWithOutConflicts );
|
||||
repositoryMerger.merge( metadataRepository, staging.getId(), repo.getId(), artifactsWithOutConflicts );
|
||||
repositoryMergerControl.setMatcher( MockControl.ALWAYS_MATCHER );
|
||||
repositoryMergerControl.setVoidCallable();
|
||||
repositoryTaskSchedulerControl.expectAndReturn( repositoryTaskScheduler.isProcessingRepositoryTask( "repo" ),
|
||||
|
|
|
@ -30,6 +30,8 @@ import org.apache.archiva.metadata.model.ProjectVersionMetadata;
|
|||
import org.apache.archiva.metadata.model.ProjectVersionReference;
|
||||
import org.apache.archiva.metadata.repository.MetadataRepository;
|
||||
import org.apache.archiva.metadata.repository.MetadataResolver;
|
||||
import org.apache.archiva.metadata.repository.RepositorySession;
|
||||
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
|
||||
import org.apache.archiva.metadata.repository.storage.maven2.MavenArtifactFacet;
|
||||
import org.apache.archiva.metadata.repository.storage.maven2.MavenProjectFacet;
|
||||
import org.apache.archiva.web.xmlrpc.api.SearchService;
|
||||
|
@ -46,6 +48,9 @@ import java.util.Collections;
|
|||
import java.util.Date;
|
||||
import java.util.List;
|
||||
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
/**
|
||||
* SearchServiceImplTest
|
||||
*
|
||||
|
@ -80,6 +85,8 @@ public class SearchServiceImplTest
|
|||
|
||||
private static final String TEST_REPO = "test-repo";
|
||||
|
||||
private RepositorySession repositorySession;
|
||||
|
||||
@Override
|
||||
public void setUp()
|
||||
throws Exception
|
||||
|
@ -97,7 +104,13 @@ public class SearchServiceImplTest
|
|||
metadataRepositoryControl = MockControl.createControl( MetadataRepository.class );
|
||||
metadataRepository = (MetadataRepository) metadataRepositoryControl.getMock();
|
||||
|
||||
searchService = new SearchServiceImpl( userRepos, metadataResolver, metadataRepository, search );
|
||||
repositorySession = mock( RepositorySession.class );
|
||||
when( repositorySession.getResolver() ).thenReturn( metadataResolver );
|
||||
when( repositorySession.getRepository() ).thenReturn( metadataRepository );
|
||||
RepositorySessionFactory repositorySessionFactory = mock( RepositorySessionFactory.class );
|
||||
when( repositorySessionFactory.createSession() ).thenReturn( repositorySession );
|
||||
|
||||
searchService = new SearchServiceImpl( userRepos, repositorySessionFactory, search );
|
||||
}
|
||||
|
||||
// MRM-1230
|
||||
|
@ -134,7 +147,8 @@ public class SearchServiceImplTest
|
|||
facet.setPackaging( "war" );
|
||||
model.addFacet( facet );
|
||||
|
||||
metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectVersion( "repo1.mirror",
|
||||
metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectVersion( repositorySession,
|
||||
"repo1.mirror",
|
||||
ARCHIVA_TEST_GROUP_ID,
|
||||
"archiva-webapp", "1.0" ),
|
||||
model );
|
||||
|
@ -189,14 +203,16 @@ public class SearchServiceImplTest
|
|||
searchControl.expectAndDefaultReturn( search.search( "", observableRepoIds, "archiva", limits, null ),
|
||||
results );
|
||||
|
||||
metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectVersion( "repo1.mirror",
|
||||
metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectVersion( repositorySession,
|
||||
"repo1.mirror",
|
||||
ARCHIVA_TEST_GROUP_ID,
|
||||
ARCHIVA_TEST_ARTIFACT_ID,
|
||||
"1.0" ), null );
|
||||
|
||||
ProjectVersionMetadata model = new ProjectVersionMetadata();
|
||||
model.setId( "1.0" );
|
||||
metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectVersion( "public.releases",
|
||||
metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectVersion( repositorySession,
|
||||
"public.releases",
|
||||
ARCHIVA_TEST_GROUP_ID,
|
||||
ARCHIVA_TEST_ARTIFACT_ID,
|
||||
"1.0" ), model );
|
||||
|
@ -256,7 +272,8 @@ public class SearchServiceImplTest
|
|||
facet.setPackaging( "jar" );
|
||||
model.addFacet( facet );
|
||||
|
||||
metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectVersion( "repo1.mirror",
|
||||
metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectVersion( repositorySession,
|
||||
"repo1.mirror",
|
||||
ARCHIVA_TEST_GROUP_ID,
|
||||
ARCHIVA_TEST_ARTIFACT_ID,
|
||||
"1.0" ), model );
|
||||
|
@ -349,11 +366,13 @@ public class SearchServiceImplTest
|
|||
observableRepoIds.add( "public.releases" );
|
||||
|
||||
userReposControl.expectAndReturn( userRepos.getObservableRepositories(), observableRepoIds );
|
||||
metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectVersions( "repo1.mirror",
|
||||
metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectVersions( repositorySession,
|
||||
"repo1.mirror",
|
||||
ARCHIVA_TEST_GROUP_ID,
|
||||
ARCHIVA_TEST_ARTIFACT_ID ),
|
||||
Arrays.asList( "1.0", "1.1-beta-2", "1.2" ) );
|
||||
metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectVersions( "public.releases",
|
||||
metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectVersions( repositorySession,
|
||||
"public.releases",
|
||||
ARCHIVA_TEST_GROUP_ID,
|
||||
ARCHIVA_TEST_ARTIFACT_ID ),
|
||||
Arrays.asList( "1.1-beta-1", "1.1", "1.2.1-SNAPSHOT" ) );
|
||||
|
@ -418,7 +437,8 @@ public class SearchServiceImplTest
|
|||
model.addDependency( dependency );
|
||||
|
||||
userReposControl.expectAndReturn( userRepos.getObservableRepositories(), Collections.singletonList( repoId ) );
|
||||
metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectVersion( repoId, ARCHIVA_TEST_GROUP_ID,
|
||||
metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectVersion( repositorySession, repoId,
|
||||
ARCHIVA_TEST_GROUP_ID,
|
||||
ARCHIVA_TEST_ARTIFACT_ID,
|
||||
"1.0" ), model );
|
||||
|
||||
|
@ -444,7 +464,8 @@ public class SearchServiceImplTest
|
|||
String repoId = "repo1.mirror";
|
||||
|
||||
userReposControl.expectAndReturn( userRepos.getObservableRepositories(), Collections.singletonList( repoId ) );
|
||||
metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectVersion( repoId, ARCHIVA_TEST_GROUP_ID,
|
||||
metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectVersion( repositorySession, repoId,
|
||||
ARCHIVA_TEST_GROUP_ID,
|
||||
ARCHIVA_TEST_ARTIFACT_ID,
|
||||
"1.0" ), null );
|
||||
|
||||
|
@ -498,7 +519,7 @@ public class SearchServiceImplTest
|
|||
dependeeModels.add( dependeeModel );
|
||||
|
||||
userReposControl.expectAndReturn( userRepos.getObservableRepositories(), observableRepoIds );
|
||||
metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectReferences( repoId,
|
||||
metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectReferences( repositorySession, repoId,
|
||||
ARCHIVA_TEST_GROUP_ID,
|
||||
ARCHIVA_TEST_ARTIFACT_ID,
|
||||
"1.0" ), dependeeModels );
|
||||
|
@ -529,12 +550,14 @@ public class SearchServiceImplTest
|
|||
// no longer differentiating between a project not being present and a project that is present but with
|
||||
// no references. If it is later determined to be needed, we will need to modify the metadata content repository
|
||||
userReposControl.expectAndReturn( userRepos.getObservableRepositories(), observableRepoIds );
|
||||
metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectReferences( "repo1.mirror",
|
||||
metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectReferences( repositorySession,
|
||||
"repo1.mirror",
|
||||
ARCHIVA_TEST_GROUP_ID,
|
||||
ARCHIVA_TEST_ARTIFACT_ID,
|
||||
"1.0" ),
|
||||
Collections.<ProjectVersionReference>emptyList() );
|
||||
metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectReferences( "public.releases",
|
||||
metadataResolverControl.expectAndReturn( metadataResolver.resolveProjectReferences( repositorySession,
|
||||
"public.releases",
|
||||
ARCHIVA_TEST_GROUP_ID,
|
||||
ARCHIVA_TEST_ARTIFACT_ID,
|
||||
"1.0" ),
|
||||
|
|
|
@@ -26,26 +26,40 @@ import org.apache.archiva.metadata.model.ProjectVersionMetadata;
import org.apache.archiva.metadata.model.ProjectVersionReference;
import org.apache.archiva.metadata.repository.filter.ExcludesFilter;
import org.apache.archiva.metadata.repository.storage.RepositoryStorage;
import org.apache.archiva.metadata.repository.storage.RepositoryStorageMetadataInvalidException;
import org.apache.archiva.metadata.repository.storage.RepositoryStorageMetadataNotFoundException;
import org.apache.archiva.repository.events.RepositoryListener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

/**
* Default implementation of the metadata resolver API. At present it will handle updating the content repository
* from new or changed information in the model and artifacts from the repository storage.
*
* This is a singleton component to allow an alternate implementation to be provided. It is intended to be the same
* system-wide for the whole content repository instead of on a per-managed-repository basis. Therefore, the session is
* passed in as an argument to obtain any necessary resources, rather than the class being instantiated within the
* session in the context of a single managed repository's resolution needs.
*
* Note that the caller is responsible for the session, such as closing and saving (which is implied by the resolver
* being obtained from within the session). The {@link RepositorySession#markDirty()} method is used as a hint to ensure
* that the session knows we've made changes at close. We cannot ensure the changes will be persisted if the caller
* chooses to revert first. This is preferable to storing the metadata immediately - a separate session would require
* having a bi-directional link with the session factory, and saving the existing session might save other changes
* unknowingly by the caller.
*
* @plexus.component role="org.apache.archiva.metadata.repository.MetadataResolver"
*/
public class DefaultMetadataResolver
implements MetadataResolver
{
/**
* @plexus.requirement
*/
private MetadataRepository metadataRepository;

/**
* FIXME: this needs to be configurable based on storage type - and could also be instantiated per repo. Change to a
* factory.
* factory, and perhaps retrieve from the session. We should avoid creating one per request, however.
*
* TODO: Also need to accommodate availability of proxy module
* ... could be a different type since we need methods to modify the storage metadata, which would also allow more

@@ -55,12 +69,19 @@ public class DefaultMetadataResolver
*/
private RepositoryStorage repositoryStorage;

/**
* @plexus.requirement role="org.apache.archiva.repository.events.RepositoryListener"
*/
private List<RepositoryListener> listeners;

private static final Logger log = LoggerFactory.getLogger( DefaultMetadataResolver.class );

public ProjectVersionMetadata resolveProjectVersion( String repoId, String namespace, String projectId,
String projectVersion )
public ProjectVersionMetadata resolveProjectVersion( RepositorySession session, String repoId, String namespace,
String projectId, String projectVersion )
throws MetadataResolutionException
{
MetadataRepository metadataRepository = session.getRepository();

ProjectVersionMetadata metadata = metadataRepository.getProjectVersion( repoId, namespace, projectId,
projectVersion );
// TODO: do we want to detect changes as well by comparing timestamps? isProjectVersionNewerThan(updated)

@@ -70,9 +91,10 @@ public class DefaultMetadataResolver
// may then work here and be more efficient than always trying again)
if ( metadata == null || metadata.isIncomplete() )
{
metadata = repositoryStorage.readProjectVersionMetadata( repoId, namespace, projectId, projectVersion );
if ( metadata != null )
try
{
metadata = repositoryStorage.readProjectVersionMetadata( repoId, namespace, projectId, projectVersion );

if ( log.isDebugEnabled() )
{
log.debug( "Resolved project version metadata from storage: " + metadata );

@@ -102,29 +124,54 @@ public class DefaultMetadataResolver
}
try
{
for ( RepositoryListener listener : listeners )
{
listener.addArtifact( session, repoId, namespace, projectId, metadata );
}
metadataRepository.updateProjectVersion( repoId, namespace, projectId, metadata );
}
catch ( MetadataRepositoryException e )
{
log.warn( "Unable to persist resolved information: " + e.getMessage(), e );
}

session.markDirty();
}
catch ( RepositoryStorageMetadataInvalidException e )
{
for ( RepositoryListener listener : listeners )
{
listener.addArtifactProblem( session, repoId, namespace, projectId, projectVersion, e );
}
throw new MetadataResolutionException( e.getMessage(), e );
}
catch ( RepositoryStorageMetadataNotFoundException e )
{
for ( RepositoryListener listener : listeners )
{
listener.addArtifactProblem( session, repoId, namespace, projectId, projectVersion, e );
}
// no need to rethrow - return null
}
}
return metadata;
}

public Collection<ProjectVersionReference> resolveProjectReferences( String repoId, String namespace,
String projectId, String projectVersion )
public Collection<ProjectVersionReference> resolveProjectReferences( RepositorySession session, String repoId,
String namespace, String projectId,
String projectVersion )
throws MetadataResolutionException
{
// TODO: is this assumption correct? could a storage mech. actually know all references in a non-Maven scenario?
// not passed to the storage mechanism as resolving references would require iterating all artifacts
MetadataRepository metadataRepository = session.getRepository();
return metadataRepository.getProjectReferences( repoId, namespace, projectId, projectVersion );
}

public Collection<String> resolveRootNamespaces( String repoId )
public Collection<String> resolveRootNamespaces( RepositorySession session, String repoId )
throws MetadataResolutionException
{
MetadataRepository metadataRepository = session.getRepository();
Collection<String> namespaces = metadataRepository.getRootNamespaces( repoId );
Collection<String> storageNamespaces = repositoryStorage.listRootNamespaces( repoId, new ExcludesFilter<String>(
namespaces ) );

@@ -145,15 +192,18 @@ public class DefaultMetadataResolver
log.warn( "Unable to persist resolved information: " + e.getMessage(), e );
}
}
session.markDirty();

namespaces = new ArrayList<String>( namespaces );
namespaces.addAll( storageNamespaces );
}
return namespaces;
}

public Collection<String> resolveNamespaces( String repoId, String namespace )
public Collection<String> resolveNamespaces( RepositorySession session, String repoId, String namespace )
throws MetadataResolutionException
{
MetadataRepository metadataRepository = session.getRepository();
Collection<String> namespaces = metadataRepository.getNamespaces( repoId, namespace );
Collection<String> exclusions = new ArrayList<String>( namespaces );
exclusions.addAll( metadataRepository.getProjects( repoId, namespace ) );

@@ -177,15 +227,18 @@ public class DefaultMetadataResolver
log.warn( "Unable to persist resolved information: " + e.getMessage(), e );
}
}
session.markDirty();

namespaces = new ArrayList<String>( namespaces );
namespaces.addAll( storageNamespaces );
}
return namespaces;
}

public Collection<String> resolveProjects( String repoId, String namespace )
public Collection<String> resolveProjects( RepositorySession session, String repoId, String namespace )
throws MetadataResolutionException
{
MetadataRepository metadataRepository = session.getRepository();
Collection<String> projects = metadataRepository.getProjects( repoId, namespace );
Collection<String> exclusions = new ArrayList<String>( projects );
exclusions.addAll( metadataRepository.getNamespaces( repoId, namespace ) );

@@ -212,15 +265,19 @@ public class DefaultMetadataResolver
}
}
}
session.markDirty();

projects = new ArrayList<String>( projects );
projects.addAll( storageProjects );
}
return projects;
}

public Collection<String> resolveProjectVersions( String repoId, String namespace, String projectId )
public Collection<String> resolveProjectVersions( RepositorySession session, String repoId, String namespace,
String projectId )
throws MetadataResolutionException
{
MetadataRepository metadataRepository = session.getRepository();
Collection<String> projectVersions = metadataRepository.getProjectVersions( repoId, namespace, projectId );
Collection<String> storageProjectVersions = repositoryStorage.listProjectVersions( repoId, namespace, projectId,
new ExcludesFilter<String>(

@@ -239,39 +296,55 @@ public class DefaultMetadataResolver
namespace,
projectId,
projectVersion );
if ( versionMetadata != null )
for ( RepositoryListener listener : listeners )
{
metadataRepository.updateProjectVersion( repoId, namespace, projectId, versionMetadata );
listener.addArtifact( session, repoId, namespace, projectId, versionMetadata );
}
}
catch ( MetadataResolutionException e )
{
log.warn( "Not update project in metadata repository due to an error resolving it from storage: " +
e.getMessage() );

metadataRepository.updateProjectVersion( repoId, namespace, projectId, versionMetadata );
}
catch ( MetadataRepositoryException e )
{
log.warn( "Unable to persist resolved information: " + e.getMessage(), e );
}
catch ( RepositoryStorageMetadataInvalidException e )
{
log.warn( "Not update project in metadata repository due to an error resolving it from storage: " +
e.getMessage() );

for ( RepositoryListener listener : listeners )
{
listener.addArtifactProblem( session, repoId, namespace, projectId, projectVersion, e );
}
}
catch ( RepositoryStorageMetadataNotFoundException e )
{
for ( RepositoryListener listener : listeners )
{
listener.addArtifactProblem( session, repoId, namespace, projectId, projectVersion, e );
}
}
}
session.markDirty();

projectVersions = new ArrayList<String>( projectVersions );
projectVersions.addAll( storageProjectVersions );
}
return projectVersions;
}

public Collection<ArtifactMetadata> resolveArtifacts( String repoId, String namespace, String projectId,
String projectVersion )
public Collection<ArtifactMetadata> resolveArtifacts( RepositorySession session, String repoId, String namespace,
String projectId, String projectVersion )
throws MetadataResolutionException
{
MetadataRepository metadataRepository = session.getRepository();
Collection<ArtifactMetadata> artifacts = metadataRepository.getArtifacts( repoId, namespace, projectId,
projectVersion );
ExcludesFilter<String> filter = new ExcludesFilter<String>( createArtifactIdList( artifacts ) );
Collection<ArtifactMetadata> storageArtifacts = repositoryStorage.readArtifactsMetadata( repoId, namespace,
projectId,
projectVersion,
new ExcludesFilter<String>(
createArtifactIdList(
artifacts ) ) );
filter );
if ( storageArtifacts != null && !storageArtifacts.isEmpty() )
{
if ( log.isDebugEnabled() )

@@ -289,6 +362,8 @@ public class DefaultMetadataResolver
log.warn( "Unable to persist resolved information: " + e.getMessage(), e );
}
}
session.markDirty();

artifacts = new ArrayList<ArtifactMetadata>( artifacts );
artifacts.addAll( storageArtifacts );
}
@@ -27,8 +27,8 @@ import java.util.Collection;

public interface MetadataResolver
{
ProjectVersionMetadata resolveProjectVersion( String repoId, String namespace, String projectId,
String projectVersion )
ProjectVersionMetadata resolveProjectVersion( RepositorySession session, String repoId, String namespace,
String projectId, String projectVersion )
throws MetadataResolutionException;

/**

@@ -42,23 +42,25 @@ public interface MetadataResolver
* @param projectVersion the version of the project to get references to
* @return a list of project references
*/
Collection<ProjectVersionReference> resolveProjectReferences( String repoId, String namespace, String projectId,
Collection<ProjectVersionReference> resolveProjectReferences( RepositorySession session, String repoId,
String namespace, String projectId,
String projectVersion )
throws MetadataResolutionException;

Collection<String> resolveRootNamespaces( String repoId )
Collection<String> resolveRootNamespaces( RepositorySession session, String repoId )
throws MetadataResolutionException;

Collection<String> resolveNamespaces( String repoId, String namespace )
Collection<String> resolveNamespaces( RepositorySession session, String repoId, String namespace )
throws MetadataResolutionException;

Collection<String> resolveProjects( String repoId, String namespace )
Collection<String> resolveProjects( RepositorySession session, String repoId, String namespace )
throws MetadataResolutionException;

Collection<String> resolveProjectVersions( String repoId, String namespace, String projectId )
Collection<String> resolveProjectVersions( RepositorySession session, String repoId, String namespace,
String projectId )
throws MetadataResolutionException;

Collection<ArtifactMetadata> resolveArtifacts( String repoId, String namespace, String projectId,
String projectVersion )
Collection<ArtifactMetadata> resolveArtifacts( RepositorySession session, String repoId, String namespace,
String projectId, String projectVersion )
throws MetadataResolutionException;
}
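For orientation only (this sketch is not part of the commit): each resolver method now takes the session as its first argument, and the resolver itself is obtained from that same session, so a caller looks roughly as below. The factory field, repository id, and coordinates are placeholders.

    RepositorySession session = repositorySessionFactory.createSession();
    try
    {
        MetadataResolver resolver = session.getResolver();
        // pass the owning session back in so the resolver can read and update the content repository
        ProjectVersionMetadata metadata =
            resolver.resolveProjectVersion( session, "internal", "org.example", "example-lib", "1.0" );
    }
    catch ( MetadataResolutionException e )
    {
        // resolution failures surface through this checked exception
    }
    finally
    {
        session.close();
    }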
@@ -0,0 +1,96 @@
package org.apache.archiva.metadata.repository;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

/**
* The repository session provides a single interface to accessing Archiva repositories. It provides access to three
* resources:
* <ul>
* <li>{@link MetadataRepository} - the metadata content repository for read/write access, in its current state (no
* remote resources will be retrieved in the process)</li>
* <li>{@link MetadataResolver} - access to resolve metadata content, accommodating metadata not yet stored or up to
* date in the content repository (i.e. virtualised repositories, remote proxied content, or metadata in a different
* model format in the repository storage)</li>
* <li>{@link org.apache.archiva.metadata.repository.storage.RepositoryStorage} - access to the physical storage of a
* repository and the source artifacts and project models</li>
* </ul>
*/
public class RepositorySession
{
private final MetadataRepository repository;

private final MetadataResolver resolver;

private boolean dirty;

// FIXME: include storage here too - perhaps a factory based on repository ID, or one per type to retrieve and
// operate on a given repo within the storage API

public RepositorySession( MetadataRepository metadataRepository, MetadataResolver resolver )
{
this.repository = metadataRepository;
this.resolver = resolver;
}

public MetadataRepository getRepository()
{
return repository;
}

public MetadataResolver getResolver()
{
return resolver;
}

public void save()
{
// FIXME

dirty = false;
}

public void revert()
{
// FIXME

dirty = false;
}

/**
* Close the session. Required to be called for all open sessions to ensure resources are properly released.
* If the session has been marked as dirty, it will be saved. This may save partial changes in the case of a typical
* <code>try { ... } finally { ... }</code> approach - if this is a problem, ensure you revert changes when an
* exception occurs.
*/
public void close()
{
if ( dirty )
{
save();
}

// FIXME
}

public void markDirty()
{
this.dirty = true;
}
}
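A minimal lifecycle sketch for the class above (illustrative only, not part of the commit; the factory field and the facet value are assumed to be available from the caller's context): sessions come from the factory, modifications are saved explicitly, and close() is always called.

    RepositorySession session = repositorySessionFactory.createSession();
    try
    {
        MetadataRepository metadataRepository = session.getRepository();
        // any modification made through the session's repository, e.g. storing an audit event facet
        metadataRepository.addMetadataFacet( "internal", auditEvent );
        session.save(); // persist explicitly; close() also saves when the session was marked dirty
    }
    catch ( MetadataRepositoryException e )
    {
        session.revert(); // discard partial changes rather than letting close() save them
    }
    finally
    {
        session.close(); // required for every open session
    }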
@@ -0,0 +1,25 @@
package org.apache.archiva.metadata.repository;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

public interface RepositorySessionFactory
{
RepositorySession createSession();
}
@@ -22,7 +22,6 @@ package org.apache.archiva.metadata.repository.storage;
import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.model.ProjectMetadata;
import org.apache.archiva.metadata.model.ProjectVersionMetadata;
import org.apache.archiva.metadata.repository.MetadataResolutionException;
import org.apache.archiva.metadata.repository.filter.Filter;

import java.util.Collection;

@@ -30,12 +29,11 @@ import java.util.Collection;
// FIXME: we should drop the repoId parameters and attach this to an instance of a repository storage
public interface RepositoryStorage
{
ProjectMetadata readProjectMetadata( String repoId, String namespace, String projectId )
throws MetadataResolutionException;
ProjectMetadata readProjectMetadata( String repoId, String namespace, String projectId );

ProjectVersionMetadata readProjectVersionMetadata( String repoId, String namespace, String projectId,
String projectVersion )
throws MetadataResolutionException;
throws RepositoryStorageMetadataInvalidException, RepositoryStorageMetadataNotFoundException;

Collection<String> listRootNamespaces( String repoId, Filter<String> filter );
@@ -0,0 +1,45 @@
package org.apache.archiva.metadata.repository.storage;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

public class RepositoryStorageMetadataException
extends Exception
{
private final String id;

protected RepositoryStorageMetadataException( String id, String msg )
{
super( msg );

this.id = id;
}

protected RepositoryStorageMetadataException( String id, String msg, Throwable throwable )
{
super( msg, throwable );

this.id = id;
}

public String getId()
{
return id;
}
}
@@ -0,0 +1,34 @@
package org.apache.archiva.metadata.repository.storage;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

public class RepositoryStorageMetadataInvalidException
extends RepositoryStorageMetadataException
{
public RepositoryStorageMetadataInvalidException( String id, String msg )
{
super( id, msg );
}

public RepositoryStorageMetadataInvalidException( String id, String msg, Throwable throwable )
{
super( id, msg, throwable );
}
}
@@ -0,0 +1,29 @@
package org.apache.archiva.metadata.repository.storage;

/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

public class RepositoryStorageMetadataNotFoundException
extends RepositoryStorageMetadataException
{
public RepositoryStorageMetadataNotFoundException( String msg )
{
super( "missing-pom", msg );
}
}
@@ -19,15 +19,30 @@ package org.apache.archiva.repository.events;
* under the License.
*/

import org.apache.archiva.metadata.model.ProjectVersionMetadata;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.storage.RepositoryStorageMetadataException;

/**
* Listen to events on the repository. This class is a stopgap
* refactoring measure until an event bus is in place to handle
* Listen to events on the repository. This class is a stopgap
* refactoring measure until an event bus is in place to handle
* generic events such as these.
*
* This assumes that the events occur before the action has completed, though they don't currently offer any mechanism
* to prevent an event from occurring or guarantee that it will happen.
*
* FIXME: this needs to be made more permanent since 3rd party plugins will depend on it heavily
*/
public interface RepositoryListener
public interface RepositoryListener
{
/**
* Event for the deletion of a given artifact.
*/
void deleteArtifact( String repositoryId, String namespace, String project, String version, String id );
void deleteArtifact( MetadataRepository metadataRepository, String repositoryId, String namespace, String project,
String version, String id );

void addArtifact( RepositorySession session, String repoId, String namespace, String projectId,
ProjectVersionMetadata metadata );

// FIXME: this would be better as a "processException" method, with the event information captured in a single class
void addArtifactProblem( RepositorySession session, String repoId, String namespace, String projectId,
String projectVersion, RepositoryStorageMetadataException exception );
}
@@ -19,6 +19,7 @@ package org.apache.archiva.audit;
* under the License.
*/

import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;

import java.util.Collection;

@@ -27,36 +28,39 @@ import java.util.List;

public interface AuditManager
{
List<AuditEvent> getMostRecentAuditEvents( List<String> repositoryIds )
List<AuditEvent> getMostRecentAuditEvents( MetadataRepository metadataRepository, List<String> repositoryIds )
throws MetadataRepositoryException;

void addAuditEvent( AuditEvent event )
void addAuditEvent( MetadataRepository repository, AuditEvent event )
throws MetadataRepositoryException;

void deleteAuditEvents( String repositoryId )
void deleteAuditEvents( MetadataRepository metadataRepository, String repositoryId )
throws MetadataRepositoryException;

/**
* Get all audit events from the given repositories that match a certain range
*
* @param repositoryIds the repositories to retrieve events for
* @param startTime find events only after this time
* @param endTime find events only before this time
* @param metadataRepository
* @param repositoryIds the repositories to retrieve events for
* @param startTime find events only after this time
* @param endTime find events only before this time
* @return the list of events found
*/
List<AuditEvent> getAuditEventsInRange( Collection<String> repositoryIds, Date startTime, Date endTime )
List<AuditEvent> getAuditEventsInRange( MetadataRepository metadataRepository, Collection<String> repositoryIds,
Date startTime, Date endTime )
throws MetadataRepositoryException;

/**
* Get all audit events from the given repositories that match a certain range and resource pattern
*
* @param repositoryIds the repositories to retrieve events for
* @param resourcePattern find all events whose resources start with this string
* @param startTime find events only after this time
* @param endTime find events only before this time
* @param metadataRepository
* @param repositoryIds the repositories to retrieve events for
* @param resourcePattern find all events whose resources start with this string
* @param startTime find events only after this time
* @param endTime find events only before this time
* @return the list of events found
*/
List<AuditEvent> getAuditEventsInRange( Collection<String> repositoryIds, String resourcePattern, Date startTime,
Date endTime )
List<AuditEvent> getAuditEventsInRange( MetadataRepository metadataRepository, Collection<String> repositoryIds,
String resourcePattern, Date startTime, Date endTime )
throws MetadataRepositoryException;
}
@@ -40,18 +40,14 @@ import java.util.TimeZone;
public class DefaultAuditManager
implements AuditManager
{
/**
* @plexus.requirement
*/
private MetadataRepository metadataRepository;

private static final int NUM_RECENT_REVENTS = 10;
private static final int NUM_RECENT_EVENTS = 10;

private static final Logger log = LoggerFactory.getLogger( DefaultAuditManager.class );

private static final TimeZone UTC_TIME_ZONE = TimeZone.getTimeZone( "UTC" );

public List<AuditEvent> getMostRecentAuditEvents( List<String> repositoryIds )
public List<AuditEvent> getMostRecentAuditEvents( MetadataRepository metadataRepository,
List<String> repositoryIds )
throws MetadataRepositoryException
{
// TODO: consider a more efficient implementation that directly gets the last ten from the content repository

@@ -65,7 +61,7 @@ public class DefaultAuditManager
}
}
Collections.sort( records );
records = records.subList( 0, records.size() < NUM_RECENT_REVENTS ? records.size() : NUM_RECENT_REVENTS );
records = records.subList( 0, records.size() < NUM_RECENT_EVENTS ? records.size() : NUM_RECENT_EVENTS );

List<AuditEvent> events = new ArrayList<AuditEvent>( records.size() );
for ( AuditRecord record : records )

@@ -78,29 +74,31 @@ public class DefaultAuditManager
return events;
}

public void addAuditEvent( AuditEvent event )
public void addAuditEvent( MetadataRepository repository, AuditEvent event )
throws MetadataRepositoryException
{
// ignore those with no repository - they will still be logged to the textual audit log
if ( event.getRepositoryId() != null )
{
metadataRepository.addMetadataFacet( event.getRepositoryId(), event );
repository.addMetadataFacet( event.getRepositoryId(), event );
}
}

public void deleteAuditEvents( String repositoryId )
public void deleteAuditEvents( MetadataRepository metadataRepository, String repositoryId )
throws MetadataRepositoryException
{
metadataRepository.removeMetadataFacets( repositoryId, AuditEvent.FACET_ID );
}

public List<AuditEvent> getAuditEventsInRange( Collection<String> repositoryIds, Date startTime, Date endTime )
public List<AuditEvent> getAuditEventsInRange( MetadataRepository metadataRepository,
Collection<String> repositoryIds, Date startTime, Date endTime )
throws MetadataRepositoryException
{
return getAuditEventsInRange( repositoryIds, null, startTime, endTime );
return getAuditEventsInRange( metadataRepository, repositoryIds, null, startTime, endTime );
}

public List<AuditEvent> getAuditEventsInRange( Collection<String> repositoryIds, String resource, Date startTime,
public List<AuditEvent> getAuditEventsInRange( MetadataRepository metadataRepository,
Collection<String> repositoryIds, String resource, Date startTime,
Date endTime )
throws MetadataRepositoryException
{

@@ -150,11 +148,6 @@ public class DefaultAuditManager
return fmt;
}

public void setMetadataRepository( MetadataRepository metadataRepository )
{
this.metadataRepository = metadataRepository;
}

private static final class AuditRecord
implements Comparable<AuditRecord>
{
@@ -20,6 +20,8 @@ package org.apache.archiva.audit;
*/

import org.apache.archiva.metadata.repository.MetadataRepositoryException;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -36,20 +38,34 @@ public class MetadataAuditListener
*/
private AuditManager auditManager;

/**
* FIXME: this could be multiple implementations and needs to be configured. It also starts a separate session to
* the originator of the audit event that we may rather want to pass through.
*
* @plexus.requirement
*/
private RepositorySessionFactory repositorySessionFactory;

public void auditEvent( AuditEvent event )
{
// for now we only log upload events, some of the others are quite noisy
if ( event.getAction().equals( AuditEvent.CREATE_FILE ) || event.getAction().equals( AuditEvent.UPLOAD_FILE ) ||
event.getAction().equals( AuditEvent.MERGING_REPOSITORIES ) )
{
RepositorySession repositorySession = repositorySessionFactory.createSession();
try
{
auditManager.addAuditEvent( event );
auditManager.addAuditEvent( repositorySession.getRepository(), event );
repositorySession.save();
}
catch ( MetadataRepositoryException e )
{
log.warn( "Unable to write audit event to repository: " + e.getMessage(), e );
}
finally
{
repositorySession.close();
}
}
}
}
@ -78,7 +78,6 @@ public class AuditManagerTest
|
|||
|
||||
metadataRepositoryControl = MockControl.createControl( MetadataRepository.class );
|
||||
metadataRepository = (MetadataRepository) metadataRepositoryControl.getMock();
|
||||
auditManager.setMetadataRepository( metadataRepository );
|
||||
|
||||
ManagedRepositoryConfiguration repository = new ManagedRepositoryConfiguration();
|
||||
repository.setId( TEST_REPO_ID );
|
||||
|
@ -108,7 +107,8 @@ public class AuditManagerTest
|
|||
}
|
||||
metadataRepositoryControl.replay();
|
||||
|
||||
List<AuditEvent> events = auditManager.getMostRecentAuditEvents( Collections.singletonList( TEST_REPO_ID ) );
|
||||
List<AuditEvent> events = auditManager.getMostRecentAuditEvents( metadataRepository, Collections.singletonList(
|
||||
TEST_REPO_ID ) );
|
||||
assertNotNull( events );
|
||||
assertEquals( numEvents - 1, events.size() );
|
||||
int expectedTimestampCounter = numEvents - 1;
|
||||
|
@ -144,7 +144,8 @@ public class AuditManagerTest
|
|||
}
|
||||
metadataRepositoryControl.replay();
|
||||
|
||||
List<AuditEvent> events = auditManager.getMostRecentAuditEvents( Collections.singletonList( TEST_REPO_ID ) );
|
||||
List<AuditEvent> events = auditManager.getMostRecentAuditEvents( metadataRepository, Collections.singletonList(
|
||||
TEST_REPO_ID ) );
|
||||
assertNotNull( events );
|
||||
assertEquals( numEvents, events.size() );
|
||||
int expectedTimestampCounter = numEvents - 1;
|
||||
|
@ -190,7 +191,8 @@ public class AuditManagerTest
|
|||
}
|
||||
metadataRepositoryControl.replay();
|
||||
|
||||
events = auditManager.getMostRecentAuditEvents( Arrays.asList( TEST_REPO_ID, TEST_REPO_ID_2 ) );
|
||||
events = auditManager.getMostRecentAuditEvents( metadataRepository, Arrays.asList( TEST_REPO_ID,
|
||||
TEST_REPO_ID_2 ) );
|
||||
assertNotNull( events );
|
||||
assertEquals( numEvents - 1, events.size() );
|
||||
int expectedTimestampCounter = numEvents - 1;
|
||||
|
@ -214,7 +216,8 @@ public class AuditManagerTest
|
|||
Collections.emptyList() );
|
||||
metadataRepositoryControl.replay();
|
||||
|
||||
assertTrue( auditManager.getMostRecentAuditEvents( Collections.singletonList( TEST_REPO_ID ) ).isEmpty() );
|
||||
assertTrue( auditManager.getMostRecentAuditEvents( metadataRepository, Collections.singletonList(
|
||||
TEST_REPO_ID ) ).isEmpty() );
|
||||
|
||||
metadataRepositoryControl.verify();
|
||||
}
|
||||
|
@ -229,7 +232,7 @@ public class AuditManagerTest
|
|||
|
||||
metadataRepositoryControl.replay();
|
||||
|
||||
auditManager.addAuditEvent( event );
|
||||
auditManager.addAuditEvent( metadataRepository, event );
|
||||
|
||||
metadataRepositoryControl.verify();
|
||||
}
|
||||
|
@ -244,7 +247,7 @@ public class AuditManagerTest
|
|||
|
||||
metadataRepositoryControl.replay();
|
||||
|
||||
auditManager.addAuditEvent( event );
|
||||
auditManager.addAuditEvent( metadataRepository, event );
|
||||
|
||||
metadataRepositoryControl.verify();
|
||||
}
|
||||
|
@ -257,7 +260,7 @@ public class AuditManagerTest
|
|||
|
||||
metadataRepositoryControl.replay();
|
||||
|
||||
auditManager.deleteAuditEvents( TEST_REPO_ID );
|
||||
auditManager.deleteAuditEvents( metadataRepository, TEST_REPO_ID );
|
||||
|
||||
metadataRepositoryControl.verify();
|
||||
}
|
||||
|
@ -286,9 +289,8 @@ public class AuditManagerTest
|
|||
|
||||
metadataRepositoryControl.replay();
|
||||
|
||||
List<AuditEvent> events = auditManager.getAuditEventsInRange( Collections.singletonList( TEST_REPO_ID ),
|
||||
new Date( current.getTime() - 4000 ), new Date(
|
||||
current.getTime() - 2000 ) );
|
||||
List<AuditEvent> events = auditManager.getAuditEventsInRange( metadataRepository, Collections.singletonList(
|
||||
TEST_REPO_ID ), new Date( current.getTime() - 4000 ), new Date( current.getTime() - 2000 ) );
|
||||
|
||||
assertEquals( 1, events.size() );
|
||||
assertTestEvent( events.get( 0 ), TIMESTAMP_FORMAT.format( expectedTimestamp ), expectedEvent.getResource() );
|
||||
|
@ -323,8 +325,8 @@ public class AuditManagerTest
|
|||
|
||||
metadataRepositoryControl.replay();
|
||||
|
||||
List<AuditEvent> events = auditManager.getAuditEventsInRange( Collections.singletonList( TEST_REPO_ID ),
|
||||
new Date( current.getTime() - 4000 ), current );
|
||||
List<AuditEvent> events = auditManager.getAuditEventsInRange( metadataRepository, Collections.singletonList(
|
||||
TEST_REPO_ID ), new Date( current.getTime() - 4000 ), current );
|
||||
|
||||
assertEquals( 2, events.size() );
|
||||
assertTestEvent( events.get( 0 ), TIMESTAMP_FORMAT.format( ts3 ), expectedEvent3.getResource() );
|
||||
|
@ -360,9 +362,8 @@ public class AuditManagerTest
|
|||
|
||||
metadataRepositoryControl.replay();
|
||||
|
||||
List<AuditEvent> events = auditManager.getAuditEventsInRange( Collections.singletonList( TEST_REPO_ID ),
|
||||
new Date( current.getTime() - 20000 ), new Date(
|
||||
current.getTime() - 2000 ) );
|
||||
List<AuditEvent> events = auditManager.getAuditEventsInRange( metadataRepository, Collections.singletonList(
|
||||
TEST_REPO_ID ), new Date( current.getTime() - 20000 ), new Date( current.getTime() - 2000 ) );
|
||||
|
||||
assertEquals( 2, events.size() );
|
||||
assertTestEvent( events.get( 0 ), TIMESTAMP_FORMAT.format( expectedTimestamp ), expectedEvent2.getResource() );
|
||||
|
@ -403,8 +404,8 @@ public class AuditManagerTest
|
|||
|
||||
metadataRepositoryControl.replay();
|
||||
|
||||
List<AuditEvent> events = auditManager.getAuditEventsInRange( Collections.singletonList( TEST_REPO_ID ),
|
||||
new Date( current.getTime() - 20000 ), current );
|
||||
List<AuditEvent> events = auditManager.getAuditEventsInRange( metadataRepository, Collections.singletonList(
|
||||
TEST_REPO_ID ), new Date( current.getTime() - 20000 ), current );
|
||||
|
||||
assertEquals( 3, events.size() );
|
||||
assertTestEvent( events.get( 0 ), TIMESTAMP_FORMAT.format( ts3 ), expectedEvent3.getResource() );
|
||||
|
@ -447,9 +448,8 @@ public class AuditManagerTest
|
|||
|
||||
metadataRepositoryControl.replay();
|
||||
|
||||
List<AuditEvent> events = auditManager.getAuditEventsInRange( Collections.singletonList( TEST_REPO_ID ),
|
||||
TEST_RESOURCE_BASE, new Date(
|
||||
current.getTime() - 20000 ), current );
|
||||
List<AuditEvent> events = auditManager.getAuditEventsInRange( metadataRepository, Collections.singletonList(
|
||||
TEST_REPO_ID ), TEST_RESOURCE_BASE, new Date( current.getTime() - 20000 ), current );
|
||||
|
||||
assertEquals( 2, events.size() );
|
||||
assertTestEvent( events.get( 0 ), TIMESTAMP_FORMAT.format( ts3 ), expectedEvent3.getResource() );
|
||||
|
@ -489,8 +489,8 @@ public class AuditManagerTest
|
|||
|
||||
metadataRepositoryControl.replay();
|
||||
|
||||
List<AuditEvent> events = auditManager.getAuditEventsInRange( Collections.singletonList( TEST_REPO_ID ), "foo",
|
||||
new Date( current.getTime() - 20000 ), current );
|
||||
List<AuditEvent> events = auditManager.getAuditEventsInRange( metadataRepository, Collections.singletonList(
|
||||
TEST_REPO_ID ), "foo", new Date( current.getTime() - 20000 ), current );
|
||||
|
||||
assertEquals( 0, events.size() );
|
||||
|
||||
|
@ -533,7 +533,8 @@ public class AuditManagerTest
|
|||
|
||||
metadataRepositoryControl.replay();
|
||||
|
||||
List<AuditEvent> events = auditManager.getAuditEventsInRange( Arrays.asList( TEST_REPO_ID, TEST_REPO_ID_2 ),
|
||||
List<AuditEvent> events = auditManager.getAuditEventsInRange( metadataRepository, Arrays.asList( TEST_REPO_ID,
|
||||
TEST_REPO_ID_2 ),
|
||||
new Date( current.getTime() - 20000 ), current );
|
||||
|
||||
assertEquals( 3, events.size() );
|
||||
|
@ -561,9 +562,8 @@ public class AuditManagerTest
|
|||
|
||||
metadataRepositoryControl.replay();
|
||||
|
||||
List<AuditEvent> events = auditManager.getAuditEventsInRange( Collections.singletonList( TEST_REPO_ID ),
|
||||
new Date( current.getTime() - 20000 ), new Date(
|
||||
current.getTime() - 16000 ) );
|
||||
List<AuditEvent> events = auditManager.getAuditEventsInRange( metadataRepository, Collections.singletonList(
|
||||
TEST_REPO_ID ), new Date( current.getTime() - 20000 ), new Date( current.getTime() - 16000 ) );
|
||||
|
||||
assertEquals( 0, events.size() );
|
||||
|
||||
|
|
|
@@ -29,10 +29,6 @@
<artifactId>maven2-repository</artifactId>
<name>Maven 2.x Repository Support</name>
<dependencies>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>problem-reports</artifactId>
</dependency>
<dependency>
<groupId>org.apache.archiva</groupId>
<artifactId>metadata-model</artifactId>
@@ -21,6 +21,8 @@ package org.apache.archiva.dependency.tree.maven2;

import org.apache.archiva.metadata.repository.MetadataResolutionException;
import org.apache.archiva.metadata.repository.MetadataResolver;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
import org.apache.archiva.metadata.repository.storage.RepositoryPathTranslator;
import org.apache.archiva.metadata.repository.storage.maven2.RepositoryModelResolver;
import org.apache.commons.lang.StringUtils;

@@ -100,9 +102,11 @@ public class DefaultDependencyTreeBuilder
private ModelBuilder builder;

/**
* TODO: can have other types, and this might eventually come through from the main request
*
* @plexus.requirement
*/
private MetadataResolver metadataResolver;
private RepositorySessionFactory repositorySessionFactory;

/**
* @plexus.requirement role-hint="maven2"

@@ -138,12 +142,21 @@ public class DefaultDependencyTreeBuilder

Set<Artifact> dependencyArtifacts = createArtifacts( model, null );

ArtifactMetadataSource metadataSource = new MetadataArtifactMetadataSource( repositoryIds );
RepositorySession repositorySession = repositorySessionFactory.createSession();
try
{
ArtifactMetadataSource metadataSource = new MetadataArtifactMetadataSource( repositoryIds,
repositorySession );

// Note that we don't permit going to external repositories. We don't need to pass in a local and remote
// since our metadata source has control over them
collector.collect( dependencyArtifacts, projectArtifact, managedVersions, null, null, metadataSource, null,
Collections.singletonList( listener ) );
// Note that we don't permit going to external repositories. We don't need to pass in a local and remote
// since our metadata source has control over them
collector.collect( dependencyArtifacts, projectArtifact, managedVersions, null, null, metadataSource,
null, Collections.singletonList( listener ) );
}
finally
{
repositorySession.close();
}

DependencyNode rootNode = listener.getRootNode();

@@ -347,9 +360,15 @@ public class DefaultDependencyTreeBuilder
{
private final List<String> repositoryIds;

public MetadataArtifactMetadataSource( List<String> repositoryIds )
private final RepositorySession session;

private final MetadataResolver resolver;

public MetadataArtifactMetadataSource( List<String> repositoryIds, RepositorySession session )
{
this.repositoryIds = repositoryIds;
this.session = session;
resolver = this.session.getResolver();
}

// modified version from MavenMetadataSource to work with the simpler environment

@@ -425,8 +444,8 @@ public class DefaultDependencyTreeBuilder
Collection<String> projectVersions;
try
{
projectVersions = metadataResolver.resolveProjectVersions( repoId, artifact.getGroupId(),
artifact.getArtifactId() );
projectVersions = resolver.resolveProjectVersions( session, repoId, artifact.getGroupId(),
artifact.getArtifactId() );
}
catch ( MetadataResolutionException e )
{
@ -24,13 +24,11 @@ import org.apache.archiva.checksum.ChecksummedFile;
|
|||
import org.apache.archiva.metadata.model.ArtifactMetadata;
|
||||
import org.apache.archiva.metadata.model.ProjectMetadata;
|
||||
import org.apache.archiva.metadata.model.ProjectVersionMetadata;
|
||||
import org.apache.archiva.metadata.repository.MetadataRepository;
|
||||
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
|
||||
import org.apache.archiva.metadata.repository.MetadataResolutionException;
|
||||
import org.apache.archiva.metadata.repository.filter.Filter;
|
||||
import org.apache.archiva.metadata.repository.storage.RepositoryPathTranslator;
|
||||
import org.apache.archiva.metadata.repository.storage.RepositoryStorage;
|
||||
import org.apache.archiva.reports.RepositoryProblemFacet;
|
||||
import org.apache.archiva.metadata.repository.storage.RepositoryStorageMetadataInvalidException;
|
||||
import org.apache.archiva.metadata.repository.storage.RepositoryStorageMetadataNotFoundException;
|
||||
import org.apache.maven.archiva.common.utils.VersionUtil;
|
||||
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
|
||||
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
|
||||
|
@@ -61,6 +59,14 @@ import java.util.Date;
import java.util.List;

/**
 * Maven 2 repository format storage implementation. This class currently takes parameters to indicate the repository to
 * deal with rather than being instantiated per-repository.
 * FIXME: instantiate one per repository and allocate permanently from a factory (which can be obtained within the session).
 * TODO: finish Maven 1 implementation to prove this API
 *
 * The session is passed in as an argument to obtain any necessary resources, rather than the class being instantiated
 * within the session in the context of a single managed repository's resolution needs.
 *
 * @plexus.component role="org.apache.archiva.metadata.repository.storage.RepositoryStorage" role-hint="maven2"
 */
public class Maven2RepositoryStorage
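Per the Javadoc above, the storage component is keyed by the plexus role-hint "maven2" and is handed the repository id on each call rather than being bound to a single repository. A minimal sketch of a caller, inside a method that declares the new storage exceptions (the lookup style mirrors the tests later in this change; the repository id and coordinates are the test values, used here purely for illustration):

    RepositoryStorage storage = (RepositoryStorage) lookup( RepositoryStorage.class, "maven2" );
    // may throw RepositoryStorageMetadataNotFoundException / RepositoryStorageMetadataInvalidException
    ProjectVersionMetadata metadata =
        storage.readProjectVersionMetadata( "test", "org.apache.archiva", "archiva-common", "1.2.1" );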
@@ -81,24 +87,10 @@ public class Maven2RepositoryStorage
     */
    private RepositoryPathTranslator pathTranslator;

    /**
     * @plexus.requirement
     */
    private MetadataRepository metadataRepository;

    private final static Logger log = LoggerFactory.getLogger( Maven2RepositoryStorage.class );

    private static final String METADATA_FILENAME = "maven-metadata.xml";

    private static final String PROBLEM_MISSING_POM = "missing-pom";

    private static final String PROBLEM_INVALID_POM = "invalid-pom";

    private static final String PROBLEM_MISLOCATED_POM = "mislocated-pom";

    private static final List<String> POTENTIAL_PROBLEMS = Arrays.asList( PROBLEM_INVALID_POM, PROBLEM_MISSING_POM,
                                                                          PROBLEM_MISLOCATED_POM );

    public ProjectMetadata readProjectMetadata( String repoId, String namespace, String projectId )
    {
        // TODO: could natively implement the "shared model" concept from the browse action to avoid needing it there?
@@ -107,22 +99,8 @@ public class Maven2RepositoryStorage

    public ProjectVersionMetadata readProjectVersionMetadata( String repoId, String namespace, String projectId,
                                                              String projectVersion )
        throws MetadataResolutionException
        throws RepositoryStorageMetadataNotFoundException, RepositoryStorageMetadataInvalidException
    {
        // Remove problems associated with this version, since we'll be re-adding any that still exist
        // TODO: an event mechanism would remove coupling to the problem reporting plugin
        // TODO: this removes all problems - do we need something that just removes the problems created by this resolver?
        String name = RepositoryProblemFacet.createName( namespace, projectId, projectVersion, null );
        try
        {
            metadataRepository.removeMetadataFacet( repoId, RepositoryProblemFacet.FACET_ID, name );
        }
        catch ( MetadataRepositoryException e )
        {
            log.warn( "Unable to remove repository problem facets for the version being removed: " + e.getMessage(),
                      e );
        }

        ManagedRepositoryConfiguration repositoryConfiguration =
            archivaConfiguration.getConfiguration().findManagedRepositoryById( repoId );
@@ -160,12 +138,9 @@ public class Maven2RepositoryStorage

        if ( !file.exists() )
        {
            // TODO: an event mechanism would remove coupling to the problem reporting plugin
            addProblemReport( repoId, namespace, projectId, projectVersion, PROBLEM_MISSING_POM,
                              "The artifact's POM file '" + file + "' was missing" );

            // metadata could not be resolved
            return null;
            throw new RepositoryStorageMetadataNotFoundException(
                "The artifact's POM file '" + file.getAbsolutePath() + "' was missing" );
        }

        ModelBuildingRequest req = new DefaultModelBuildingRequest();
@@ -181,10 +156,9 @@ public class Maven2RepositoryStorage
        }
        catch ( ModelBuildingException e )
        {
            addProblemReport( repoId, namespace, projectId, projectVersion, PROBLEM_INVALID_POM,
                              "The artifact's POM file '" + file + "' was invalid: " + e.getMessage() );
            String msg = "The artifact's POM file '" + file + "' was invalid: " + e.getMessage();

            throw new MetadataResolutionException( e.getMessage() );
            throw new RepositoryStorageMetadataInvalidException( "invalid-pom", msg, e );
        }

        // Check if the POM is in the correct location
@@ -207,10 +181,7 @@ public class Maven2RepositoryStorage
                message.append( "\nIncorrect version: " ).append( model.getVersion() );
            }

            String msg = message.toString();
            addProblemReport( repoId, namespace, projectId, projectVersion, PROBLEM_MISLOCATED_POM, msg );

            throw new MetadataResolutionException( msg );
            throw new RepositoryStorageMetadataInvalidException( "mislocated-pom", message.toString() );
        }

        ProjectVersionMetadata metadata = new ProjectVersionMetadata();
@@ -243,29 +214,6 @@ public class Maven2RepositoryStorage
        return metadata;
    }

    private void addProblemReport( String repoId, String namespace, String projectId, String projectVersion,
                                   String problemId, String message )
    {
        // TODO: an event mechanism would remove coupling to the problem reporting plugin and allow other plugins to
        // generate metadata on the fly if appropriately checked for missing facets in the resolver
        RepositoryProblemFacet problem = new RepositoryProblemFacet();
        problem.setProblem( problemId );
        problem.setMessage( message );
        problem.setProject( projectId );
        problem.setNamespace( namespace );
        problem.setRepositoryId( repoId );
        problem.setVersion( projectVersion );

        try
        {
            metadataRepository.addMetadataFacet( repoId, problem );
        }
        catch ( MetadataRepositoryException e )
        {
            log.warn( "Unable to add repository problem facets for the version being removed: " + e.getMessage(), e );
        }
    }

    private List<org.apache.archiva.metadata.model.Dependency> convertDependencies( List<Dependency> dependencies )
    {
        List<org.apache.archiva.metadata.model.Dependency> l =
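With addProblemReport removed, the storage class no longer records problem facets itself; it only signals failures through the new storage exceptions, and callers decide how to report them. A short sketch of the new contract (repository id and coordinates are taken from the tests later in this diff and are illustrative only):

    try
    {
        storage.readProjectVersionMetadata( "test", "com.example.test", "invalid-pom", "1.0" );
    }
    catch ( RepositoryStorageMetadataNotFoundException e )
    {
        // no POM could be found; e.getId() carries the problem key, e.g. "missing-pom"
    }
    catch ( RepositoryStorageMetadataInvalidException e )
    {
        // the POM could not be parsed or was mislocated; e.getId() is e.g. "invalid-pom" or "mislocated-pom"
    }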
@ -25,43 +25,53 @@ import org.apache.archiva.metadata.model.ProjectVersionReference;
|
|||
|
||||
import java.util.Collection;
|
||||
|
||||
// FIXME: remove - this is useless, better to mock it or avoid needing it
|
||||
public class TestMetadataResolver
|
||||
implements MetadataResolver
|
||||
{
|
||||
public ProjectVersionMetadata resolveProjectVersion( String repoId, String namespace, String projectId,
|
||||
String projectVersion )
|
||||
public ProjectVersionMetadata resolveProjectVersion( RepositorySession session, String repoId, String namespace,
|
||||
String projectId, String projectVersion )
|
||||
throws MetadataResolutionException
|
||||
{
|
||||
return null; //To change body of implemented methods use File | Settings | File Templates.
|
||||
}
|
||||
|
||||
public Collection<ProjectVersionReference> resolveProjectReferences( String repoId, String namespace,
|
||||
String projectId, String projectVersion )
|
||||
public Collection<ProjectVersionReference> resolveProjectReferences( RepositorySession session, String repoId,
|
||||
String namespace, String projectId,
|
||||
String projectVersion )
|
||||
throws MetadataResolutionException
|
||||
{
|
||||
return null; //To change body of implemented methods use File | Settings | File Templates.
|
||||
}
|
||||
|
||||
public Collection<String> resolveRootNamespaces( String repoId )
|
||||
public Collection<String> resolveRootNamespaces( RepositorySession session, String repoId )
|
||||
throws MetadataResolutionException
|
||||
{
|
||||
return null; //To change body of implemented methods use File | Settings | File Templates.
|
||||
}
|
||||
|
||||
public Collection<String> resolveNamespaces( String repoId, String namespace )
|
||||
public Collection<String> resolveNamespaces( RepositorySession session, String repoId, String namespace )
|
||||
throws MetadataResolutionException
|
||||
{
|
||||
return null; //To change body of implemented methods use File | Settings | File Templates.
|
||||
}
|
||||
|
||||
public Collection<String> resolveProjects( String repoId, String namespace )
|
||||
public Collection<String> resolveProjects( RepositorySession session, String repoId, String namespace )
|
||||
throws MetadataResolutionException
|
||||
{
|
||||
return null; //To change body of implemented methods use File | Settings | File Templates.
|
||||
}
|
||||
|
||||
public Collection<String> resolveProjectVersions( String repoId, String namespace, String projectId )
|
||||
public Collection<String> resolveProjectVersions( RepositorySession session, String repoId, String namespace,
|
||||
String projectId )
|
||||
throws MetadataResolutionException
|
||||
{
|
||||
return null; //To change body of implemented methods use File | Settings | File Templates.
|
||||
}
|
||||
|
||||
public Collection<ArtifactMetadata> resolveArtifacts( String repoId, String namespace, String projectId,
|
||||
String projectVersion )
|
||||
public Collection<ArtifactMetadata> resolveArtifacts( RepositorySession session, String repoId, String namespace,
|
||||
String projectId, String projectVersion )
|
||||
throws MetadataResolutionException
|
||||
{
|
||||
return null; //To change body of implemented methods use File | Settings | File Templates.
|
||||
}
|
||||
|
|
|
@@ -0,0 +1,38 @@
package org.apache.archiva.metadata.repository;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

public class TestRepositorySessionFactory
    implements RepositorySessionFactory
{
    private MetadataRepository metadataRepository = new TestMetadataRepository();

    private MetadataResolver resolver = new TestMetadataResolver();

    public RepositorySession createSession()
    {
        return new RepositorySession( metadataRepository, resolver );
    }

    public void setMetadataRepository( MetadataRepository metadataRepository )
    {
        this.metadataRepository = metadataRepository;
    }
}
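A sketch of how a test might use this factory directly (the plexus wiring through the test components.xml appears further below; the variable names and the repository substitution shown here are illustrative assumptions):

    TestRepositorySessionFactory factory = new TestRepositorySessionFactory();
    factory.setMetadataRepository( new TestMetadataRepository() );

    RepositorySession session = factory.createSession();
    try
    {
        MetadataRepository metadataRepository = session.getRepository();
        // exercise the code under test with the session / repository here
    }
    finally
    {
        session.close();
    }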
@ -21,17 +21,15 @@ package org.apache.archiva.metadata.repository.storage.maven2;
|
|||
|
||||
import org.apache.archiva.metadata.model.ArtifactMetadata;
|
||||
import org.apache.archiva.metadata.model.Dependency;
|
||||
import org.apache.archiva.metadata.model.FacetedMetadata;
|
||||
import org.apache.archiva.metadata.model.License;
|
||||
import org.apache.archiva.metadata.model.MailingList;
|
||||
import org.apache.archiva.metadata.model.ProjectVersionMetadata;
|
||||
import org.apache.archiva.metadata.repository.MetadataRepository;
|
||||
import org.apache.archiva.metadata.repository.MetadataResolutionException;
|
||||
import org.apache.archiva.metadata.repository.filter.AllFilter;
|
||||
import org.apache.archiva.metadata.repository.filter.ExcludesFilter;
|
||||
import org.apache.archiva.metadata.repository.filter.Filter;
|
||||
import org.apache.archiva.metadata.repository.storage.RepositoryStorage;
|
||||
import org.apache.archiva.reports.RepositoryProblemFacet;
|
||||
import org.apache.archiva.metadata.repository.storage.RepositoryStorageMetadataInvalidException;
|
||||
import org.apache.archiva.metadata.repository.storage.RepositoryStorageMetadataNotFoundException;
|
||||
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
|
||||
import org.apache.maven.archiva.configuration.Configuration;
|
||||
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
|
||||
|
@ -49,7 +47,7 @@ public class Maven2RepositoryMetadataResolverTest
|
|||
{
|
||||
private static final Filter<String> ALL = new AllFilter<String>();
|
||||
|
||||
private Maven2RepositoryStorage resolver;
|
||||
private Maven2RepositoryStorage storage;
|
||||
|
||||
private static final String TEST_REPO_ID = "test";
|
||||
|
||||
|
@ -63,8 +61,6 @@ public class Maven2RepositoryMetadataResolverTest
|
|||
|
||||
private static final String EMPTY_SHA1 = "da39a3ee5e6b4b0d3255bfef95601890afd80709";
|
||||
|
||||
private MetadataRepository metadataRepository;
|
||||
|
||||
public void setUp()
|
||||
throws Exception
|
||||
{
|
||||
|
@ -78,16 +74,14 @@ public class Maven2RepositoryMetadataResolverTest
|
|||
c.addManagedRepository( testRepo );
|
||||
configuration.save( c );
|
||||
|
||||
resolver = (Maven2RepositoryStorage) lookup( RepositoryStorage.class, "maven2" );
|
||||
metadataRepository = (MetadataRepository) lookup( MetadataRepository.class );
|
||||
metadataRepository.removeMetadataFacets( TEST_REPO_ID, RepositoryProblemFacet.FACET_ID );
|
||||
storage = (Maven2RepositoryStorage) lookup( RepositoryStorage.class, "maven2" );
|
||||
}
|
||||
|
||||
public void testGetProjectVersionMetadata()
|
||||
throws Exception
|
||||
{
|
||||
ProjectVersionMetadata metadata = resolver.readProjectVersionMetadata( TEST_REPO_ID, "org.apache.archiva",
|
||||
"archiva-common", "1.2.1" );
|
||||
ProjectVersionMetadata metadata = storage.readProjectVersionMetadata( TEST_REPO_ID, "org.apache.archiva",
|
||||
"archiva-common", "1.2.1" );
|
||||
MavenProjectFacet facet = (MavenProjectFacet) metadata.getFacet( MavenProjectFacet.FACET_ID );
|
||||
assertEquals( "jar", facet.getPackaging() );
|
||||
assertEquals( "http://archiva.apache.org/ref/1.2.1/archiva-base/archiva-common", metadata.getUrl() );
|
||||
|
@ -140,9 +134,9 @@ public class Maven2RepositoryMetadataResolverTest
|
|||
public void testGetArtifactMetadata()
|
||||
throws Exception
|
||||
{
|
||||
Collection<ArtifactMetadata> springArtifacts = resolver.readArtifactsMetadata( TEST_REPO_ID,
|
||||
"org.codehaus.plexus",
|
||||
"plexus-spring", "1.2", ALL );
|
||||
Collection<ArtifactMetadata> springArtifacts = storage.readArtifactsMetadata( TEST_REPO_ID,
|
||||
"org.codehaus.plexus",
|
||||
"plexus-spring", "1.2", ALL );
|
||||
List<ArtifactMetadata> artifacts = new ArrayList<ArtifactMetadata>( springArtifacts );
|
||||
Collections.sort( artifacts, new Comparator<ArtifactMetadata>()
|
||||
{
|
||||
|
@ -182,9 +176,9 @@ public class Maven2RepositoryMetadataResolverTest
|
|||
public void testGetArtifactMetadataSnapshots()
|
||||
throws Exception
|
||||
{
|
||||
Collection<ArtifactMetadata> testArtifacts = resolver.readArtifactsMetadata( TEST_REPO_ID, "com.example.test",
|
||||
"test-artifact", "1.0-SNAPSHOT",
|
||||
ALL );
|
||||
Collection<ArtifactMetadata> testArtifacts = storage.readArtifactsMetadata( TEST_REPO_ID, "com.example.test",
|
||||
"test-artifact", "1.0-SNAPSHOT",
|
||||
ALL );
|
||||
List<ArtifactMetadata> artifacts = new ArrayList<ArtifactMetadata>( testArtifacts );
|
||||
Collections.sort( artifacts, new Comparator<ArtifactMetadata>()
|
||||
{
|
||||
|
@ -265,8 +259,8 @@ public class Maven2RepositoryMetadataResolverTest
|
|||
public void testGetProjectVersionMetadataForTimestampedSnapshot()
|
||||
throws Exception
|
||||
{
|
||||
ProjectVersionMetadata metadata = resolver.readProjectVersionMetadata( TEST_REPO_ID, "org.apache", "apache",
|
||||
"5-SNAPSHOT" );
|
||||
ProjectVersionMetadata metadata = storage.readProjectVersionMetadata( TEST_REPO_ID, "org.apache", "apache",
|
||||
"5-SNAPSHOT" );
|
||||
MavenProjectFacet facet = (MavenProjectFacet) metadata.getFacet( MavenProjectFacet.FACET_ID );
|
||||
assertEquals( "pom", facet.getPackaging() );
|
||||
assertEquals( "http://www.apache.org/", metadata.getUrl() );
|
||||
|
@ -302,169 +296,175 @@ public class Maven2RepositoryMetadataResolverTest
|
|||
public void testGetProjectVersionMetadataForTimestampedSnapshotMissingMetadata()
|
||||
throws Exception
|
||||
{
|
||||
FacetedMetadata metadata = resolver.readProjectVersionMetadata( TEST_REPO_ID, "com.example.test",
|
||||
"missing-metadata", "1.0-SNAPSHOT" );
|
||||
assertNull( metadata );
|
||||
try
|
||||
{
|
||||
storage.readProjectVersionMetadata( TEST_REPO_ID, "com.example.test", "missing-metadata", "1.0-SNAPSHOT" );
|
||||
fail( "Should not be found" );
|
||||
}
|
||||
catch ( RepositoryStorageMetadataNotFoundException e )
|
||||
{
|
||||
assertEquals( "missing-pom", e.getId() );
|
||||
}
|
||||
}
|
||||
|
||||
public void testGetProjectVersionMetadataForTimestampedSnapshotMalformedMetadata()
|
||||
throws Exception
|
||||
{
|
||||
FacetedMetadata metadata = resolver.readProjectVersionMetadata( TEST_REPO_ID, "com.example.test",
|
||||
"malformed-metadata", "1.0-SNAPSHOT" );
|
||||
assertNull( metadata );
|
||||
try
|
||||
{
|
||||
storage.readProjectVersionMetadata( TEST_REPO_ID, "com.example.test", "malformed-metadata",
|
||||
"1.0-SNAPSHOT" );
|
||||
fail( "Should not be found" );
|
||||
}
|
||||
catch ( RepositoryStorageMetadataNotFoundException e )
|
||||
{
|
||||
assertEquals( "missing-pom", e.getId() );
|
||||
}
|
||||
}
|
||||
|
||||
public void testGetProjectVersionMetadataForTimestampedSnapshotIncompleteMetadata()
|
||||
throws Exception
|
||||
{
|
||||
FacetedMetadata metadata = resolver.readProjectVersionMetadata( TEST_REPO_ID, "com.example.test",
|
||||
"incomplete-metadata", "1.0-SNAPSHOT" );
|
||||
assertNull( metadata );
|
||||
try
|
||||
{
|
||||
storage.readProjectVersionMetadata( TEST_REPO_ID, "com.example.test", "incomplete-metadata",
|
||||
"1.0-SNAPSHOT" );
|
||||
fail( "Should not be found" );
|
||||
}
|
||||
catch ( RepositoryStorageMetadataNotFoundException e )
|
||||
{
|
||||
assertEquals( "missing-pom", e.getId() );
|
||||
}
|
||||
}
|
||||
|
||||
public void testGetProjectVersionMetadataForInvalidPom()
|
||||
throws Exception
|
||||
{
|
||||
assertTrue( metadataRepository.getMetadataFacets( TEST_REPO_ID, RepositoryProblemFacet.FACET_ID ).isEmpty() );
|
||||
|
||||
try
|
||||
{
|
||||
resolver.readProjectVersionMetadata( TEST_REPO_ID, "com.example.test", "invalid-pom", "1.0" );
|
||||
storage.readProjectVersionMetadata( TEST_REPO_ID, "com.example.test", "invalid-pom", "1.0" );
|
||||
fail( "Should have received an exception due to invalid POM" );
|
||||
}
|
||||
catch ( MetadataResolutionException e )
|
||||
catch ( RepositoryStorageMetadataInvalidException e )
|
||||
{
|
||||
assertFalse( metadataRepository.getMetadataFacets( TEST_REPO_ID,
|
||||
RepositoryProblemFacet.FACET_ID ).isEmpty() );
|
||||
RepositoryProblemFacet facet = (RepositoryProblemFacet) metadataRepository.getMetadataFacet( TEST_REPO_ID,
|
||||
RepositoryProblemFacet.FACET_ID,
|
||||
"com.example.test/invalid-pom/1.0" );
|
||||
assertEquals( "invalid-pom", facet.getProblem() );
|
||||
assertEquals( "invalid-pom", e.getId() );
|
||||
}
|
||||
}
|
||||
|
||||
public void testGetProjectVersionMetadataForMislocatedPom()
|
||||
throws Exception
|
||||
{
|
||||
assertTrue( metadataRepository.getMetadataFacets( TEST_REPO_ID, RepositoryProblemFacet.FACET_ID ).isEmpty() );
|
||||
|
||||
try
|
||||
{
|
||||
resolver.readProjectVersionMetadata( TEST_REPO_ID, "com.example.test", "mislocated-pom", "1.0" );
|
||||
storage.readProjectVersionMetadata( TEST_REPO_ID, "com.example.test", "mislocated-pom", "1.0" );
|
||||
fail( "Should have received an exception due to mislocated POM" );
|
||||
}
|
||||
catch ( MetadataResolutionException e )
|
||||
catch ( RepositoryStorageMetadataInvalidException e )
|
||||
{
|
||||
assertFalse( metadataRepository.getMetadataFacets( TEST_REPO_ID,
|
||||
RepositoryProblemFacet.FACET_ID ).isEmpty() );
|
||||
RepositoryProblemFacet facet = (RepositoryProblemFacet) metadataRepository.getMetadataFacet( TEST_REPO_ID,
|
||||
RepositoryProblemFacet.FACET_ID,
|
||||
"com.example.test/mislocated-pom/1.0" );
|
||||
assertEquals( "mislocated-pom", facet.getProblem() );
|
||||
assertEquals( "mislocated-pom", e.getId() );
|
||||
}
|
||||
}
|
||||
|
||||
public void testGetProjectVersionMetadataForMissingPom()
|
||||
throws Exception
|
||||
{
|
||||
assertTrue( metadataRepository.getMetadataFacets( TEST_REPO_ID, RepositoryProblemFacet.FACET_ID ).isEmpty() );
|
||||
|
||||
FacetedMetadata metadata = resolver.readProjectVersionMetadata( TEST_REPO_ID, "com.example.test", "missing-pom",
|
||||
"1.0" );
|
||||
assertNull( metadata );
|
||||
|
||||
assertFalse( metadataRepository.getMetadataFacets( TEST_REPO_ID, RepositoryProblemFacet.FACET_ID ).isEmpty() );
|
||||
RepositoryProblemFacet facet = (RepositoryProblemFacet) metadataRepository.getMetadataFacet( TEST_REPO_ID,
|
||||
RepositoryProblemFacet.FACET_ID,
|
||||
"com.example.test/missing-pom/1.0" );
|
||||
assertEquals( "missing-pom", facet.getProblem() );
|
||||
|
||||
try
|
||||
{
|
||||
storage.readProjectVersionMetadata( TEST_REPO_ID, "com.example.test", "missing-pom", "1.0" );
|
||||
fail( "Should not be found" );
|
||||
}
|
||||
catch ( RepositoryStorageMetadataNotFoundException e )
|
||||
{
|
||||
assertEquals( "missing-pom", e.getId() );
|
||||
}
|
||||
}
|
||||
|
||||
public void testGetRootNamespaces()
|
||||
{
|
||||
assertEquals( Arrays.asList( "com", "org" ), resolver.listRootNamespaces( TEST_REPO_ID, ALL ) );
|
||||
assertEquals( Arrays.asList( "com", "org" ), storage.listRootNamespaces( TEST_REPO_ID, ALL ) );
|
||||
}
|
||||
|
||||
public void testGetNamespaces()
|
||||
{
|
||||
assertEquals( Arrays.asList( "example" ), resolver.listNamespaces( TEST_REPO_ID, "com", ALL ) );
|
||||
assertEquals( Arrays.asList( "test" ), resolver.listNamespaces( TEST_REPO_ID, "com.example", ALL ) );
|
||||
assertEquals( Collections.<String>emptyList(), resolver.listNamespaces( TEST_REPO_ID, "com.example.test",
|
||||
ALL ) );
|
||||
assertEquals( Arrays.asList( "example" ), storage.listNamespaces( TEST_REPO_ID, "com", ALL ) );
|
||||
assertEquals( Arrays.asList( "test" ), storage.listNamespaces( TEST_REPO_ID, "com.example", ALL ) );
|
||||
assertEquals( Collections.<String>emptyList(), storage.listNamespaces( TEST_REPO_ID, "com.example.test",
|
||||
ALL ) );
|
||||
|
||||
assertEquals( Arrays.asList( "apache", "codehaus" ), resolver.listNamespaces( TEST_REPO_ID, "org", ALL ) );
|
||||
assertEquals( Arrays.asList( "archiva", "maven" ), resolver.listNamespaces( TEST_REPO_ID, "org.apache", ALL ) );
|
||||
assertEquals( Collections.<String>emptyList(), resolver.listNamespaces( TEST_REPO_ID, "org.apache.archiva",
|
||||
ALL ) );
|
||||
assertEquals( Arrays.asList( "plugins", "shared" ), resolver.listNamespaces( TEST_REPO_ID, "org.apache.maven",
|
||||
ALL ) );
|
||||
assertEquals( Collections.<String>emptyList(), resolver.listNamespaces( TEST_REPO_ID,
|
||||
"org.apache.maven.plugins", ALL ) );
|
||||
assertEquals( Collections.<String>emptyList(), resolver.listNamespaces( TEST_REPO_ID, "org.apache.maven.shared",
|
||||
ALL ) );
|
||||
assertEquals( Arrays.asList( "apache", "codehaus" ), storage.listNamespaces( TEST_REPO_ID, "org", ALL ) );
|
||||
assertEquals( Arrays.asList( "archiva", "maven" ), storage.listNamespaces( TEST_REPO_ID, "org.apache", ALL ) );
|
||||
assertEquals( Collections.<String>emptyList(), storage.listNamespaces( TEST_REPO_ID, "org.apache.archiva",
|
||||
ALL ) );
|
||||
assertEquals( Arrays.asList( "plugins", "shared" ), storage.listNamespaces( TEST_REPO_ID, "org.apache.maven",
|
||||
ALL ) );
|
||||
assertEquals( Collections.<String>emptyList(), storage.listNamespaces( TEST_REPO_ID, "org.apache.maven.plugins",
|
||||
ALL ) );
|
||||
assertEquals( Collections.<String>emptyList(), storage.listNamespaces( TEST_REPO_ID, "org.apache.maven.shared",
|
||||
ALL ) );
|
||||
|
||||
assertEquals( Arrays.asList( "plexus" ), resolver.listNamespaces( TEST_REPO_ID, "org.codehaus", ALL ) );
|
||||
assertEquals( Collections.<String>emptyList(), resolver.listNamespaces( TEST_REPO_ID, "org.codehaus.plexus",
|
||||
ALL ) );
|
||||
assertEquals( Arrays.asList( "plexus" ), storage.listNamespaces( TEST_REPO_ID, "org.codehaus", ALL ) );
|
||||
assertEquals( Collections.<String>emptyList(), storage.listNamespaces( TEST_REPO_ID, "org.codehaus.plexus",
|
||||
ALL ) );
|
||||
}
|
||||
|
||||
public void testGetProjects()
|
||||
{
|
||||
assertEquals( Collections.<String>emptyList(), resolver.listProjects( TEST_REPO_ID, "com", ALL ) );
|
||||
assertEquals( Collections.<String>emptyList(), resolver.listProjects( TEST_REPO_ID, "com.example", ALL ) );
|
||||
assertEquals( Collections.<String>emptyList(), storage.listProjects( TEST_REPO_ID, "com", ALL ) );
|
||||
assertEquals( Collections.<String>emptyList(), storage.listProjects( TEST_REPO_ID, "com.example", ALL ) );
|
||||
assertEquals( Arrays.asList( "incomplete-metadata", "invalid-pom", "malformed-metadata", "mislocated-pom",
|
||||
"missing-metadata", "test-artifact" ), resolver.listProjects( TEST_REPO_ID,
|
||||
"com.example.test",
|
||||
ALL ) );
|
||||
"missing-metadata", "test-artifact" ), storage.listProjects( TEST_REPO_ID,
|
||||
"com.example.test",
|
||||
ALL ) );
|
||||
|
||||
assertEquals( Collections.<String>emptyList(), resolver.listProjects( TEST_REPO_ID, "org", ALL ) );
|
||||
assertEquals( Arrays.asList( "apache" ), resolver.listProjects( TEST_REPO_ID, "org.apache", ALL ) );
|
||||
assertEquals( Collections.<String>emptyList(), storage.listProjects( TEST_REPO_ID, "org", ALL ) );
|
||||
assertEquals( Arrays.asList( "apache" ), storage.listProjects( TEST_REPO_ID, "org.apache", ALL ) );
|
||||
assertEquals( Arrays.asList( "archiva", "archiva-base", "archiva-common", "archiva-modules", "archiva-parent" ),
|
||||
resolver.listProjects( TEST_REPO_ID, "org.apache.archiva", ALL ) );
|
||||
assertEquals( Collections.<String>emptyList(), resolver.listProjects( TEST_REPO_ID, "org.apache.maven", ALL ) );
|
||||
assertEquals( Collections.<String>emptyList(), resolver.listProjects( TEST_REPO_ID, "org.apache.maven.plugins",
|
||||
ALL ) );
|
||||
assertEquals( Arrays.asList( "maven-downloader" ), resolver.listProjects( TEST_REPO_ID,
|
||||
"org.apache.maven.shared", ALL ) );
|
||||
storage.listProjects( TEST_REPO_ID, "org.apache.archiva", ALL ) );
|
||||
assertEquals( Collections.<String>emptyList(), storage.listProjects( TEST_REPO_ID, "org.apache.maven", ALL ) );
|
||||
assertEquals( Collections.<String>emptyList(), storage.listProjects( TEST_REPO_ID, "org.apache.maven.plugins",
|
||||
ALL ) );
|
||||
assertEquals( Arrays.asList( "maven-downloader" ), storage.listProjects( TEST_REPO_ID,
|
||||
"org.apache.maven.shared", ALL ) );
|
||||
}
|
||||
|
||||
public void testGetProjectVersions()
|
||||
{
|
||||
assertEquals( Arrays.asList( "1.0-SNAPSHOT" ), resolver.listProjectVersions( TEST_REPO_ID, "com.example.test",
|
||||
"incomplete-metadata", ALL ) );
|
||||
assertEquals( Arrays.asList( "1.0-SNAPSHOT" ), resolver.listProjectVersions( TEST_REPO_ID, "com.example.test",
|
||||
"malformed-metadata", ALL ) );
|
||||
assertEquals( Arrays.asList( "1.0-SNAPSHOT" ), resolver.listProjectVersions( TEST_REPO_ID, "com.example.test",
|
||||
"missing-metadata", ALL ) );
|
||||
assertEquals( Arrays.asList( "1.0" ), resolver.listProjectVersions( TEST_REPO_ID, "com.example.test",
|
||||
"invalid-pom", ALL ) );
|
||||
assertEquals( Arrays.asList( "1.0-SNAPSHOT" ), storage.listProjectVersions( TEST_REPO_ID, "com.example.test",
|
||||
"incomplete-metadata", ALL ) );
|
||||
assertEquals( Arrays.asList( "1.0-SNAPSHOT" ), storage.listProjectVersions( TEST_REPO_ID, "com.example.test",
|
||||
"malformed-metadata", ALL ) );
|
||||
assertEquals( Arrays.asList( "1.0-SNAPSHOT" ), storage.listProjectVersions( TEST_REPO_ID, "com.example.test",
|
||||
"missing-metadata", ALL ) );
|
||||
assertEquals( Arrays.asList( "1.0" ), storage.listProjectVersions( TEST_REPO_ID, "com.example.test",
|
||||
"invalid-pom", ALL ) );
|
||||
|
||||
assertEquals( Arrays.asList( "4", "5-SNAPSHOT" ), resolver.listProjectVersions( TEST_REPO_ID, "org.apache",
|
||||
"apache", ALL ) );
|
||||
assertEquals( Arrays.asList( "4", "5-SNAPSHOT" ), storage.listProjectVersions( TEST_REPO_ID, "org.apache",
|
||||
"apache", ALL ) );
|
||||
|
||||
assertEquals( Arrays.asList( "1.2.1", "1.2.2" ), resolver.listProjectVersions( TEST_REPO_ID,
|
||||
"org.apache.archiva", "archiva",
|
||||
ALL ) );
|
||||
assertEquals( Arrays.asList( "1.2.1" ), resolver.listProjectVersions( TEST_REPO_ID, "org.apache.archiva",
|
||||
"archiva-base", ALL ) );
|
||||
assertEquals( Arrays.asList( "1.2.1" ), resolver.listProjectVersions( TEST_REPO_ID, "org.apache.archiva",
|
||||
"archiva-common", ALL ) );
|
||||
assertEquals( Arrays.asList( "1.2.1" ), resolver.listProjectVersions( TEST_REPO_ID, "org.apache.archiva",
|
||||
"archiva-modules", ALL ) );
|
||||
assertEquals( Arrays.asList( "3" ), resolver.listProjectVersions( TEST_REPO_ID, "org.apache.archiva",
|
||||
"archiva-parent", ALL ) );
|
||||
assertEquals( Arrays.asList( "1.2.1", "1.2.2" ), storage.listProjectVersions( TEST_REPO_ID,
|
||||
"org.apache.archiva", "archiva",
|
||||
ALL ) );
|
||||
assertEquals( Arrays.asList( "1.2.1" ), storage.listProjectVersions( TEST_REPO_ID, "org.apache.archiva",
|
||||
"archiva-base", ALL ) );
|
||||
assertEquals( Arrays.asList( "1.2.1" ), storage.listProjectVersions( TEST_REPO_ID, "org.apache.archiva",
|
||||
"archiva-common", ALL ) );
|
||||
assertEquals( Arrays.asList( "1.2.1" ), storage.listProjectVersions( TEST_REPO_ID, "org.apache.archiva",
|
||||
"archiva-modules", ALL ) );
|
||||
assertEquals( Arrays.asList( "3" ), storage.listProjectVersions( TEST_REPO_ID, "org.apache.archiva",
|
||||
"archiva-parent", ALL ) );
|
||||
|
||||
assertEquals( Collections.<String>emptyList(), resolver.listProjectVersions( TEST_REPO_ID,
|
||||
"org.apache.maven.shared",
|
||||
"maven-downloader", ALL ) );
|
||||
assertEquals( Collections.<String>emptyList(), storage.listProjectVersions( TEST_REPO_ID,
|
||||
"org.apache.maven.shared",
|
||||
"maven-downloader", ALL ) );
|
||||
}
|
||||
|
||||
public void testGetArtifacts()
|
||||
{
|
||||
List<ArtifactMetadata> artifacts = new ArrayList<ArtifactMetadata>( resolver.readArtifactsMetadata(
|
||||
TEST_REPO_ID, "org.codehaus.plexus", "plexus-spring", "1.2", ALL ) );
|
||||
List<ArtifactMetadata> artifacts = new ArrayList<ArtifactMetadata>( storage.readArtifactsMetadata( TEST_REPO_ID,
|
||||
"org.codehaus.plexus",
|
||||
"plexus-spring",
|
||||
"1.2",
|
||||
ALL ) );
|
||||
assertEquals( 3, artifacts.size() );
|
||||
Collections.sort( artifacts, new Comparator<ArtifactMetadata>()
|
||||
{
|
||||
|
@ -484,8 +484,11 @@ public class Maven2RepositoryMetadataResolverTest
|
|||
{
|
||||
ExcludesFilter<String> filter = new ExcludesFilter<String>( Collections.singletonList(
|
||||
"plexus-spring-1.2.pom" ) );
|
||||
List<ArtifactMetadata> artifacts = new ArrayList<ArtifactMetadata>( resolver.readArtifactsMetadata(
|
||||
TEST_REPO_ID, "org.codehaus.plexus", "plexus-spring", "1.2", filter ) );
|
||||
List<ArtifactMetadata> artifacts = new ArrayList<ArtifactMetadata>( storage.readArtifactsMetadata( TEST_REPO_ID,
|
||||
"org.codehaus.plexus",
|
||||
"plexus-spring",
|
||||
"1.2",
|
||||
filter ) );
|
||||
assertEquals( 2, artifacts.size() );
|
||||
Collections.sort( artifacts, new Comparator<ArtifactMetadata>()
|
||||
{
|
||||
|
@ -501,8 +504,11 @@ public class Maven2RepositoryMetadataResolverTest
|
|||
|
||||
public void testGetArtifactsTimestampedSnapshots()
|
||||
{
|
||||
List<ArtifactMetadata> artifacts = new ArrayList<ArtifactMetadata>( resolver.readArtifactsMetadata(
|
||||
TEST_REPO_ID, "com.example.test", "missing-metadata", "1.0-SNAPSHOT", ALL ) );
|
||||
List<ArtifactMetadata> artifacts = new ArrayList<ArtifactMetadata>( storage.readArtifactsMetadata( TEST_REPO_ID,
|
||||
"com.example.test",
|
||||
"missing-metadata",
|
||||
"1.0-SNAPSHOT",
|
||||
ALL ) );
|
||||
assertEquals( 1, artifacts.size() );
|
||||
|
||||
ArtifactMetadata artifact = artifacts.get( 0 );
|
||||
|
|
|
@@ -29,5 +29,9 @@
      <role>org.apache.maven.archiva.configuration.ArchivaConfiguration</role>
      <implementation>org.apache.archiva.configuration.TestConfiguration</implementation>
    </component>
    <component>
      <role>org.apache.archiva.metadata.repository.RepositorySessionFactory</role>
      <implementation>org.apache.archiva.metadata.repository.TestRepositorySessionFactory</implementation>
    </component>
  </components>
</component-set>
@@ -27,5 +27,9 @@
      <role>org.apache.archiva.metadata.repository.MetadataRepository</role>
      <implementation>org.apache.archiva.metadata.repository.TestMetadataRepository</implementation>
    </component>
    <component>
      <role>org.apache.archiva.metadata.repository.RepositorySessionFactory</role>
      <implementation>org.apache.archiva.metadata.repository.TestRepositorySessionFactory</implementation>
    </component>
  </components>
</component-set>
@ -59,21 +59,12 @@ import java.util.Properties;
|
|||
import java.util.Set;
|
||||
import java.util.StringTokenizer;
|
||||
|
||||
/**
|
||||
* @plexus.component role="org.apache.archiva.metadata.repository.MetadataRepository"
|
||||
*/
|
||||
public class FileMetadataRepository
|
||||
implements MetadataRepository
|
||||
{
|
||||
/**
|
||||
* @plexus.requirement role="org.apache.archiva.metadata.model.MetadataFacetFactory"
|
||||
*/
|
||||
private Map<String, MetadataFacetFactory> metadataFacetFactories;
|
||||
private final Map<String, MetadataFacetFactory> metadataFacetFactories;
|
||||
|
||||
/**
|
||||
* @plexus.requirement
|
||||
*/
|
||||
private ArchivaConfiguration configuration;
|
||||
private final ArchivaConfiguration configuration;
|
||||
|
||||
private static final Logger log = LoggerFactory.getLogger( FileMetadataRepository.class );
|
||||
|
||||
|
@ -85,6 +76,13 @@ public class FileMetadataRepository
|
|||
|
||||
private static final String METADATA_KEY = "metadata";
|
||||
|
||||
public FileMetadataRepository( Map<String, MetadataFacetFactory> metadataFacetFactories,
|
||||
ArchivaConfiguration configuration )
|
||||
{
|
||||
this.metadataFacetFactories = metadataFacetFactories;
|
||||
this.configuration = configuration;
|
||||
}
|
||||
|
||||
private File getBaseDirectory( String repoId )
|
||||
{
|
||||
// TODO: should be configurable, like the index
|
||||
|
@ -1066,16 +1064,6 @@ public class FileMetadataRepository
|
|||
}
|
||||
}
|
||||
|
||||
public void setMetadataFacetFactories( Map<String, MetadataFacetFactory> metadataFacetFactories )
|
||||
{
|
||||
this.metadataFacetFactories = metadataFacetFactories;
|
||||
}
|
||||
|
||||
public void setConfiguration( ArchivaConfiguration configuration )
|
||||
{
|
||||
this.configuration = configuration;
|
||||
}
|
||||
|
||||
private static class ArtifactComparator
|
||||
implements Comparator<ArtifactMetadata>
|
||||
{
|
||||
|
|
|
@ -0,0 +1,58 @@
|
|||
package org.apache.archiva.metadata.repository.file;
|
||||
|
||||
/*
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import org.apache.archiva.metadata.model.MetadataFacetFactory;
|
||||
import org.apache.archiva.metadata.repository.MetadataRepository;
|
||||
import org.apache.archiva.metadata.repository.MetadataResolver;
|
||||
import org.apache.archiva.metadata.repository.RepositorySession;
|
||||
import org.apache.archiva.metadata.repository.RepositorySessionFactory;
|
||||
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* @plexus.component role="org.apache.archiva.metadata.repository.RepositorySessionFactory" role-hint="file"
|
||||
*/
|
||||
public class FileRepositorySessionFactory
|
||||
implements RepositorySessionFactory
|
||||
{
|
||||
/**
|
||||
* @plexus.requirement role="org.apache.archiva.metadata.model.MetadataFacetFactory"
|
||||
*/
|
||||
private Map<String, MetadataFacetFactory> metadataFacetFactories;
|
||||
|
||||
/**
|
||||
* @plexus.requirement
|
||||
*/
|
||||
private ArchivaConfiguration configuration;
|
||||
|
||||
/**
|
||||
* @plexus.requirement
|
||||
*/
|
||||
private MetadataResolver metadataResolver;
|
||||
|
||||
public RepositorySession createSession()
|
||||
{
|
||||
MetadataRepository metadataRepository = new FileMetadataRepository( metadataFacetFactories, configuration );
|
||||
|
||||
return new RepositorySession( metadataRepository, metadataResolver );
|
||||
}
|
||||
}
|
|
@@ -47,10 +47,7 @@ public class FileMetadataRepositoryTest
        ArchivaConfiguration config = createTestConfiguration( directory );
        Map<String, MetadataFacetFactory> factories = createTestMetadataFacetFactories();

        FileMetadataRepository repository = new FileMetadataRepository();
        repository.setConfiguration( config );
        repository.setMetadataFacetFactories( factories );
        this.repository = repository;
        this.repository = new FileMetadataRepository( factories, config );
    }

    protected static ArchivaConfiguration createTestConfiguration( File directory )
@@ -19,8 +19,11 @@ package org.apache.archiva.reports;
 * under the License.
 */

import org.apache.archiva.metadata.model.ProjectVersionMetadata;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;
import org.apache.archiva.metadata.repository.RepositorySession;
import org.apache.archiva.metadata.repository.storage.RepositoryStorageMetadataException;
import org.apache.archiva.repository.events.RepositoryListener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -35,12 +38,9 @@ public class RepositoryProblemEventListener
{
    private Logger log = LoggerFactory.getLogger( RepositoryProblemEventListener.class );

    /**
     * @plexus.requirement
     */
    private MetadataRepository metadataRepository;

    public void deleteArtifact( String repositoryId, String namespace, String project, String version, String id )
    // FIXME: move to session
    public void deleteArtifact( MetadataRepository metadataRepository, String repositoryId, String namespace,
                                String project, String version, String id )
    {
        String name = RepositoryProblemFacet.createName( namespace, project, version, id );

@@ -53,4 +53,46 @@ public class RepositoryProblemEventListener
            log.warn( "Unable to remove metadata facet as part of delete event: " + e.getMessage(), e );
        }
    }

    public void addArtifact( RepositorySession session, String repoId, String namespace, String projectId,
                             ProjectVersionMetadata metadata )
    {
        // Remove problems associated with this version on successful addition
        // TODO: this removes all problems - do we need something that just remove the problems we know are corrected?
        String name = RepositoryProblemFacet.createName( namespace, projectId, metadata.getId(), null );
        try
        {
            MetadataRepository metadataRepository = session.getRepository();
            metadataRepository.removeMetadataFacet( repoId, RepositoryProblemFacet.FACET_ID, name );
            session.markDirty();
        }
        catch ( MetadataRepositoryException e )
        {
            log.warn( "Unable to remove repository problem facets for the version being corrected in the repository: " +
                          e.getMessage(), e );
        }
    }

    public void addArtifactProblem( RepositorySession session, String repoId, String namespace, String projectId,
                                    String projectVersion, RepositoryStorageMetadataException exception )
    {
        RepositoryProblemFacet problem = new RepositoryProblemFacet();
        problem.setMessage( exception.getMessage() );
        problem.setProject( projectId );
        problem.setNamespace( namespace );
        problem.setRepositoryId( repoId );
        problem.setVersion( projectVersion );
        problem.setProblem( exception.getId() );

        try
        {
            session.getRepository().addMetadataFacet( repoId, problem );
            session.markDirty();
        }
        catch ( MetadataRepositoryException e )
        {
            log.warn( "Unable to add repository problem facets for the version being removed: " + e.getMessage(), e );
        }
    }

}
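Taken together with the storage changes above, the listener now receives the session explicitly: on a successful read the caller can use addArtifact to clear stale problem facets, and on failure it hands the storage exception to addArtifactProblem. A hedged sketch of that flow (assuming RepositoryStorageMetadataException is the common parent of the not-found and invalid exceptions, as the addArtifactProblem signature suggests; the surrounding variables are illustrative):

    try
    {
        ProjectVersionMetadata versionMetadata =
            storage.readProjectVersionMetadata( repoId, namespace, projectId, projectVersion );
        listener.addArtifact( session, repoId, namespace, projectId, versionMetadata );
    }
    catch ( RepositoryStorageMetadataException e )
    {
        listener.addArtifactProblem( session, repoId, namespace, projectId, projectVersion, e );
    }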
@ -44,14 +44,9 @@ public class DefaultRepositoryStatisticsManager
|
|||
{
|
||||
private static final Logger log = LoggerFactory.getLogger( DefaultRepositoryStatisticsManager.class );
|
||||
|
||||
/**
|
||||
* @plexus.requirement
|
||||
*/
|
||||
private MetadataRepository metadataRepository;
|
||||
|
||||
private static final TimeZone UTC_TIME_ZONE = TimeZone.getTimeZone( "UTC" );
|
||||
|
||||
public RepositoryStatistics getLastStatistics( String repositoryId )
|
||||
public RepositoryStatistics getLastStatistics( MetadataRepository metadataRepository, String repositoryId )
|
||||
throws MetadataRepositoryException
|
||||
{
|
||||
// TODO: consider a more efficient implementation that directly gets the last one from the content repository
|
||||
|
@ -69,12 +64,13 @@ public class DefaultRepositoryStatisticsManager
|
|||
}
|
||||
}
|
||||
|
||||
private void walkRepository( RepositoryStatistics stats, String repositoryId, String ns )
|
||||
private void walkRepository( MetadataRepository metadataRepository, RepositoryStatistics stats, String repositoryId,
|
||||
String ns )
|
||||
throws MetadataResolutionException
|
||||
{
|
||||
for ( String namespace : metadataRepository.getNamespaces( repositoryId, ns ) )
|
||||
{
|
||||
walkRepository( stats, repositoryId, ns + "." + namespace );
|
||||
walkRepository( metadataRepository, stats, repositoryId, ns + "." + namespace );
|
||||
}
|
||||
|
||||
Collection<String> projects = metadataRepository.getProjects( repositoryId, ns );
|
||||
|
@ -106,9 +102,8 @@ public class DefaultRepositoryStatisticsManager
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
public void addStatisticsAfterScan( String repositoryId, Date startTime, Date endTime, long totalFiles,
|
||||
long newFiles )
|
||||
public void addStatisticsAfterScan( MetadataRepository metadataRepository, String repositoryId, Date startTime,
|
||||
Date endTime, long totalFiles, long newFiles )
|
||||
throws MetadataRepositoryException
|
||||
{
|
||||
RepositoryStatistics repositoryStatistics = new RepositoryStatistics();
|
||||
|
@ -131,7 +126,7 @@ public class DefaultRepositoryStatisticsManager
|
|||
{
|
||||
for ( String ns : metadataRepository.getRootNamespaces( repositoryId ) )
|
||||
{
|
||||
walkRepository( repositoryStatistics, repositoryId, ns );
|
||||
walkRepository( metadataRepository, repositoryStatistics, repositoryId, ns );
|
||||
}
|
||||
}
|
||||
catch ( MetadataResolutionException e )
|
||||
|
@ -143,13 +138,14 @@ public class DefaultRepositoryStatisticsManager
|
|||
metadataRepository.addMetadataFacet( repositoryId, repositoryStatistics );
|
||||
}
|
||||
|
||||
public void deleteStatistics( String repositoryId )
|
||||
public void deleteStatistics( MetadataRepository metadataRepository, String repositoryId )
|
||||
throws MetadataRepositoryException
|
||||
{
|
||||
metadataRepository.removeMetadataFacets( repositoryId, RepositoryStatistics.FACET_ID );
|
||||
}
|
||||
|
||||
public List<RepositoryStatistics> getStatisticsInRange( String repositoryId, Date startTime, Date endTime )
|
||||
public List<RepositoryStatistics> getStatisticsInRange( MetadataRepository metadataRepository, String repositoryId,
|
||||
Date startTime, Date endTime )
|
||||
throws MetadataRepositoryException
|
||||
{
|
||||
List<RepositoryStatistics> results = new ArrayList<RepositoryStatistics>();
|
||||
|
@ -183,9 +179,4 @@ public class DefaultRepositoryStatisticsManager
|
|||
fmt.setTimeZone( UTC_TIME_ZONE );
|
||||
return fmt;
|
||||
}
|
||||
|
||||
public void setMetadataRepository( MetadataRepository metadataRepository )
|
||||
{
|
||||
this.metadataRepository = metadataRepository;
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -19,6 +19,7 @@ package org.apache.archiva.metadata.repository.stats;
 * under the License.
 */

import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.MetadataRepositoryException;

import java.util.Date;

@@ -26,15 +27,17 @@ import java.util.List;

public interface RepositoryStatisticsManager
{
    RepositoryStatistics getLastStatistics( String repositoryId )
    RepositoryStatistics getLastStatistics( MetadataRepository metadataRepository, String repositoryId )
        throws MetadataRepositoryException;

    void addStatisticsAfterScan( String repositoryId, Date startTime, Date endTime, long totalFiles, long newFiles )
    void addStatisticsAfterScan( MetadataRepository metadataRepository, String repositoryId, Date startTime,
                                 Date endTime, long totalFiles, long newFiles )
        throws MetadataRepositoryException;

    void deleteStatistics( String repositoryId )
    void deleteStatistics( MetadataRepository metadataRepository, String repositoryId )
        throws MetadataRepositoryException;

    List<RepositoryStatistics> getStatisticsInRange( String repositoryId, Date startTime, Date endTime )
    List<RepositoryStatistics> getStatisticsInRange( MetadataRepository metadataRepository, String repositoryId,
                                                     Date startTime, Date endTime )
        throws MetadataRepositoryException;
}
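The statistics manager no longer holds its own MetadataRepository; callers pass in the repository obtained from the current session on every call. A brief sketch of the updated usage (assumes an open RepositorySession and a caller that declares MetadataRepositoryException; the repository id is illustrative, and the file counts mirror the test values below):

    MetadataRepository metadataRepository = session.getRepository();
    repositoryStatisticsManager.addStatisticsAfterScan( metadataRepository, "test", startTime, endTime, 56345, 45 );
    RepositoryStatistics stats = repositoryStatisticsManager.getLastStatistics( metadataRepository, "test" );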
@ -71,7 +71,6 @@ public class RepositoryStatisticsManagerTest
|
|||
|
||||
metadataRepositoryControl = MockControl.createControl( MetadataRepository.class );
|
||||
metadataRepository = (MetadataRepository) metadataRepositoryControl.getMock();
|
||||
repositoryStatisticsManager.setMetadataRepository( metadataRepository );
|
||||
}
|
||||
|
||||
public void testGetLatestStats()
|
||||
|
@ -98,7 +97,7 @@ public class RepositoryStatisticsManagerTest
|
|||
SECOND_TEST_SCAN ), stats );
|
||||
metadataRepositoryControl.replay();
|
||||
|
||||
stats = repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID );
|
||||
stats = repositoryStatisticsManager.getLastStatistics( metadataRepository, TEST_REPO_ID );
|
||||
assertNotNull( stats );
|
||||
assertEquals( 1314527915L, stats.getTotalArtifactFileSize() );
|
||||
assertEquals( 123, stats.getNewFileCount() );
|
||||
|
@ -121,7 +120,7 @@ public class RepositoryStatisticsManagerTest
|
|||
Collections.emptyList() );
|
||||
metadataRepositoryControl.replay();
|
||||
|
||||
RepositoryStatistics stats = repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID );
|
||||
RepositoryStatistics stats = repositoryStatisticsManager.getLastStatistics( metadataRepository, TEST_REPO_ID );
|
||||
assertNull( stats );
|
||||
|
||||
metadataRepositoryControl.verify();
|
||||
|
@ -148,9 +147,10 @@ public class RepositoryStatisticsManagerTest
|
|||
|
||||
metadataRepositoryControl.replay();
|
||||
|
||||
repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, startTime, current, 56345, 45 );
|
||||
repositoryStatisticsManager.addStatisticsAfterScan( metadataRepository, TEST_REPO_ID, startTime, current, 56345,
|
||||
45 );
|
||||
|
||||
stats = repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID );
|
||||
stats = repositoryStatisticsManager.getLastStatistics( metadataRepository, TEST_REPO_ID );
|
||||
assertNotNull( stats );
|
||||
assertEquals( 246900, stats.getTotalArtifactFileSize() );
|
||||
assertEquals( 45, stats.getNewFileCount() );
|
||||
|
@ -195,16 +195,16 @@ public class RepositoryStatisticsManagerTest
|
|||
|
||||
metadataRepositoryControl.replay();
|
||||
|
||||
repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, startTime1, stats1.getScanEndTime(), 56345,
|
||||
45 );
|
||||
repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, startTime2, stats2.getScanEndTime(), 56345,
|
||||
45 );
|
||||
repositoryStatisticsManager.addStatisticsAfterScan( metadataRepository, TEST_REPO_ID, startTime1,
|
||||
stats1.getScanEndTime(), 56345, 45 );
|
||||
repositoryStatisticsManager.addStatisticsAfterScan( metadataRepository, TEST_REPO_ID, startTime2,
|
||||
stats2.getScanEndTime(), 56345, 45 );
|
||||
|
||||
assertNotNull( repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID ) );
|
||||
assertNotNull( repositoryStatisticsManager.getLastStatistics( metadataRepository, TEST_REPO_ID ) );
|
||||
|
||||
repositoryStatisticsManager.deleteStatistics( TEST_REPO_ID );
|
||||
repositoryStatisticsManager.deleteStatistics( metadataRepository, TEST_REPO_ID );
|
||||
|
||||
assertNull( repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID ) );
|
||||
assertNull( repositoryStatisticsManager.getLastStatistics( metadataRepository, TEST_REPO_ID ) );
|
||||
|
||||
metadataRepositoryControl.verify();
|
||||
}
|
||||
|
@ -220,11 +220,11 @@ public class RepositoryStatisticsManagerTest
|
|||
|
||||
metadataRepositoryControl.replay();
|
||||
|
||||
assertNull( repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID ) );
|
||||
assertNull( repositoryStatisticsManager.getLastStatistics( metadataRepository, TEST_REPO_ID ) );
|
||||
|
||||
repositoryStatisticsManager.deleteStatistics( TEST_REPO_ID );
|
||||
repositoryStatisticsManager.deleteStatistics( metadataRepository, TEST_REPO_ID );
|
||||
|
||||
assertNull( repositoryStatisticsManager.getLastStatistics( TEST_REPO_ID ) );
|
||||
assertNull( repositoryStatisticsManager.getLastStatistics( metadataRepository, TEST_REPO_ID ) );
|
||||
|
||||
metadataRepositoryControl.verify();
|
||||
}
|
||||
|
@ -257,12 +257,14 @@ public class RepositoryStatisticsManagerTest
|
|||
|
||||
for ( RepositoryStatistics stats : statsCreated.values() )
|
||||
{
|
||||
repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, stats.getScanStartTime(),
|
||||
stats.getScanEndTime(), 56345, 45 );
|
||||
repositoryStatisticsManager.addStatisticsAfterScan( metadataRepository, TEST_REPO_ID,
|
||||
stats.getScanStartTime(), stats.getScanEndTime(), 56345,
|
||||
45 );
|
||||
}
|
||||
|
||||
List<RepositoryStatistics> list = repositoryStatisticsManager.getStatisticsInRange( TEST_REPO_ID, new Date(
|
||||
current.getTime() - 4000 ), new Date( current.getTime() - 2000 ) );
|
||||
List<RepositoryStatistics> list = repositoryStatisticsManager.getStatisticsInRange( metadataRepository,
|
||||
TEST_REPO_ID, new Date(
|
||||
current.getTime() - 4000 ), new Date( current.getTime() - 2000 ) );
|
||||
|
||||
assertEquals( 1, list.size() );
|
||||
assertEquals( new Date( current.getTime() - 3000 ), list.get( 0 ).getScanStartTime() );
|
||||
|
@ -302,12 +304,14 @@ public class RepositoryStatisticsManagerTest
|
|||
|
||||
for ( RepositoryStatistics stats : statsCreated.values() )
|
||||
{
|
||||
repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, stats.getScanStartTime(),
|
||||
stats.getScanEndTime(), 56345, 45 );
|
||||
repositoryStatisticsManager.addStatisticsAfterScan( metadataRepository, TEST_REPO_ID,
|
||||
stats.getScanStartTime(), stats.getScanEndTime(), 56345,
|
||||
45 );
|
||||
}
|
||||
|
||||
List<RepositoryStatistics> list = repositoryStatisticsManager.getStatisticsInRange( TEST_REPO_ID, new Date(
|
||||
current.getTime() - 4000 ), current );
|
||||
List<RepositoryStatistics> list = repositoryStatisticsManager.getStatisticsInRange( metadataRepository,
|
||||
TEST_REPO_ID, new Date(
|
||||
current.getTime() - 4000 ), current );
|
||||
|
||||
assertEquals( 2, list.size() );
|
||||
assertEquals( new Date( current.getTime() - 3000 ), list.get( 1 ).getScanStartTime() );
|
||||
|
@ -348,12 +352,14 @@ public class RepositoryStatisticsManagerTest
|
|||
|
||||
for ( RepositoryStatistics stats : statsCreated.values() )
|
||||
{
|
||||
repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, stats.getScanStartTime(),
|
||||
stats.getScanEndTime(), 56345, 45 );
|
||||
repositoryStatisticsManager.addStatisticsAfterScan( metadataRepository, TEST_REPO_ID,
|
||||
stats.getScanStartTime(), stats.getScanEndTime(), 56345,
|
||||
45 );
|
||||
}
|
||||
|
||||
List<RepositoryStatistics> list = repositoryStatisticsManager.getStatisticsInRange( TEST_REPO_ID, new Date(
|
||||
current.getTime() - 20000 ), new Date( current.getTime() - 2000 ) );
|
||||
List<RepositoryStatistics> list = repositoryStatisticsManager.getStatisticsInRange( metadataRepository,
|
||||
TEST_REPO_ID, new Date(
|
||||
current.getTime() - 20000 ), new Date( current.getTime() - 2000 ) );
|
||||
|
||||
assertEquals( 2, list.size() );
|
||||
assertEquals( new Date( current.getTime() - 12345 ), list.get( 1 ).getScanStartTime() );
|
||||
|
@ -399,12 +405,14 @@ public class RepositoryStatisticsManagerTest
|
|||
|
||||
for ( RepositoryStatistics stats : statsCreated.values() )
|
||||
{
|
||||
repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, stats.getScanStartTime(),
|
||||
stats.getScanEndTime(), 56345, 45 );
|
||||
repositoryStatisticsManager.addStatisticsAfterScan( metadataRepository, TEST_REPO_ID,
|
||||
stats.getScanStartTime(), stats.getScanEndTime(), 56345,
|
||||
45 );
|
||||
}
|
||||
|
||||
List<RepositoryStatistics> list = repositoryStatisticsManager.getStatisticsInRange( TEST_REPO_ID, new Date(
|
||||
current.getTime() - 20000 ), current );
|
||||
List<RepositoryStatistics> list = repositoryStatisticsManager.getStatisticsInRange( metadataRepository,
|
||||
TEST_REPO_ID, new Date(
|
||||
current.getTime() - 20000 ), current );
|
||||
|
||||
assertEquals( 3, list.size() );
|
||||
assertEquals( new Date( current.getTime() - 12345 ), list.get( 2 ).getScanStartTime() );
|
||||
|
@ -435,12 +443,14 @@ public class RepositoryStatisticsManagerTest
|
|||
|
||||
for ( RepositoryStatistics stats : statsCreated.values() )
|
||||
{
|
||||
repositoryStatisticsManager.addStatisticsAfterScan( TEST_REPO_ID, stats.getScanStartTime(),
|
||||
stats.getScanEndTime(), 56345, 45 );
|
||||
repositoryStatisticsManager.addStatisticsAfterScan( metadataRepository, TEST_REPO_ID,
|
||||
stats.getScanStartTime(), stats.getScanEndTime(), 56345,
|
||||
45 );
|
||||
}
|
||||
|
||||
List<RepositoryStatistics> list = repositoryStatisticsManager.getStatisticsInRange( TEST_REPO_ID, new Date(
|
||||
current.getTime() - 20000 ), new Date( current.getTime() - 16000 ) );
|
||||
List<RepositoryStatistics> list = repositoryStatisticsManager.getStatisticsInRange( metadataRepository,
|
||||
TEST_REPO_ID, new Date(
|
||||
current.getTime() - 20000 ), new Date( current.getTime() - 16000 ) );
|
||||
|
||||
assertEquals( 0, list.size() );
|
||||
|
||||
|
|
|
@ -20,33 +20,33 @@ package org.apache.archiva.stagerepository.merge;
*/

import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.filter.Filter;
import org.apache.archiva.metadata.repository.filter.IncludesFilter;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.storage.RepositoryPathTranslator;
import org.apache.maven.archiva.repository.RepositoryException;
import org.apache.maven.archiva.repository.metadata.RepositoryMetadataException;
import org.apache.maven.archiva.repository.metadata.RepositoryMetadataWriter;
import org.apache.maven.archiva.repository.metadata.RepositoryMetadataReader;
import org.apache.maven.archiva.common.utils.VersionComparator;
import org.apache.maven.archiva.common.utils.VersionUtil;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.Configuration;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.model.ArchivaRepositoryMetadata;
import org.apache.maven.archiva.common.utils.VersionComparator;
import org.apache.maven.archiva.common.utils.VersionUtil;
import org.apache.maven.archiva.repository.RepositoryException;
import org.apache.maven.archiva.repository.metadata.RepositoryMetadataException;
import org.apache.maven.archiva.repository.metadata.RepositoryMetadataReader;
import org.apache.maven.archiva.repository.metadata.RepositoryMetadataWriter;

import java.util.List;
import java.util.Date;
import java.util.Calendar;
import java.util.TimeZone;
import java.util.ArrayList;
import java.util.Collections;
import java.io.IOException;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.TimeZone;

/**
* @plexus.component role="org.apache.archiva.stagerepository.merge.RepositoryMerger" role-hint="maven2"
@ -54,12 +54,6 @@ import java.text.SimpleDateFormat;
public class Maven2RepositoryMerger
implements RepositoryMerger
{

/**
* @plexus.requirement role-hint="default"
*/
private MetadataRepository metadataRepository;

/**
* @plexus.requirement role-hint="default"
*/
@ -77,12 +71,7 @@ public class Maven2RepositoryMerger
this.configuration = configuration;
}

public void setMetadataRepository( MetadataRepository metadataRepository )
{
this.metadataRepository = metadataRepository;
}

public void merge( String sourceRepoId, String targetRepoId )
public void merge( MetadataRepository metadataRepository, String sourceRepoId, String targetRepoId )
throws Exception
{
@ -95,7 +84,8 @@ public class Maven2RepositoryMerger
}

// TODO when UI needs a subset to merge
public void merge( String sourceRepoId, String targetRepoId, Filter<ArtifactMetadata> filter )
public void merge( MetadataRepository metadataRepository, String sourceRepoId, String targetRepoId,
Filter<ArtifactMetadata> filter )
throws Exception
{
List<ArtifactMetadata> sourceArtifacts = metadataRepository.getArtifacts( sourceRepoId );
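As a hedged illustration of the filtered overload above: the caller now supplies both the MetadataRepository and the filter. The IncludesFilter constructor and the repository ids used here are assumptions for the sketch only.

import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.filter.Filter;
import org.apache.archiva.metadata.repository.filter.IncludesFilter;
import org.apache.archiva.stagerepository.merge.RepositoryMerger;

import java.util.List;

public class FilteredMergeSketch
{
    public void mergeSelection( RepositoryMerger merger, MetadataRepository metadataRepository,
                                List<ArtifactMetadata> selectedArtifacts )
        throws Exception
    {
        // Restrict the merge to the artifacts the user selected (IncludesFilter constructor assumed).
        Filter<ArtifactMetadata> onlySelected = new IncludesFilter<ArtifactMetadata>( selectedArtifacts );

        // The metadata repository is handed in per call instead of being injected into the merger.
        merger.merge( metadataRepository, "staging-repo", "internal", onlySelected );
    }
}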
@ -183,15 +173,16 @@ public class Maven2RepositoryMerger
{

// updating version metadata files
File versionMetaDataFileInSourceRepo =
pathTranslator.toFile( new File( sourceRepoPath ), artifactMetadata.getNamespace(),
artifactMetadata.getProject(), artifactMetadata.getVersion(),
METADATA_FILENAME );
File versionMetaDataFileInSourceRepo = pathTranslator.toFile( new File( sourceRepoPath ),
artifactMetadata.getNamespace(),
artifactMetadata.getProject(),
artifactMetadata.getVersion(),
METADATA_FILENAME );

if( versionMetaDataFileInSourceRepo.exists() )
if ( versionMetaDataFileInSourceRepo.exists() )
{
String relativePathToVersionMetadataFile =
versionMetaDataFileInSourceRepo.getAbsolutePath().split( sourceRepoPath )[1];
String relativePathToVersionMetadataFile = versionMetaDataFileInSourceRepo.getAbsolutePath().split(
sourceRepoPath )[1];
File versionMetaDataFileInTargetRepo = new File( targetRepoPath, relativePathToVersionMetadataFile );

if ( !versionMetaDataFileInTargetRepo.exists() )
@ -209,10 +200,10 @@ public class Maven2RepositoryMerger
String projectDirectoryInSourceRepo = new File( versionMetaDataFileInSourceRepo.getParent() ).getParent();
File projectMetadataFileInSourceRepo = new File( projectDirectoryInSourceRepo, METADATA_FILENAME );

if( projectMetadataFileInSourceRepo.exists() )
if ( projectMetadataFileInSourceRepo.exists() )
{
String relativePathToProjectMetadataFile =
projectMetadataFileInSourceRepo.getAbsolutePath().split( sourceRepoPath )[1];
String relativePathToProjectMetadataFile = projectMetadataFileInSourceRepo.getAbsolutePath().split(
sourceRepoPath )[1];
File projectMetadataFileInTargetRepo = new File( targetRepoPath, relativePathToProjectMetadataFile );

if ( !projectMetadataFileInTargetRepo.exists() )
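For clarity, a hedged sketch of the path handling in the two hunks above: resolve the metadata file in the source repository via the path translator, strip the source-repository prefix, and check whether the target repository already has it. The METADATA_FILENAME value and the helper's name are assumptions.

import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.repository.storage.RepositoryPathTranslator;

import java.io.File;

public class MetadataRelocationSketch
{
    // Value assumed; the constant is referenced but not defined in this diff.
    private static final String METADATA_FILENAME = "maven-metadata.xml";

    // Returns the target-side location if the version metadata file still needs copying, or null otherwise.
    File versionMetadataToCopy( RepositoryPathTranslator pathTranslator, String sourceRepoPath,
                                String targetRepoPath, ArtifactMetadata artifact )
    {
        File inSource = pathTranslator.toFile( new File( sourceRepoPath ), artifact.getNamespace(),
                                               artifact.getProject(), artifact.getVersion(), METADATA_FILENAME );
        if ( !inSource.exists() )
        {
            return null;
        }

        // Strip the source repository prefix to obtain the repository-relative path.
        String relativePath = inSource.getAbsolutePath().split( sourceRepoPath )[1];
        File inTarget = new File( targetRepoPath, relativePath );

        return inTarget.exists() ? null : inTarget;
    }
}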
@ -332,7 +323,8 @@ public class Maven2RepositoryMerger
return metadata;
}

public List<ArtifactMetadata> getConflictingArtifacts( String sourceRepo, String targetRepo )
public List<ArtifactMetadata> getConflictingArtifacts( MetadataRepository metadataRepository, String sourceRepo,
String targetRepo )
throws Exception
{
List<ArtifactMetadata> targetArtifacts = metadataRepository.getArtifacts( targetRepo );
@ -364,9 +356,9 @@ public class Maven2RepositoryMerger
boolean isSame = false;

if ( ( sourceArtifact.getNamespace().equals( targetArtifact.getNamespace() ) ) &&
( sourceArtifact.getProject().equals( targetArtifact.getProject() ) ) &&
( sourceArtifact.getId().equals( targetArtifact.getId() ) ) &&
( sourceArtifact.getProjectVersion().equals( targetArtifact.getProjectVersion() ) ) )
( sourceArtifact.getProject().equals( targetArtifact.getProject() ) ) && ( sourceArtifact.getId().equals(
targetArtifact.getId() ) ) && ( sourceArtifact.getProjectVersion().equals(
targetArtifact.getProjectVersion() ) ) )

{
isSame = true;
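The comparison above reads more easily as a small predicate; a sketch using only the getters that appear in this hunk (the helper name is illustrative):

import org.apache.archiva.metadata.model.ArtifactMetadata;

final class ArtifactEqualitySketch
{
    // True when both artifacts have the same namespace, project, id, and project version.
    static boolean sameArtifact( ArtifactMetadata source, ArtifactMetadata target )
    {
        return source.getNamespace().equals( target.getNamespace() )
            && source.getProject().equals( target.getProject() )
            && source.getId().equals( target.getId() )
            && source.getProjectVersion().equals( target.getProjectVersion() );
    }
}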
@ -19,18 +19,22 @@ package org.apache.archiva.stagerepository.merge;
* under the License.
*/

import java.util.List;

import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.repository.filter.Filter;

import java.util.List;

public interface RepositoryMerger
{
void merge( String sourceRepoId, String targetRepoId )
void merge( MetadataRepository metadataRepository, String sourceRepoId, String targetRepoId )
throws Exception;

void merge( String sourceRepoId, String targetRepoId, Filter<ArtifactMetadata> filter ) throws Exception;

public List<ArtifactMetadata> getConflictingArtifacts( String sourceRepo, String targetRepo )
void merge( MetadataRepository metadataRepository, String sourceRepoId, String targetRepoId,
Filter<ArtifactMetadata> filter )
throws Exception;

public List<ArtifactMetadata> getConflictingArtifacts( MetadataRepository metadataRepository, String sourceRepo,
String targetRepo )
throws Exception;
}
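With these signatures, the merger behaves as a stateless service: the caller supplies the MetadataRepository for the current session along with the repository ids. A hedged caller sketch (repository ids are illustrative):

import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.stagerepository.merge.RepositoryMerger;

import java.util.List;

public class MergeCallerSketch
{
    public void promoteStage( RepositoryMerger merger, MetadataRepository metadataRepository )
        throws Exception
    {
        // Look for clashes first, then merge everything from the staging repository.
        List<ArtifactMetadata> conflicts = merger.getConflictingArtifacts( metadataRepository, "stage-repo",
                                                                           "internal" );
        if ( conflicts.isEmpty() )
        {
            merger.merge( metadataRepository, "stage-repo", "internal" );
        }
    }
}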
@ -19,23 +19,21 @@ package org.apache.archiva.stagerepository.merge;
* under the License.
*/

import org.codehaus.plexus.spring.PlexusInSpringTestCase;
import org.apache.maven.archiva.configuration.Configuration;
import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.Configuration;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.configuration.RepositoryScanningConfiguration;
import org.apache.maven.archiva.repository.RepositoryContentFactory;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.mockito.*;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.verify;
import org.codehaus.plexus.spring.PlexusInSpringTestCase;
import org.junit.Before;
import org.mockito.MockitoAnnotations;

import java.util.List;
import java.util.ArrayList;
import java.io.File;
import java.util.ArrayList;
import java.util.List;

import static org.mockito.Mockito.*;

public class Maven2RepositoryMergerTest
extends PlexusInSpringTestCase
@ -43,17 +41,6 @@ public class Maven2RepositoryMergerTest

private static final String TEST_REPO_ID = "test";

private static final String TARGET_REPOSITORY_ID = "target-repo";

private Configuration config;

@MockitoAnnotations.Mock
private MetadataRepository metadataResolver;

private RepositoryContentFactory repositoryFactory;

private ArchivaConfiguration configuration;

private Maven2RepositoryMerger repositoryMerger;

private MetadataRepository metadataRepository;
@ -66,7 +53,6 @@ public class Maven2RepositoryMergerTest
MockitoAnnotations.initMocks( this );
metadataRepository = mock( MetadataRepository.class );
repositoryMerger = (Maven2RepositoryMerger) lookup( RepositoryMerger.class, "maven2" );
repositoryMerger.setMetadataRepository( metadataRepository );
}

private List<ArtifactMetadata> getArtifacts()
@ -106,7 +92,7 @@ public class Maven2RepositoryMergerTest
configuration.save( c );

when( metadataRepository.getArtifacts( TEST_REPO_ID ) ).thenReturn( getArtifacts() );
repositoryMerger.merge( TEST_REPO_ID, "target-rep" );
repositoryMerger.merge( metadataRepository, TEST_REPO_ID, "target-rep" );
verify( metadataRepository ).getArtifacts( TEST_REPO_ID );
}

@ -154,7 +140,8 @@ public class Maven2RepositoryMergerTest
when( metadataRepository.getArtifacts( sourceRepoId ) ).thenReturn( sourceRepoArtifactsList );
when( metadataRepository.getArtifacts( TEST_REPO_ID ) ).thenReturn( targetRepoArtifactsList );

assertEquals( 1, repositoryMerger.getConflictingArtifacts( sourceRepoId, TEST_REPO_ID ).size() );
assertEquals( 1, repositoryMerger.getConflictingArtifacts( metadataRepository, sourceRepoId,
TEST_REPO_ID ).size() );
verify( metadataRepository ).getArtifacts( TEST_REPO_ID );
}

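Finally, the test pattern shown above in isolation: the MetadataRepository is a plain Mockito mock passed into each call, so no setter on the merger is needed. A minimal sketch; the artifact list and repository ids are placeholders.

import org.apache.archiva.metadata.model.ArtifactMetadata;
import org.apache.archiva.metadata.repository.MetadataRepository;
import org.apache.archiva.stagerepository.merge.RepositoryMerger;

import java.util.ArrayList;
import java.util.List;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

public class MergerMockSketch
{
    public void mergeIsDrivenByTheMockedMetadataRepository( RepositoryMerger repositoryMerger )
        throws Exception
    {
        MetadataRepository metadataRepository = mock( MetadataRepository.class );

        // Stub the artifacts the merger will read from the source repository.
        List<ArtifactMetadata> sourceArtifacts = new ArrayList<ArtifactMetadata>();
        when( metadataRepository.getArtifacts( "test" ) ).thenReturn( sourceArtifacts );

        // The mock is passed straight into the call under test...
        repositoryMerger.merge( metadataRepository, "test", "target-rep" );

        // ...and the interaction is asserted on the same mock.
        verify( metadataRepository ).getArtifacts( "test" );
    }
}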